gt stringclasses 1 value | context stringlengths 2.05k 161k |
|---|---|
package cz.metacentrum.perun.webgui.tabs.servicestabs;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.resources.client.ImageResource;
import com.google.gwt.user.client.ui.CheckBox;
import com.google.gwt.user.client.ui.FlexTable;
import com.google.gwt.user.client.ui.FlexTable.FlexCellFormatter;
import com.google.gwt.user.client.ui.HasHorizontalAlignment;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.SimplePanel;
import com.google.gwt.user.client.ui.VerticalPanel;
import com.google.gwt.user.client.ui.Widget;
import cz.metacentrum.perun.webgui.client.PerunWebSession;
import cz.metacentrum.perun.webgui.client.localization.ButtonTranslation;
import cz.metacentrum.perun.webgui.client.resources.ButtonType;
import cz.metacentrum.perun.webgui.client.resources.SmallIcons;
import cz.metacentrum.perun.webgui.client.resources.Utils;
import cz.metacentrum.perun.webgui.json.JsonCallbackEvents;
import cz.metacentrum.perun.webgui.json.JsonUtils;
import cz.metacentrum.perun.webgui.json.servicesManager.UpdateService;
import cz.metacentrum.perun.webgui.model.Service;
import cz.metacentrum.perun.webgui.tabs.TabItem;
import cz.metacentrum.perun.webgui.widgets.CustomButton;
import cz.metacentrum.perun.webgui.widgets.ExtendedTextBox;
import cz.metacentrum.perun.webgui.widgets.TabMenu;
/**
* !! USE ONLY AS INNER TAB !!!
* Edit Service details tab
*
* @author Pavel Zlamal <256627@mail.muni.cz>
*/
public class EditServiceDetailsTabItem implements TabItem {

	/**
	 * Perun web session
	 */
	private PerunWebSession session = PerunWebSession.getInstance();

	/**
	 * Content widget - should be simple panel
	 */
	private SimplePanel contentWidget = new SimplePanel();

	/**
	 * Title widget
	 */
	private Label titleWidget = new Label("Edit: ");

	/**
	 * Data
	 */
	private Service service;
	private ButtonTranslation buttonTranslation = ButtonTranslation.INSTANCE;
	private JsonCallbackEvents events;

	/**
	 * Creates a tab instance
	 *
	 * @param service service to edit
	 * @param event events triggered when the service is successfully updated
	 */
	public EditServiceDetailsTabItem(Service service, JsonCallbackEvents event){
		this.service = service;
		this.events = event;
	}

	public boolean isPrepared(){
		return (service != null);
	}

	@Override
	public boolean isRefreshParentOnClose() {
		return false;
	}

	@Override
	public void onClose() {
	}

	/**
	 * Builds the edit form (name, description, status, delay, recurrence,
	 * script path) with per-field validation and SAVE / CANCEL buttons.
	 *
	 * @return widget with the form
	 */
	public Widget draw() {

		titleWidget = new Label("Edit service");

		VerticalPanel vp = new VerticalPanel();

		final ExtendedTextBox serviceName = new ExtendedTextBox();
		final ExtendedTextBox serviceDescription = new ExtendedTextBox();
		final ExtendedTextBox scriptPath = new ExtendedTextBox();
		final CheckBox enabled = new CheckBox();
		final ExtendedTextBox delay = new ExtendedTextBox();
		final ExtendedTextBox recurrence = new ExtendedTextBox();

		// pre-fill form with current service values
		serviceName.getTextBox().setText(service.getName());
		serviceDescription.getTextBox().setText(service.getDescription());
		scriptPath.getTextBox().setText(service.getScriptPath());
		enabled.setValue(service.isEnabled());
		delay.getTextBox().setText(String.valueOf(service.getDelay()));
		recurrence.getTextBox().setText(String.valueOf(service.getRecurrence()));

		final ExtendedTextBox.TextBoxValidator validator = new ExtendedTextBox.TextBoxValidator() {
			@Override
			public boolean validateTextBox() {
				if (serviceName.getTextBox().getText().trim().isEmpty()) {
					serviceName.setError("Name can't be empty");
					return false;
				} else if (!serviceName.getTextBox().getText().trim().matches(Utils.SERVICE_NAME_MATCHER)) {
					serviceName.setError("Name can contain only letters, numbers and underscore.");
					return false;
				} else {
					serviceName.setOk();
					// fill script path on service name change
					scriptPath.getTextBox().setValue("./"+serviceName.getTextBox().getText().trim().toLowerCase().replaceAll(Utils.SERVICE_NAME_TO_SCRIP_PATH_MATCHER,"_"));
					return true;
				}
			}
		};
		serviceName.setValidator(validator);

		enabled.setText("Enabled / Disabled");

		final ExtendedTextBox.TextBoxValidator delayValidator = new ExtendedTextBox.TextBoxValidator() {
			@Override
			public boolean validateTextBox() {
				if (!JsonUtils.checkParseInt(delay.getTextBox().getText().trim())) {
					delay.setError("Delay must be a number (time in minutes) !");
					return false;
				} else {
					delay.setOk();
					return true;
				}
			}
		};
		delay.setValidator(delayValidator);

		final ExtendedTextBox.TextBoxValidator recurrenceValidator = new ExtendedTextBox.TextBoxValidator() {
			@Override
			public boolean validateTextBox() {
				// FIX: validate the recurrence box itself (previously checked "delay"
				// by mistake, so an invalid recurrence passed whenever delay was numeric)
				if (!JsonUtils.checkParseInt(recurrence.getTextBox().getText().trim())) {
					recurrence.setError("Recurrence must be a number!");
					return false;
				} else {
					recurrence.setOk();
					return true;
				}
			}
		};
		recurrence.setValidator(recurrenceValidator);

		final ExtendedTextBox.TextBoxValidator scriptValidator = new ExtendedTextBox.TextBoxValidator() {
			@Override
			public boolean validateTextBox() {
				if (scriptPath.getTextBox().getText().trim().isEmpty()) {
					scriptPath.setError("Script path can't be empty !");
					return false;
				} else {
					scriptPath.setOk();
					return true;
				}
			}
		};
		scriptPath.setValidator(scriptValidator);

		// prepares layout
		FlexTable layout = new FlexTable();
		layout.setStyleName("inputFormFlexTable");
		FlexCellFormatter cellFormatter = layout.getFlexCellFormatter();

		// close tab events
		final TabItem tab = this;
		TabMenu menu = new TabMenu();

		// send button - updates the service only when all four validators pass
		final CustomButton saveButton = TabMenu.getPredefinedButton(ButtonType.SAVE, buttonTranslation.saveServiceDetails());
		saveButton.addClickHandler(new ClickHandler() {
			public void onClick(ClickEvent event) {
				if (validator.validateTextBox() && delayValidator.validateTextBox() && scriptValidator.validateTextBox() && recurrenceValidator.validateTextBox()) {
					Service serv = JsonUtils.clone(service).cast();
					serv.setName(serviceName.getTextBox().getText().trim());
					String desc = serviceDescription.getTextBox().getText().trim();
					// empty description is stored as null
					if (desc.isEmpty()) desc = null;
					serv.setDescription(desc);
					serv.setDelay(Integer.parseInt(delay.getTextBox().getText().trim()));
					serv.setRecurrence(Integer.parseInt(recurrence.getTextBox().getText().trim()));
					serv.setEnabled(enabled.getValue());
					serv.setScriptPath(scriptPath.getTextBox().getText().trim());
					UpdateService request = new UpdateService(JsonCallbackEvents.closeTabDisableButtonEvents(saveButton, tab, true, events));
					request.updateService(serv);
				}
			}
		});

		// cancel button
		final CustomButton cancelButton = TabMenu.getPredefinedButton(ButtonType.CANCEL, "");
		cancelButton.addClickHandler(new ClickHandler() {
			@Override
			public void onClick(ClickEvent clickEvent) {
				session.getTabManager().closeTab(tab, isRefreshParentOnClose());
			}
		});

		// fill form
		layout.setHTML(0, 0, "Name:");
		layout.setHTML(1, 0, "Description:");
		layout.setHTML(2, 0, "Status:");
		layout.setHTML(3, 0, "Delay:");
		layout.setHTML(4, 0, "Recurrence:");
		layout.setHTML(5, 0, "Script path:");
		layout.setWidget(0, 1, serviceName);
		layout.setWidget(1, 1, serviceDescription);
		layout.setWidget(2, 1, enabled);
		layout.setWidget(3, 1, delay);
		layout.setWidget(4, 1, recurrence);
		layout.setWidget(5, 1, scriptPath);

		for (int i=0; i<layout.getRowCount(); i++) {
			cellFormatter.addStyleName(i, 0, "itemName");
		}

		menu.addWidget(saveButton);
		menu.addWidget(cancelButton);

		vp.add(layout);
		vp.add(menu);
		vp.setCellHorizontalAlignment(menu, HasHorizontalAlignment.ALIGN_RIGHT);

		this.contentWidget.setWidget(vp);

		return getWidget();
	}

	public Widget getWidget() {
		return this.contentWidget;
	}

	public Widget getTitle() {
		return this.titleWidget;
	}

	public ImageResource getIcon() {
		return SmallIcons.INSTANCE.applicationFormEditIcon();
	}

	@Override
	public int hashCode() {
		// NOTE(review): constant hash for all instances - consistent with equals,
		// but every instance lands in the same hash bucket.
		final int prime = 593441861;
		int result = 1;
		result = prime * result + 6786786;
		return result;
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (obj == null)
			return false;
		if (getClass() != obj.getClass())
			return false;
		EditServiceDetailsTabItem other = (EditServiceDetailsTabItem) obj;
		// NOTE(review): intentional reference comparison of the JS overlay objects;
		// two tabs for the same service object are considered equal.
		if (service != other.service)
			return false;
		return true;
	}

	public boolean multipleInstancesEnabled() {
		return false;
	}

	public void open() { }

	public boolean isAuthorized() {
		// editing services is allowed to Perun admins only
		return session.isPerunAdmin();
	}
}
| |
/*
* Copyright 2014 Matthias Einwag
*
* The jawampa authors license this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package ws.wamp.jawampa;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.channel.group.ChannelGroup;
import io.netty.channel.group.DefaultChannelGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import java.util.ArrayList;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ThreadFactory;
import rx.Scheduler;
import rx.schedulers.Schedulers;
import ws.wamp.jawampa.WampMessages.*;
import ws.wamp.jawampa.internal.IdGenerator;
import ws.wamp.jawampa.internal.IdValidator;
import ws.wamp.jawampa.internal.RealmConfig;
import ws.wamp.jawampa.internal.UriValidator;
import ws.wamp.jawampa.internal.Version;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
/**
* The {@link WampRouter} provides Dealer and Broker functionality for the WAMP
* protocol.<br>
*/
public class WampRouter {
/**
 * The set of client roles this router accepts in a HELLO message.
 * Built once in the static initializer and never modified afterwards.
 */
final static Set<WampRoles> SUPPORTED_CLIENT_ROLES;
static {
    SUPPORTED_CLIENT_ROLES = new HashSet<WampRoles>();
    SUPPORTED_CLIENT_ROLES.add(WampRoles.Caller);
    SUPPORTED_CLIENT_ROLES.add(WampRoles.Callee);
    SUPPORTED_CLIENT_ROLES.add(WampRoles.Publisher);
    SUPPORTED_CLIENT_ROLES.add(WampRoles.Subscriber);
}
/** Represents a realm that is exposed through the router */
static class Realm {
    /** Configuration for this realm (e.g. URI validation strictness). */
    final RealmConfig config;
    /** All channels (client sessions) currently joined to this realm. */
    final List<WampRouterHandler> channels = new ArrayList<WampRouterHandler>();
    /** Registered procedures, keyed by procedure URI. */
    final Map<String, Procedure> procedures = new HashMap<String, Procedure>();

    // Fields that are used for implementing subscription functionality
    // Subscriptions are indexed twice: per match-type by topic, and globally by ID.
    final EnumMap<SubscriptionFlags, Map<String, Subscription>> subscriptionsByFlags
        = new EnumMap<SubscriptionFlags, Map<String, Subscription>>(SubscriptionFlags.class);
    final Map<Long, Subscription> subscriptionsById = new HashMap<Long, Subscription>();
    // Last subscription ID handed out; used for linear ID generation.
    long lastUsedSubscriptionId = IdValidator.MIN_VALID_ID;

    public Realm(RealmConfig config) {
        this.config = config;
        // Pre-create one topic map per supported subscription match type
        subscriptionsByFlags.put(SubscriptionFlags.Exact, new HashMap<String, Subscription>());
        subscriptionsByFlags.put(SubscriptionFlags.Prefix, new HashMap<String, Subscription>());
        subscriptionsByFlags.put(SubscriptionFlags.Wildcard, new HashMap<String, Subscription>());
    }

    /**
     * Attaches a channel to this realm and stores its session data on the handler.
     *
     * @param channel the channel to attach
     * @param sessionId the WAMP session ID assigned to the channel
     * @param roles the roles the client announced in HELLO
     */
    void includeChannel(WampRouterHandler channel, long sessionId, Set<WampRoles> roles) {
        channels.add(channel);
        channel.realm = this;
        channel.sessionId = sessionId;
        channel.roles = roles;
    }

    /**
     * Detaches a channel from this realm: removes its subscriptions, unregisters
     * its procedures (failing all pending calls with NO_SUCH_PROCEDURE), and
     * clears the session state on the handler.
     *
     * @param channel the channel to detach; no-op if it has no realm
     * @param removeFromList whether to also remove it from {@link #channels}
     *        (pass false when iterating over the channel list, as close() does)
     */
    void removeChannel(WampRouterHandler channel, boolean removeFromList) {
        if (channel.realm == null) return;

        if (channel.subscriptionsById != null) {
            // Remove the channels subscriptions from our subscription table
            for (Subscription sub : channel.subscriptionsById.values()) {
                sub.subscribers.remove(channel);
                if (sub.subscribers.isEmpty()) {
                    // Subscription is no longer used by any client
                    subscriptionsByFlags.get(sub.flags).remove(sub.topic);
                    subscriptionsById.remove(sub.subscriptionId);
                }
            }
            channel.subscriptionsById.clear();
            channel.subscriptionsById = null;
        }

        if (channel.providedProcedures != null) {
            // Remove the clients procedures from our procedure table
            for (Procedure proc : channel.providedProcedures.values()) {
                // Clear all pending invocations and thereby inform other clients
                // that the proc has gone away
                for (Invocation invoc : proc.pendingCalls) {
                    // Only notify callers whose channel is still open
                    if (invoc.caller.state != RouterHandlerState.Open) continue;
                    ErrorMessage errMsg = new ErrorMessage(CallMessage.ID, invoc.callRequestId,
                        null, ApplicationError.NO_SUCH_PROCEDURE, null, null);
                    invoc.caller.ctx.writeAndFlush(errMsg);
                }
                proc.pendingCalls.clear();
                // Remove the procedure from the realm
                procedures.remove(proc.procName);
            }
            channel.providedProcedures = null;
            channel.pendingInvocations = null;
        }

        // Reset the per-channel session state
        channel.realm = null;
        channel.roles.clear();
        channel.roles = null;
        channel.sessionId = 0;

        if (removeFromList) {
            channels.remove(channel);
        }
    }
}
/** A procedure that one client provides within a realm. */
static class Procedure {
    /** The procedure URI under which it was registered. */
    final String procName;
    /** The channel that provides (implements) this procedure. */
    final WampRouterHandler provider;
    /** Registration ID returned to the provider. */
    final long registrationId;
    /** Calls forwarded to the provider that still await a YIELD or ERROR. */
    final List<Invocation> pendingCalls = new ArrayList<Invocation>();

    public Procedure(String procedureName, WampRouterHandler providingChannel, long procRegistrationId) {
        this.procName = procedureName;
        this.provider = providingChannel;
        this.registrationId = procRegistrationId;
    }
}
/**
 * Tracks one in-flight RPC: links the caller's CALL request to the
 * INVOCATION request that was forwarded to the procedure provider.
 */
static class Invocation {
    // The procedure being invoked
    Procedure procedure;
    // Request ID from the caller's CALL message
    long callRequestId;
    // Channel of the original caller (receives the RESULT / ERROR)
    WampRouterHandler caller;
    // Request ID used in the INVOCATION message sent to the provider
    long invocationRequestId;
}
/**
 * A subscription to one topic with one match type, shared by every
 * client on the realm that subscribed to that (topic, match-type) pair.
 */
static class Subscription {
    final String topic;
    final SubscriptionFlags flags;
    final String[] components; // non-null only for wildcard type
    final long subscriptionId;
    final Set<WampRouterHandler> subscribers;

    public Subscription(String subscribedTopic, SubscriptionFlags matchFlags, long id) {
        this.topic = subscribedTopic;
        this.flags = matchFlags;
        if (matchFlags == SubscriptionFlags.Wildcard) {
            // Pre-split the topic once so publish-time matching needn't re-split it
            this.components = subscribedTopic.split("\\.", -1);
        } else {
            this.components = null;
        }
        this.subscriptionId = id;
        this.subscribers = new HashSet<WampRouterHandler>();
    }
}
// Single-threaded event loop; all router state is accessed only from it
final EventLoopGroup eventLoop;
// RX scheduler built on top of the event loop
final Scheduler scheduler;
// Jackson mapper used for JSON serialization/deserialization by this router
final ObjectMapper objectMapper = new ObjectMapper();

// Set to true by close(); afterwards new connections are rejected
boolean isDisposed = false;

// Realms exposed by this router, keyed by realm URI
final Map<String, Realm> realms;
// Channels that are connected but have not (yet) joined a realm
final ChannelGroup idleChannels;
/**
 * Returns the (singlethreaded) EventLoop on which this router is running.<br>
 * This is required by other Netty ChannelHandlers that want to forward messages
 * to the router.
 *
 * @return the router's event loop group
 */
public EventLoopGroup eventLoop() {
    return eventLoop;
}
/**
 * Returns the Jackson {@link ObjectMapper} that is used for JSON serialization,
 * deserialization and object mapping by this router.
 *
 * @return the shared object mapper instance
 */
public ObjectMapper objectMapper() {
    return objectMapper;
}
/**
 * Creates a router that exposes the given realms.
 *
 * @param realms realm configurations, keyed by realm URI
 */
WampRouter(Map<String, RealmConfig> realms) {

    // Build the realm table from the supplied configuration
    this.realms = new HashMap<String, Realm>();
    for (Map.Entry<String, RealmConfig> entry : realms.entrySet()) {
        this.realms.put(entry.getKey(), new Realm(entry.getValue()));
    }

    // Single daemon thread; confines all router state to one event loop
    this.eventLoop = new NioEventLoopGroup(1, new ThreadFactory() {
        @Override
        public Thread newThread(Runnable runnable) {
            Thread routerThread = new Thread(runnable, "WampRouterEventLoop");
            routerThread.setDaemon(true);
            return routerThread;
        }
    });
    // Expose the same event loop as an RX scheduler
    this.scheduler = Schedulers.from(eventLoop);

    idleChannels = new DefaultChannelGroup(eventLoop.next());
}
/**
 * Closes the router.<br>
 * This will shut down all realm that are registered to the router.
 * All connections to clients on the realm will be closed.<br>
 * However pending calls will be completed through an error message
 * as far as possible.
 */
public void close() {
    // Idempotent: skip when a shutdown is already in progress
    if (eventLoop.isShuttingDown() || eventLoop.isShutdown()) return;

    // All state is owned by the event loop thread, so close there
    eventLoop.execute(new Runnable() {
        @Override
        public void run() {
            if (isDisposed) return;
            isDisposed = true;

            // Close all currently connected channels
            idleChannels.close();
            idleChannels.clear();

            for (Realm ri : realms.values()) {
                for (WampRouterHandler channel : ri.channels) {
                    // Detach first (fails pending calls), then say goodbye and close
                    ri.removeChannel(channel, false);
                    channel.markAsClosed();
                    GoodbyeMessage goodbye = new GoodbyeMessage(null, ApplicationError.SYSTEM_SHUTDOWN);
                    channel.ctx.writeAndFlush(goodbye).addListener(ChannelFutureListener.CLOSE);
                }
                ri.channels.clear();
            }

            eventLoop.shutdownGracefully();
        }
    });
}
/**
 * Creates a new Netty handler that feeds one client connection into this router.
 * One handler instance must be created per channel/pipeline.
 *
 * @return a fresh, unshared channel handler
 */
public ChannelHandler createRouterHandler() {
    return new WampRouterHandler();
}
/** Lifecycle state of a {@link WampRouterHandler}; closed handlers ignore all traffic. */
enum RouterHandlerState {
    Open,
    Closed
}
/**
 * Per-connection Netty handler holding all session state for one client
 * (realm, roles, registered procedures, subscriptions) and routing its
 * messages into the enclosing router.
 */
class WampRouterHandler extends SimpleChannelInboundHandler<WampMessage> {

    // Whether this handler still processes messages
    public RouterHandlerState state = RouterHandlerState.Open;
    // Captured in handlerAdded(); used to write replies back to the client
    ChannelHandlerContext ctx;
    long sessionId;
    // Non-null once the client has joined a realm
    Realm realm;
    Set<WampRoles> roles;

    /**
     * Procedures that this channel provides.<br>
     * Key is the registration ID, Value is the procedure
     */
    Map<Long, Procedure> providedProcedures;

    // Invocations forwarded to this channel (as provider) awaiting YIELD/ERROR,
    // keyed by invocation request ID
    Map<Long, Invocation> pendingInvocations;

    /** The Set of subscriptions to which this channel is subscribed */
    Map<Long, Subscription> subscriptionsById;

    // Last request/registration ID handed out for this channel (linear ID generation)
    long lastUsedId = IdValidator.MIN_VALID_ID;

    void markAsClosed() {
        state = RouterHandlerState.Closed;
    }

    @Override
    public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
        // System.out.println("Router handler added on thread " + Thread.currentThread().getId());
        this.ctx = ctx;
    }

    @Override
    public void channelActive(ChannelHandlerContext ctx) throws Exception {
        // System.out.println("Router handler active on thread " + Thread.currentThread().getId());
        if (state != RouterHandlerState.Open) return;
        if (isDisposed) {
            // Got an incoming connection after the router has already shut down.
            // Therefore we close the connection
            state = RouterHandlerState.Closed;
            ctx.writeAndFlush(Unpooled.EMPTY_BUFFER).addListener(ChannelFutureListener.CLOSE);
        } else {
            idleChannels.add(ctx.channel());
        }
    }

    @Override
    public void channelInactive(ChannelHandlerContext ctx) throws Exception {
        // System.out.println("Router handler inactive on thread " + Thread.currentThread().getId());
        if (isDisposed || state != RouterHandlerState.Open) return;
        markAsClosed();
        if (realm != null) {
            // Clean up all realm state (subscriptions, procedures, pending calls)
            realm.removeChannel(this, true);
        } else {
            idleChannels.remove(ctx.channel());
        }
    }

    @Override
    protected void channelRead0(ChannelHandlerContext ctx, WampMessage msg) throws Exception {
        //System.out.println("Router channel read on thread " + Thread.currentThread().getId());
        if (isDisposed || state != RouterHandlerState.Open) return;
        // Dispatch based on whether the client has already joined a realm
        if (realm == null) {
            onMessageFromUnregisteredChannel(this, msg);
        } else {
            onMessageFromRegisteredChannel(this, msg);
        }
    }

    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
        if (isDisposed || state != RouterHandlerState.Open) return;
        if (realm != null) {
            closeActiveChannel(this, null);
        } else {
            closePassiveChannel(this);
        }
    }
}
/**
 * Dispatches a WAMP message from a channel that has already joined a realm.
 * Implements the Dealer (CALL/REGISTER/YIELD/...) and Broker
 * (SUBSCRIBE/PUBLISH/...) functionality of the router.<br>
 * Must be invoked on the router's event loop.
 *
 * @param handler the channel the message arrived on (realm is non-null)
 * @param msg the decoded WAMP message
 */
private void onMessageFromRegisteredChannel(WampRouterHandler handler, WampMessage msg) {

    // TODO: Validate roles for all relevant messages

    if (msg instanceof HelloMessage || msg instanceof WelcomeMessage) {
        // The client sent hello but it was already registered -> This is an error
        // If the client sends welcome it's also an error
        closeActiveChannel(handler, new GoodbyeMessage(null, ApplicationError.INVALID_ARGUMENT));
    } else if (msg instanceof AbortMessage || msg instanceof GoodbyeMessage) {
        // The client wants to leave the realm
        // Remove the channel from the realm
        handler.realm.removeChannel(handler, true);
        // But add it to the list of passive channels
        idleChannels.add(handler.ctx.channel());
        // Echo the message in case of goodbye
        if (msg instanceof GoodbyeMessage) {
            GoodbyeMessage reply = new GoodbyeMessage(null, ApplicationError.GOODBYE_AND_OUT);
            handler.ctx.writeAndFlush(reply);
        }
    } else if (msg instanceof CallMessage) {
        // The client wants to call a remote function
        // Verify the message
        CallMessage call = (CallMessage) msg;
        String err = null;
        if (!UriValidator.tryValidate(call.procedure, handler.realm.config.useStrictUriValidation)) {
            // Client sent an invalid URI
            err = ApplicationError.INVALID_URI;
        }
        if (err == null && !(IdValidator.isValidId(call.requestId))) {
            // Client sent an invalid request ID
            err = ApplicationError.INVALID_ARGUMENT;
        }
        Procedure proc = null;
        if (err == null) {
            proc = handler.realm.procedures.get(call.procedure);
            if (proc == null) err = ApplicationError.NO_SUCH_PROCEDURE;
        }
        if (err != null) { // If we have an error send that to the client
            ErrorMessage errMsg = new ErrorMessage(CallMessage.ID, call.requestId,
                null, err, null, null);
            handler.ctx.writeAndFlush(errMsg);
            return;
        }

        // Everything checked, we can forward the call to the provider
        Invocation invoc = new Invocation();
        invoc.callRequestId = call.requestId;
        invoc.caller = handler;
        invoc.procedure = proc;
        invoc.invocationRequestId = IdGenerator.newLinearId(proc.provider.lastUsedId,
            proc.provider.pendingInvocations);
        proc.provider.lastUsedId = invoc.invocationRequestId;

        // Store the invocation
        proc.provider.pendingInvocations.put(invoc.invocationRequestId, invoc);
        // Store the call in the procedure to return error if client unregisters
        proc.pendingCalls.add(invoc);

        // And send it to the provider
        InvocationMessage imsg = new InvocationMessage(invoc.invocationRequestId,
            proc.registrationId, null, call.arguments, call.argumentsKw);
        proc.provider.ctx.writeAndFlush(imsg);
    } else if (msg instanceof YieldMessage) {
        // The client sends the result of an RPC
        // Verify the message
        YieldMessage yield = (YieldMessage) msg;
        if (!(IdValidator.isValidId(yield.requestId))) return;
        // Look up the invocation to find the original caller
        if (handler.pendingInvocations == null) return; // If a client send a yield without an invocation, return
        Invocation invoc = handler.pendingInvocations.get(yield.requestId);
        if (invoc == null) return; // There is no invocation pending under this ID
        handler.pendingInvocations.remove(yield.requestId);
        invoc.procedure.pendingCalls.remove(invoc);
        // Send the result to the original caller
        ResultMessage result = new ResultMessage(invoc.callRequestId, null, yield.arguments, yield.argumentsKw);
        invoc.caller.ctx.writeAndFlush(result);
    } else if (msg instanceof ErrorMessage) {
        ErrorMessage err = (ErrorMessage) msg;
        if (!(IdValidator.isValidId(err.requestId))) {
            return;
        }
        if (err.requestType == InvocationMessage.ID) {
            if (!UriValidator.tryValidate(err.error, handler.realm.config.useStrictUriValidation)) {
                // The Message provider has sent us an invalid URI for the error string
                // We better don't forward it but instead close the connection, which will
                // give the original caller an unknown message error
                closeActiveChannel(handler, new GoodbyeMessage(null, ApplicationError.INVALID_ARGUMENT));
                return;
            }
            // Look up the invocation to find the original caller
            if (handler.pendingInvocations == null) return; // if an error is send before an invocation, do not do anything
            Invocation invoc = handler.pendingInvocations.get(err.requestId);
            if (invoc == null) return; // There is no invocation pending under this ID
            handler.pendingInvocations.remove(err.requestId);
            invoc.procedure.pendingCalls.remove(invoc);
            // Send the error to the original caller
            ErrorMessage fwdError = new ErrorMessage(CallMessage.ID, invoc.callRequestId,
                null, err.error, err.arguments, err.argumentsKw);
            invoc.caller.ctx.writeAndFlush(fwdError);
        }
        // else TODO: Are there any other possibilities where a client could return ERROR
    } else if (msg instanceof RegisterMessage) {
        // The client wants to register a procedure
        // Verify the message
        RegisterMessage reg = (RegisterMessage) msg;
        String err = null;
        if (!UriValidator.tryValidate(reg.procedure, handler.realm.config.useStrictUriValidation)) {
            // Client sent an invalid URI
            err = ApplicationError.INVALID_URI;
        }
        if (err == null && !(IdValidator.isValidId(reg.requestId))) {
            // Client sent an invalid request ID
            err = ApplicationError.INVALID_ARGUMENT;
        }
        Procedure proc = null;
        if (err == null) {
            proc = handler.realm.procedures.get(reg.procedure);
            if (proc != null) err = ApplicationError.PROCEDURE_ALREADY_EXISTS;
        }
        if (err != null) { // If we have an error send that to the client
            ErrorMessage errMsg = new ErrorMessage(RegisterMessage.ID, reg.requestId,
                null, err, null, null);
            handler.ctx.writeAndFlush(errMsg);
            return;
        }

        // Everything checked, we can register the caller as the procedure provider
        long registrationId = IdGenerator.newLinearId(handler.lastUsedId, handler.providedProcedures);
        handler.lastUsedId = registrationId;
        Procedure procInfo = new Procedure(reg.procedure, handler, registrationId);

        // Insert new procedure
        handler.realm.procedures.put(reg.procedure, procInfo);
        // Lazily create the provider-side maps on first registration
        if (handler.providedProcedures == null) {
            handler.providedProcedures = new HashMap<Long, WampRouter.Procedure>();
            handler.pendingInvocations = new HashMap<Long, WampRouter.Invocation>();
        }
        handler.providedProcedures.put(procInfo.registrationId, procInfo);

        RegisteredMessage response = new RegisteredMessage(reg.requestId, procInfo.registrationId);
        handler.ctx.writeAndFlush(response);
    } else if (msg instanceof UnregisterMessage) {
        // The client wants to unregister a procedure
        // Verify the message
        UnregisterMessage unreg = (UnregisterMessage) msg;
        String err = null;
        if (!(IdValidator.isValidId(unreg.requestId))
                || !(IdValidator.isValidId(unreg.registrationId))) {
            // Client sent an invalid request or registration ID
            err = ApplicationError.INVALID_ARGUMENT;
        }
        Procedure proc = null;
        if (err == null) {
            if (handler.providedProcedures != null) {
                proc = handler.providedProcedures.get(unreg.registrationId);
            }
            // Check whether the procedure exists AND if the caller is the owner
            // If the caller is not the owner it might be an attack, so we don't
            // disclose that the procedure exists.
            if (proc == null) {
                err = ApplicationError.NO_SUCH_REGISTRATION;
            }
        }
        if (err != null) { // If we have an error send that to the client
            ErrorMessage errMsg = new ErrorMessage(UnregisterMessage.ID, unreg.requestId,
                null, err, null, null);
            handler.ctx.writeAndFlush(errMsg);
            return;
        }

        // Mark pending calls to this procedure as failed
        for (Invocation invoc : proc.pendingCalls) {
            handler.pendingInvocations.remove(invoc.invocationRequestId);
            if (invoc.caller.state == RouterHandlerState.Open) {
                ErrorMessage errMsg = new ErrorMessage(CallMessage.ID, invoc.callRequestId,
                    null, ApplicationError.NO_SUCH_PROCEDURE, null, null);
                invoc.caller.ctx.writeAndFlush(errMsg);
            }
        }
        proc.pendingCalls.clear();

        // Remove the procedure from the realm and the handler
        handler.realm.procedures.remove(proc.procName);
        handler.providedProcedures.remove(proc.registrationId);
        if (handler.providedProcedures.size() == 0) {
            handler.providedProcedures = null;
            handler.pendingInvocations = null;
        }

        // Send the acknowledge
        UnregisteredMessage response = new UnregisteredMessage(unreg.requestId);
        handler.ctx.writeAndFlush(response);
    } else if (msg instanceof SubscribeMessage) {
        // The client wants to subscribe to a procedure
        // Verify the message
        SubscribeMessage sub = (SubscribeMessage) msg;
        String err = null;

        // Find subscription match type
        SubscriptionFlags flags = SubscriptionFlags.Exact;
        if (sub.options != null) {
            JsonNode match = sub.options.get("match");
            if (match != null) {
                String matchValue = match.asText();
                if ("prefix".equals(matchValue)) {
                    flags = SubscriptionFlags.Prefix;
                } else if ("wildcard".equals(matchValue)) {
                    flags = SubscriptionFlags.Wildcard;
                }
            }
        }

        // Each match type has its own topic URI validation rules
        if (flags == SubscriptionFlags.Exact) {
            if (!UriValidator.tryValidate(sub.topic, handler.realm.config.useStrictUriValidation)) {
                // Client sent an invalid URI
                err = ApplicationError.INVALID_URI;
            }
        } else if (flags == SubscriptionFlags.Prefix) {
            if (!UriValidator.tryValidatePrefix(sub.topic, handler.realm.config.useStrictUriValidation)) {
                // Client sent an invalid URI
                err = ApplicationError.INVALID_URI;
            }
        } else if (flags == SubscriptionFlags.Wildcard) {
            if (!UriValidator.tryValidateWildcard(sub.topic, handler.realm.config.useStrictUriValidation)) {
                // Client sent an invalid URI
                err = ApplicationError.INVALID_URI;
            }
        }

        if (err == null && !(IdValidator.isValidId(sub.requestId))) {
            // Client sent an invalid request ID
            err = ApplicationError.INVALID_ARGUMENT;
        }
        if (err != null) { // If we have an error send that to the client
            ErrorMessage errMsg = new ErrorMessage(SubscribeMessage.ID, sub.requestId,
                null, err, null, null);
            handler.ctx.writeAndFlush(errMsg);
            return;
        }

        // Create a new subscription map for the client if it was not subscribed before
        if (handler.subscriptionsById == null) {
            handler.subscriptionsById = new HashMap<Long, WampRouter.Subscription>();
        }

        // Search if a subscription from any client on the realm to this topic exists
        Map<String, Subscription> subscriptionMap = handler.realm.subscriptionsByFlags.get(flags);
        Subscription subscription = subscriptionMap.get(sub.topic);
        if (subscription == null) {
            // No client was subscribed to this URI up to now
            // Create a new subscription id
            long subscriptionId = IdGenerator.newLinearId(handler.realm.lastUsedSubscriptionId,
                handler.realm.subscriptionsById);
            handler.realm.lastUsedSubscriptionId = subscriptionId;
            // Create and add the new subscription
            subscription = new Subscription(sub.topic, flags, subscriptionId);
            subscriptionMap.put(sub.topic, subscription);
            handler.realm.subscriptionsById.put(subscriptionId, subscription);
        }

        // We check if the client is already subscribed to this topic by trying to add the
        // new client as a receiver. If the client is already a receiver we do nothing
        // (already subscribed and already stored in handler.subscriptionsById). Calling
        // add to check and add is more efficient than checking with contains first.
        // If the client was already subscribed this will return the same subscriptionId
        // than as for the last subscription.
        // See discussion in https://groups.google.com/forum/#!topic/wampws/kC878Ngc9Z0
        if (subscription.subscribers.add(handler)) {
            // Add the subscription on the client
            handler.subscriptionsById.put(subscription.subscriptionId, subscription);
        }

        SubscribedMessage response = new SubscribedMessage(sub.requestId, subscription.subscriptionId);
        handler.ctx.writeAndFlush(response);
    } else if (msg instanceof UnsubscribeMessage) {
        // The client wants to cancel a subscription
        // Verify the message
        UnsubscribeMessage unsub = (UnsubscribeMessage) msg;
        String err = null;
        if (!(IdValidator.isValidId(unsub.requestId))
                || !(IdValidator.isValidId(unsub.subscriptionId))) {
            // Client sent an invalid request or registration ID
            err = ApplicationError.INVALID_ARGUMENT;
        }

        Subscription s = null;
        if (err == null) {
            // Check whether such a subscription exists and fetch the topic name
            if (handler.subscriptionsById != null) {
                s = handler.subscriptionsById.get(unsub.subscriptionId);
            }
            if (s == null) {
                err = ApplicationError.NO_SUCH_SUBSCRIPTION;
            }
        }
        if (err != null) { // If we have an error send that to the client
            ErrorMessage errMsg = new ErrorMessage(UnsubscribeMessage.ID, unsub.requestId,
                null, err, null, null);
            handler.ctx.writeAndFlush(errMsg);
            return;
        }

        // Remove the channel as an receiver from the subscription
        s.subscribers.remove(handler);

        // Remove the subscription from the handler
        handler.subscriptionsById.remove(s.subscriptionId);
        if (handler.subscriptionsById.isEmpty()) {
            handler.subscriptionsById = null;
        }

        // Remove the subscription from the realm if no subscriber is left
        if (s.subscribers.isEmpty()) {
            handler.realm.subscriptionsByFlags.get(s.flags).remove(s.topic);
            handler.realm.subscriptionsById.remove(s.subscriptionId);
        }

        // Send the acknowledge
        UnsubscribedMessage response = new UnsubscribedMessage(unsub.requestId);
        handler.ctx.writeAndFlush(response);
    } else if (msg instanceof PublishMessage) {
        // The client wants to publish something to all subscribers (apart from himself)
        PublishMessage pub = (PublishMessage) msg;
        // Check whether the client wants an acknowledgement for the publication
        // Default is no
        boolean sendAcknowledge = false;
        // FIX: guard against a missing options dict - the original dereferenced
        // pub.options unconditionally and threw an NPE for option-less publishes
        // (the SubscribeMessage branch above already performs this null check)
        if (pub.options != null) {
            JsonNode ackOption = pub.options.get("acknowledge");
            if (ackOption != null && ackOption.asBoolean())
                sendAcknowledge = true;
        }

        String err = null;
        if (!UriValidator.tryValidate(pub.topic, handler.realm.config.useStrictUriValidation)) {
            // Client sent an invalid URI
            err = ApplicationError.INVALID_URI;
        }
        if (err == null && !(IdValidator.isValidId(pub.requestId))) {
            // Client sent an invalid request ID
            err = ApplicationError.INVALID_ARGUMENT;
        }
        if (err != null) { // If we have an error send that to the client
            ErrorMessage errMsg = new ErrorMessage(PublishMessage.ID, pub.requestId,
                null, err, null, null);
            if (sendAcknowledge) {
                handler.ctx.writeAndFlush(errMsg);
            }
            return;
        }

        long publicationId = IdGenerator.newRandomId(null); // Store that somewhere?

        // Get the subscriptions for this topic on the realm
        Subscription exactSubscription = handler.realm.subscriptionsByFlags.get(SubscriptionFlags.Exact).get(pub.topic);
        if (exactSubscription != null) {
            publishEvent(handler, pub, publicationId, exactSubscription);
        }

        Map<String, Subscription> prefixSubscriptionMap = handler.realm.subscriptionsByFlags.get(SubscriptionFlags.Prefix);
        for (Subscription prefixSubscription : prefixSubscriptionMap.values()) {
            if (pub.topic.startsWith(prefixSubscription.topic)) {
                publishEvent(handler, pub, publicationId, prefixSubscription);
            }
        }

        Map<String, Subscription> wildcardSubscriptionMap = handler.realm.subscriptionsByFlags.get(SubscriptionFlags.Wildcard);
        String[] components = pub.topic.split("\\.", -1);
        for (Subscription wildcardSubscription : wildcardSubscriptionMap.values()) {
            // A wildcard matches when component counts agree and every non-empty
            // pattern component equals the corresponding topic component
            boolean matched = true;
            if (components.length == wildcardSubscription.components.length) {
                for (int i = 0; i < components.length; i++) {
                    if (wildcardSubscription.components[i].length() > 0
                            && !components[i].equals(wildcardSubscription.components[i])) {
                        matched = false;
                        break;
                    }
                }
            } else {
                matched = false;
            }
            if (matched) {
                publishEvent(handler, pub, publicationId, wildcardSubscription);
            }
        }

        if (sendAcknowledge) {
            PublishedMessage response = new PublishedMessage(pub.requestId, publicationId);
            handler.ctx.writeAndFlush(response);
        }
    }
}
/**
 * Delivers a publication to every subscriber of the given subscription.
 * The publisher itself is skipped unless it explicitly opted in via the
 * "exclude_me" option set to false.
 */
private void publishEvent(WampRouterHandler publisher, PublishMessage pub, long publicationId, Subscription subscription) {
    // Pattern-based (prefix/wildcard) subscriptions carry the concrete topic in the event details
    ObjectNode details = null;
    if (subscription.flags != SubscriptionFlags.Exact) {
        details = objectMapper.createObjectNode();
        details.put("topic", pub.topic);
    }

    // Build the event once and fan it out to all subscribers
    EventMessage ev = new EventMessage(subscription.subscriptionId, publicationId,
        details, pub.arguments, pub.argumentsKw);

    for (WampRouterHandler receiver : subscription.subscribers) {
        if (receiver == publisher) {
            // By default the publisher does not receive its own publication;
            // an explicit "exclude_me": false re-enables self-delivery
            boolean deliverToSelf = false;
            if (pub.options != null) {
                JsonNode excludeMe = pub.options.get("exclude_me");
                if (excludeMe != null && !excludeMe.asBoolean(true)) {
                    deliverToSelf = true;
                }
            }
            if (!deliverToSelf) continue;
        }
        // Publish the event to the subscriber
        receiver.ctx.writeAndFlush(ev);
    }
}
/**
 * Handles a message from a channel that has not yet joined a realm.
 * Only HELLO is legal in this state; a valid HELLO attaches the channel to the
 * requested realm and is answered with WELCOME, anything else aborts or closes
 * the connection.
 */
private void onMessageFromUnregisteredChannel(WampRouterHandler channelHandler, WampMessage msg)
{
    // Anything other than HELLO closes the (still passive) channel
    if (!(msg instanceof HelloMessage)) {
        closePassiveChannel(channelHandler);
        return;
    }
    HelloMessage hello = (HelloMessage) msg;

    // Validate the requested realm URI and look the realm up
    String errorMsg = null;
    Realm realm = null;
    if (!UriValidator.tryValidate(hello.realm, false)) {
        errorMsg = ApplicationError.INVALID_URI;
    } else {
        realm = realms.get(hello.realm);
        if (realm == null) {
            errorMsg = ApplicationError.NO_SUCH_REALM;
        }
    }
    if (errorMsg != null) {
        channelHandler.ctx.writeAndFlush(new AbortMessage(null, errorMsg));
        return;
    }

    // Collect the roles the client announced and check that we support all of them
    Set<WampRoles> roles = new HashSet<WampRoles>();
    boolean hasUnsupportedRoles = false;
    JsonNode rolesContainer = hello.details.get("roles");
    if (rolesContainer != null && rolesContainer.isObject()) {
        Iterator<String> roleKeys = ((ObjectNode) rolesContainer).fieldNames();
        while (roleKeys.hasNext()) {
            // fromString may return null for unknown role names; null is never
            // in SUPPORTED_CLIENT_ROLES, so unknown names flag as unsupported
            WampRoles role = WampRoles.fromString(roleKeys.next());
            if (!SUPPORTED_CLIENT_ROLES.contains(role)) hasUnsupportedRoles = true;
            if (role != null) roles.add(role);
        }
    }
    if (roles.isEmpty() || hasUnsupportedRoles) {
        channelHandler.ctx.writeAndFlush(new AbortMessage(null, ApplicationError.NO_SUCH_ROLE));
        return;
    }

    long sessionId = IdGenerator.newRandomId(null);
    // TODO: Should be unique on the router and should be stored somewhere

    // Attach the channel to the realm; it is no longer idle afterwards
    realm.includeChannel(channelHandler, sessionId, roles);
    idleChannels.remove(channelHandler.ctx.channel());

    // Advertise the roles (and role features) this realm is configured for
    ObjectNode welcomeDetails = objectMapper.createObjectNode();
    welcomeDetails.put("agent", Version.getVersion());
    ObjectNode routerRoles = welcomeDetails.putObject("roles");
    for (WampRoles role : realm.config.roles) {
        ObjectNode roleNode = routerRoles.putObject(role.toString());
        if (role == WampRoles.Publisher) {
            roleNode.putObject("features").put("publisher_exclusion", true);
        } else if (role == WampRoles.Subscriber) {
            roleNode.putObject("features").put("pattern_based_subscription", true);
        }
    }

    // Complete the handshake with the WELCOME message
    WelcomeMessage welcome = new WelcomeMessage(channelHandler.sessionId, welcomeDetails);
    channelHandler.ctx.writeAndFlush(welcome);
}
/**
 * Detaches a realm-joined channel from its realm and closes the connection,
 * optionally sending a final message (e.g. GOODBYE/ABORT) before the close.
 */
private void closeActiveChannel(WampRouterHandler channel, WampMessage closeMessage) {
    if (channel == null) return;

    channel.realm.removeChannel(channel, true);
    channel.markAsClosed();

    if (channel.ctx != null) {
        // Send the farewell (or an empty buffer when there is none) and
        // close the connection once the write has been flushed
        Object finalMessage = (closeMessage != null) ? (Object) closeMessage : Unpooled.EMPTY_BUFFER;
        channel.ctx.writeAndFlush(finalMessage)
            .addListener(ChannelFutureListener.CLOSE);
    }
}
/**
 * Closes a channel that has not (yet) joined a realm: removes it from the
 * idle channel list, marks it closed and shuts the connection down without
 * sending any farewell message.
 */
private void closePassiveChannel(WampRouterHandler channelHandler) {
    // Null guard for consistency with closeActiveChannel()
    if (channelHandler == null) return;
    idleChannels.remove(channelHandler.ctx.channel());
    channelHandler.markAsClosed();
    channelHandler.ctx.close();
}
}
| |
/*
* The MIT License
*
* Copyright 2015 Jesse Glick.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package jenkins.tasks;
import hudson.AbortException;
import hudson.EnvVars;
import hudson.FilePath;
import hudson.Launcher;
import hudson.console.ConsoleLogFilter;
import hudson.model.AbstractBuild;
import hudson.model.AbstractProject;
import hudson.model.Action;
import hudson.model.BuildListener;
import hudson.model.Computer;
import hudson.model.Run;
import hudson.model.TaskListener;
import hudson.tasks.BuildWrapper;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Serializable;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import edu.umd.cs.findbugs.annotations.CheckForNull;
import edu.umd.cs.findbugs.annotations.NonNull;
import org.kohsuke.accmod.Restricted;
import org.kohsuke.accmod.restrictions.Beta;
/**
* A generalization of {@link BuildWrapper} that, like {@link SimpleBuildStep}, may be called at various points within a build.
 * <p>Such a build wrapper would typically be written so as to make few assumptions about how it is being used.
* Some hints about this refactoring:
* <ul>
* <li>Replace {@link AbstractBuild#getWorkspace} with the provided path.
* <li>Replace {@link AbstractBuild#getProject} with {@link Run#getParent}.
* <li>Use {@link FilePath#toComputer} rather than {@link Computer#currentComputer}.
* <li>Do not bother with {@link AbstractBuild#getBuildVariables} if you are not passed an {@link AbstractBuild} (treat it like an empty map).
* <li>The {@link Disposer} must be safely serializable. This means it should be a {@code static} class if nested, and define a {@code serialVersionUID}.
* </ul>
* @since 1.599
*/
@SuppressWarnings("rawtypes") // inherited
public abstract class SimpleBuildWrapper extends BuildWrapper {

    /**
     * Determines whether or not this wrapper requires a workspace context (working directory and launcher).
     * <p>
     * When such a context is required (the default), {@link #setUp(Context, Run, FilePath, Launcher, TaskListener, EnvVars)} applies.
     * Otherwise, {@link #setUp(Context, Run, TaskListener, EnvVars)} applies.
     *
     * @return {@code true} if this wrapper requires a workspace context; {@code false} otherwise.
     * @since 2.258
     */
    public boolean requiresWorkspace() {
        return true;
    }

    /**
     * Called when a segment of a build is started that is to be enhanced with this wrapper.
     * <p>
     * This method <strong>must</strong> be overridden when this wrapper requires a workspace context. If such a context
     * is <em>not</em> required, it does not need to be overridden; it will then forward to
     * {@link #setUp(Context, Run, TaskListener, EnvVars)}.
     *
     * @param context a way of collecting modifications to the environment for nested steps
     * @param build a build being run
     * @param workspace a workspace of the build
     * @param launcher a way to start commands
     * @param listener a way to report progress
     * @param initialEnvironment the environment variables set at the outset
     * @throws AbstractMethodError if this wrapper requires a workspace context, and this method is not overridden
     * @throws IOException if something fails; {@link AbortException} for user errors
     * @throws InterruptedException if setup is interrupted
     */
    public void setUp(Context context, Run<?,?> build, FilePath workspace, Launcher launcher, TaskListener listener, EnvVars initialEnvironment) throws IOException, InterruptedException {
        // If this does not require a workspace, defer to the version that does not take a workspace and launcher.
        if (!this.requiresWorkspace()) {
            this.setUp(context, build, listener, initialEnvironment);
            return;
        }
        // A workspace-requiring wrapper reaching this point means the subclass forgot to override this overload.
        throw new AbstractMethodError("Unless a build wrapper is marked as not requiring a workspace context, you must implement the overload of the setUp() method that takes both a workspace and a launcher.");
    }

    /**
     * Called when a segment of a build is started that is to be enhanced with this wrapper.
     * <p>
     * This method <strong>must</strong> be overridden when this wrapper does not require a workspace context, and will
     * not be called when such a context <em>is</em> required.
     *
     * @param context a way of collecting modifications to the environment for nested steps
     * @param build a build being run
     * @param listener a way to report progress
     * @param initialEnvironment the environment variables set at the outset
     * @throws AbstractMethodError if this method is not overridden
     * @throws IllegalStateException if this wrapper requires a workspace context
     * @throws IOException if something fails; {@link AbortException} for user errors
     * @throws InterruptedException if setup is interrupted
     * @since 2.258
     */
    public void setUp(Context context, Run<?,?> build, TaskListener listener, EnvVars initialEnvironment) throws IOException, InterruptedException {
        // If this wrapper requires a workspace, this is the wrong method to call.
        if (this.requiresWorkspace()) {
            throw new IllegalStateException("This build wrapper requires a workspace context, but none was provided.");
        }
        // Otherwise, this method must have an implementation.
        throw new AbstractMethodError("When a build wrapper is marked as not requiring a workspace context, you must implement the overload of the setUp() method that does not take a workspace or launcher.");
    }

    /**
     * Creates a new {@link Context} for use with this wrapper.
     * The context records whether this wrapper requires a workspace, so that a
     * {@link Disposer} registered on it inherits the same requirement.
     *
     * @return a new {@link Context} instance
     */
    @Restricted(Beta.class) // to indicate it is to be called by Jenkins internals only; not part of the normal API
    public Context createContext() {
        return new Context(this.requiresWorkspace());
    }

    /**
     * Parameter passed to {@link #setUp} to allow an implementation to specify its behavior after the initial setup.
     */
    public static final class Context {
        // The optional end-of-block callback; at most one may be registered.
        private Disposer disposer;
        // Environment variable overrides accumulated via env(); one binding per key.
        private final Map<String,String> env = new HashMap<>();
        // null when created via the deprecated no-arg constructor (requirement unknown).
        private final @CheckForNull Boolean wrapperRequiresWorkspace;

        /**
         * Creates a new context.
         *
         * @deprecated Use {@link SimpleBuildWrapper#createContext()} instead, so that this context can tell whether or
         * not a disposer will require a workspace context.
         */
        @Deprecated
        public Context() {
            this.wrapperRequiresWorkspace = null;
        }

        /**
         * Creates a context.
         *
         * @param wrapperRequiresWorkspace Indicates whether the wrapper for which this context was created requires a
         * workspace context, which in turn determines the same for any {@link Disposer} set on this context.
         */
        private Context(boolean wrapperRequiresWorkspace) {
            this.wrapperRequiresWorkspace = wrapperRequiresWorkspace;
        }

        /**
         * Specify an environment variable override to apply to processes launched within the block.
         * If unspecified, environment variables will be inherited unmodified.
         * @param key handles the special {@code PATH+SOMETHING} syntax as in {@link EnvVars#override}
         * @throws IllegalStateException if a binding for {@code key} was already made
         */
        public void env(String key, String value) {
            if (env.containsKey(key)) {
                throw new IllegalStateException("just one binding for " + key);
            }
            env.put(key, value);
        }

        /** @return the disposer registered via {@link #setDisposer}, or null if none was set */
        public @CheckForNull Disposer getDisposer() {
            return disposer;
        }

        /** @return the environment variable overrides collected so far */
        public @NonNull Map<String,String> getEnv() {
            return env;
        }

        /**
         * Specify an action to take when the block ends.
         * If not specified, nothing special happens.
         * @throws IllegalStateException if a disposer was already set
         */
        public void setDisposer(@NonNull Disposer disposer) {
            if (this.disposer != null) {
                throw new IllegalStateException("just one disposer");
            }
            // Assumption: the provided Disposer instance is only associated with one wrapper at a time, or at least
            // only with wrappers that agree on whether or not a workspace is required.
            disposer.wrapperRequiresWorkspace = this.wrapperRequiresWorkspace;
            this.disposer = disposer;
        }
    }

    /**
     * An optional callback to run at the end of the wrapped block.
     * Must be safely serializable, so it receives runtime context comparable to that of the original setup.
     */
    public abstract static class Disposer implements Serializable {

        // Propagated from the owning Context in setDisposer(); null means "unknown" (deprecated Context()),
        // which requiresWorkspace() treats as requiring a workspace.
        @CheckForNull
        private Boolean wrapperRequiresWorkspace;

        /**
         * Determines whether or not this end-of-wrapped-block callback requires a workspace context (working
         * directory and launcher).
         * <p>
         * When such a context is required (the default), then {@link #tearDown(Run, FilePath, Launcher, TaskListener)} applies.
         * Otherwise, {@link #tearDown(Run, TaskListener)} applies.
         *
         * @return {@code true} when this end-of-wrapped-block callback requires a workspace context; {@code false} otherwise.
         * @since 2.258
         */
        @Restricted(Beta.class) // to indicate it is to be called by Jenkins internals only; not part of the normal API
        public final boolean requiresWorkspace() {
            return this.wrapperRequiresWorkspace == null || this.wrapperRequiresWorkspace;
        }

        /**
         * Attempt to clean up anything that was done in the initial setup.
         * <p>
         * This method <strong>must</strong> be overridden when this end-of-wrapped-block callback requires a workspace
         * context. If such a context is <em>not</em> required, it does not need to be overridden; it will then forward
         * to {@link #tearDown(Run, TaskListener)}.
         *
         * @param build a build being run
         * @param workspace a workspace of the build
         * @param launcher a way to start commands
         * @param listener a way to report progress
         * @throws AbstractMethodError if this end-of-wrapped-block callback requires a workspace and this method is not overridden.
         * @throws IOException if something fails; {@link AbortException} for user errors
         * @throws InterruptedException if tear down is interrupted
         */
        public void tearDown(Run<?,?> build, FilePath workspace, Launcher launcher, TaskListener listener) throws IOException, InterruptedException {
            // If this does not require a workspace, defer to the version that does not take a workspace and launcher.
            if (!this.requiresWorkspace()) {
                this.tearDown(build, listener);
                return;
            }
            throw new AbstractMethodError("Unless an end-of-wrapped-block callback is marked as not requiring a workspace context, you must implement the overload of the tearDown() method that takes both a workspace and a launcher.");
        }

        /**
         * Attempt to clean up anything that was done in the initial setup.
         * <p>
         * This method <strong>must</strong> be overridden when this end-of-wrapped-block callback does not require a
         * workspace context, and will not be called when such a context <em>is</em> required.
         *
         * @param build a build being run
         * @param listener a way to report progress
         * @throws AbstractMethodError if this method is not overridden
         * @throws IllegalStateException if this end-of-wrapped-block callback requires a workspace
         * @throws IOException if something fails; {@link AbortException} for user errors
         * @throws InterruptedException if tear down is interrupted
         * @since 2.258
         */
        public void tearDown(Run<?,?> build, TaskListener listener) throws IOException, InterruptedException {
            // If this callback requires a workspace, this is the wrong method to call.
            if (this.requiresWorkspace()) {
                throw new IllegalStateException("This end-of-wrapped-block callback requires a workspace context, but none was provided.");
            }
            // Otherwise, this method must have an implementation.
            throw new AbstractMethodError("When an end-of-wrapped-block callback is marked as not requiring a workspace context, you must implement the overload of the tearDown() method that does not take a workspace or launcher.");
        }
    }

    /**
     * By default, when run as part of an {@link AbstractBuild}, will run late, in the {@link #setUp(AbstractBuild, Launcher, BuildListener)} phase.
     * May be overridden to return true, in which case this will run earlier, in the {@link #preCheckout} phase.
     * Ignored when not run as part of an {@link AbstractBuild}.
     */
    protected boolean runPreCheckout() {
        return false;
    }

    @Override public final Environment setUp(AbstractBuild build, final Launcher launcher, BuildListener listener) throws IOException, InterruptedException {
        if (runPreCheckout()) {
            // Setup already happened (or will happen) in preCheckout; nothing to do at this phase.
            return new Environment() {};
        } else {
            final Context c = this.createContext();
            setUp(c, build, build.getWorkspace(), launcher, listener, build.getEnvironment(listener));
            return new EnvironmentWrapper(c, launcher);
        }
    }

    @Override public final void preCheckout(AbstractBuild build, final Launcher launcher, BuildListener listener) throws IOException, InterruptedException {
        if (runPreCheckout()) {
            final Context c = this.createContext();
            setUp(c, build, build.getWorkspace(), launcher, listener, build.getEnvironment(listener));
            build.getEnvironments().add(new EnvironmentWrapper(c, launcher));
        }
    }

    /**
     * Adapts a {@link Context} produced by {@link #setUp} to the legacy {@link BuildWrapper} environment contract:
     * applies the collected variable overrides and runs the {@link Disposer} (if any) on tear-down.
     */
    // NOTE(review): kept as a non-static inner class — Environment appears to need an enclosing
    // BuildWrapper instance; confirm before attempting to make this static.
    private class EnvironmentWrapper extends Environment {
        private final Context c;
        private final Launcher launcher;
        EnvironmentWrapper(Context c, Launcher launcher) {
            this.c = c;
            this.launcher = launcher;
        }
        @Override public void buildEnvVars(Map<String,String> env) {
            if (env instanceof EnvVars) {
                ((EnvVars) env).overrideAll(c.env);
            } else { // fallback: a plain Map presumably lacks EnvVars#override semantics (e.g. PATH+X handling)
                env.putAll(c.env);
            }
        }
        @Override public boolean tearDown(AbstractBuild build, BuildListener listener) throws IOException, InterruptedException {
            if (c.disposer != null) {
                c.disposer.tearDown(build, build.getWorkspace(), launcher, listener);
            }
            return true;
        }
    }

    /**
     * Allows this wrapper to decorate log output.
     * @param build as is passed to {@link #setUp(Context, Run, FilePath, Launcher, TaskListener, EnvVars)}
     * @return a filter which ignores its {@code build} parameter and is {@link Serializable}; or null (the default)
     * @since 1.608
     */
    public @CheckForNull ConsoleLogFilter createLoggerDecorator(@NonNull Run<?,?> build) {
        return null;
    }

    @Override public final OutputStream decorateLogger(AbstractBuild build, OutputStream logger) throws IOException, InterruptedException, Run.RunnerAbortedException {
        ConsoleLogFilter filter = createLoggerDecorator(build);
        return filter != null ? filter.decorateLogger(build, logger) : logger;
    }

    /**
     * May be overridden but this will only take effect when used as a {@link BuildWrapper} on an {@link AbstractProject}.
     * <p>{@inheritDoc}
     * @since 1.608
     */
    @Override public Launcher decorateLauncher(AbstractBuild build, Launcher launcher, BuildListener listener) throws IOException, InterruptedException, Run.RunnerAbortedException {
        return super.decorateLauncher(build, launcher, listener);
        // TODO reasonable to decorate Launcher within a dynamic scope, but this signature does not mix well with Context pattern.
        // Called from AbstractBuildExecution.createLauncher; how do we track what is decorating what?
        // Would have to keep something like a LaunchedDecorator, not an actual Launcher, in Context.
        // And createLauncher is called before even preCheckout, so much too early for the Context to have been prepared.
        // Could perhaps create a proxy Launcher whose launch method checks some field in the Context remembered for the build.
    }

    /**
     * May be overridden but this will only take effect when used as a {@link BuildWrapper} on an {@link AbstractProject}.
     * <p>{@inheritDoc}
     * @since 1.608
     */
    @Override public void makeBuildVariables(AbstractBuild build, Map<String,String> variables) {
        super.makeBuildVariables(build, variables);
    }

    /**
     * May be overridden but this will only take effect when used as a {@link BuildWrapper} on an {@link AbstractProject}.
     * <p>{@inheritDoc}
     * @since 1.608
     */
    @Override public void makeSensitiveBuildVariables(AbstractBuild build, Set<String> sensitiveVariables) {
        super.makeSensitiveBuildVariables(build, sensitiveVariables);
        // TODO determine if there is a meaningful way to generalize this; perhaps as a new [Run]Action recording sensitiveVariables?
        // Complicated by the fact that in principle someone could call getSensitiveBuildVariables *before* the wrapper starts and actually sets those variables,
        // though in practice the likely use cases would come later, and perhaps it is acceptable to omit the names of variables which are yet to be set.
        // Also unclear if there is any use case for calling this method after the build is done (or Jenkins is restarted);
        // most likely it is only used during the build itself.
        // Would be much cleaner if EnvVars itself recorded which keys had sensitive values.
    }

    /**
     * @return an empty set; this might never be called if the step is not part of the static configuration of a project; instead, add a {@link SimpleBuildStep.LastBuildAction} to a build when run
     */
    @Override public final Collection<? extends Action> getProjectActions(AbstractProject job) {
        return Collections.emptySet();
    }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/apigateway/v1/apigateway.proto
package com.google.cloud.apigateway.v1;
public final class Apigateway {
private Apigateway() {}
public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {}
public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) {
registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry);
}
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_Api_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_Api_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_Api_LabelsEntry_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_Api_LabelsEntry_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_ApiConfig_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_ApiConfig_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_ApiConfig_File_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_ApiConfig_File_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_ApiConfig_OpenApiDocument_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_ApiConfig_OpenApiDocument_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_ApiConfig_GrpcServiceDefinition_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_ApiConfig_GrpcServiceDefinition_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_ApiConfig_LabelsEntry_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_ApiConfig_LabelsEntry_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_Gateway_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_Gateway_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_Gateway_LabelsEntry_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_Gateway_LabelsEntry_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_ListGatewaysRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_ListGatewaysRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_ListGatewaysResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_ListGatewaysResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_GetGatewayRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_GetGatewayRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_CreateGatewayRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_CreateGatewayRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_UpdateGatewayRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_UpdateGatewayRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_DeleteGatewayRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_DeleteGatewayRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_ListApisRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_ListApisRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_ListApisResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_ListApisResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_GetApiRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_GetApiRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_CreateApiRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_CreateApiRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_UpdateApiRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_UpdateApiRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_DeleteApiRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_DeleteApiRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_ListApiConfigsRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_ListApiConfigsRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_ListApiConfigsResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_ListApiConfigsResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_GetApiConfigRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_GetApiConfigRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_CreateApiConfigRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_CreateApiConfigRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_UpdateApiConfigRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_UpdateApiConfigRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_DeleteApiConfigRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_DeleteApiConfigRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_OperationMetadata_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_OperationMetadata_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_apigateway_v1_OperationMetadata_Diagnostic_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_apigateway_v1_OperationMetadata_Diagnostic_fieldAccessorTable;
/** Returns the file descriptor for apigateway.proto, built once in the static initializer. */
public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
return descriptor;
}
// Assigned exactly once by the static initializer below; never null after class init.
private static com.google.protobuf.Descriptors.FileDescriptor descriptor;
static {
// Serialized FileDescriptorProto of google/cloud/apigateway/v1/apigateway.proto,
// chunked into <64KB string constants (protoc-generated; do not edit by hand).
java.lang.String[] descriptorData = {
"\n+google/cloud/apigateway/v1/apigateway."
+ "proto\022\032google.cloud.apigateway.v1\032\037googl"
+ "e/api/field_behavior.proto\032\031google/api/r"
+ "esource.proto\032 google/protobuf/field_mas"
+ "k.proto\032\037google/protobuf/timestamp.proto"
+ "\032\034google/api/annotations.proto\"\326\004\n\003Api\022\021"
+ "\n\004name\030\001 \001(\tB\003\340A\003\0224\n\013create_time\030\002 \001(\0132\032"
+ ".google.protobuf.TimestampB\003\340A\003\0224\n\013updat"
+ "e_time\030\003 \001(\0132\032.google.protobuf.Timestamp"
+ "B\003\340A\003\022@\n\006labels\030\004 \003(\0132+.google.cloud.api"
+ "gateway.v1.Api.LabelsEntryB\003\340A\001\022\031\n\014displ"
+ "ay_name\030\005 \001(\tB\003\340A\001\022S\n\017managed_service\030\007 "
+ "\001(\tB:\340A\005\340A\001\372A1\n/servicemanagement.google"
+ "apis.com/ManagedService\0229\n\005state\030\014 \001(\0162%"
+ ".google.cloud.apigateway.v1.Api.StateB\003\340"
+ "A\003\032-\n\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030"
+ "\002 \001(\t:\0028\001\"`\n\005State\022\025\n\021STATE_UNSPECIFIED\020"
+ "\000\022\014\n\010CREATING\020\001\022\n\n\006ACTIVE\020\002\022\n\n\006FAILED\020\003\022"
+ "\014\n\010DELETING\020\004\022\014\n\010UPDATING\020\005:R\352AO\n\035apigat"
+ "eway.googleapis.com/Api\022.projects/{proje"
+ "ct}/locations/global/apis/{api}\"\373\t\n\tApiC"
+ "onfig\022\021\n\004name\030\001 \001(\tB\003\340A\003\0224\n\013create_time\030"
+ "\002 \001(\0132\032.google.protobuf.TimestampB\003\340A\003\0224"
+ "\n\013update_time\030\003 \001(\0132\032.google.protobuf.Ti"
+ "mestampB\003\340A\003\022F\n\006labels\030\004 \003(\01321.google.cl"
+ "oud.apigateway.v1.ApiConfig.LabelsEntryB"
+ "\003\340A\001\022\031\n\014display_name\030\005 \001(\tB\003\340A\001\022J\n\027gatew"
+ "ay_service_account\030\016 \001(\tB)\340A\005\372A#\n!iam.go"
+ "ogleapis.com/ServiceAccount\022K\n\021service_c"
+ "onfig_id\030\014 \001(\tB0\340A\003\372A*\n(servicemanagemen"
+ "t.googleapis.com/Service\022?\n\005state\030\010 \001(\0162"
+ "+.google.cloud.apigateway.v1.ApiConfig.S"
+ "tateB\003\340A\003\022U\n\021openapi_documents\030\t \003(\01325.g"
+ "oogle.cloud.apigateway.v1.ApiConfig.Open"
+ "ApiDocumentB\003\340A\001\022W\n\rgrpc_services\030\n \003(\0132"
+ ";.google.cloud.apigateway.v1.ApiConfig.G"
+ "rpcServiceDefinitionB\003\340A\001\022P\n\027managed_ser"
+ "vice_configs\030\013 \003(\0132*.google.cloud.apigat"
+ "eway.v1.ApiConfig.FileB\003\340A\001\032&\n\004File\022\014\n\004p"
+ "ath\030\001 \001(\t\022\020\n\010contents\030\002 \001(\014\032O\n\017OpenApiDo"
+ "cument\022<\n\010document\030\001 \001(\0132*.google.cloud."
+ "apigateway.v1.ApiConfig.File\032\246\001\n\025GrpcSer"
+ "viceDefinition\022L\n\023file_descriptor_set\030\001 "
+ "\001(\0132*.google.cloud.apigateway.v1.ApiConf"
+ "ig.FileB\003\340A\004\022?\n\006source\030\002 \003(\0132*.google.cl"
+ "oud.apigateway.v1.ApiConfig.FileB\003\340A\001\032-\n"
+ "\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t"
+ ":\0028\001\"p\n\005State\022\025\n\021STATE_UNSPECIFIED\020\000\022\014\n\010"
+ "CREATING\020\001\022\n\n\006ACTIVE\020\002\022\n\n\006FAILED\020\003\022\014\n\010DE"
+ "LETING\020\004\022\014\n\010UPDATING\020\005\022\016\n\nACTIVATING\020\006:m"
+ "\352Aj\n#apigateway.googleapis.com/ApiConfig"
+ "\022Cprojects/{project}/locations/global/ap"
+ "is/{api}/configs/{api_config}\"\375\004\n\007Gatewa"
+ "y\022\021\n\004name\030\001 \001(\tB\003\340A\003\0224\n\013create_time\030\002 \001("
+ "\0132\032.google.protobuf.TimestampB\003\340A\003\0224\n\013up"
+ "date_time\030\003 \001(\0132\032.google.protobuf.Timest"
+ "ampB\003\340A\003\022D\n\006labels\030\004 \003(\0132/.google.cloud."
+ "apigateway.v1.Gateway.LabelsEntryB\003\340A\001\022\031"
+ "\n\014display_name\030\005 \001(\tB\003\340A\001\022?\n\napi_config\030"
+ "\006 \001(\tB+\340A\002\372A%\n#apigateway.googleapis.com"
+ "/ApiConfig\022=\n\005state\030\007 \001(\0162).google.cloud"
+ ".apigateway.v1.Gateway.StateB\003\340A\003\022\035\n\020def"
+ "ault_hostname\030\t \001(\tB\003\340A\003\032-\n\013LabelsEntry\022"
+ "\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"`\n\005State"
+ "\022\025\n\021STATE_UNSPECIFIED\020\000\022\014\n\010CREATING\020\001\022\n\n"
+ "\006ACTIVE\020\002\022\n\n\006FAILED\020\003\022\014\n\010DELETING\020\004\022\014\n\010U"
+ "PDATING\020\005:b\352A_\n!apigateway.googleapis.co"
+ "m/Gateway\022:projects/{project}/locations/"
+ "{location}/gateways/{gateway}\"\231\001\n\023ListGa"
+ "tewaysRequest\0229\n\006parent\030\001 \001(\tB)\340A\002\372A#\n!l"
+ "ocations.googleapis.com/Location\022\021\n\tpage"
+ "_size\030\002 \001(\005\022\022\n\npage_token\030\003 \001(\t\022\016\n\006filte"
+ "r\030\004 \001(\t\022\020\n\010order_by\030\005 \001(\t\"\205\001\n\024ListGatewa"
+ "ysResponse\0225\n\010gateways\030\001 \003(\0132#.google.cl"
+ "oud.apigateway.v1.Gateway\022\027\n\017next_page_t"
+ "oken\030\002 \001(\t\022\035\n\025unreachable_locations\030\003 \003("
+ "\t\"L\n\021GetGatewayRequest\0227\n\004name\030\001 \001(\tB)\340A"
+ "\002\372A#\n!apigateway.googleapis.com/Gateway\""
+ "\245\001\n\024CreateGatewayRequest\0229\n\006parent\030\001 \001(\t"
+ "B)\340A\002\372A#\n!locations.googleapis.com/Locat"
+ "ion\022\027\n\ngateway_id\030\002 \001(\tB\003\340A\002\0229\n\007gateway\030"
+ "\003 \001(\0132#.google.cloud.apigateway.v1.Gatew"
+ "ayB\003\340A\002\"\202\001\n\024UpdateGatewayRequest\022/\n\013upda"
+ "te_mask\030\001 \001(\0132\032.google.protobuf.FieldMas"
+ "k\0229\n\007gateway\030\002 \001(\0132#.google.cloud.apigat"
+ "eway.v1.GatewayB\003\340A\002\"O\n\024DeleteGatewayReq"
+ "uest\0227\n\004name\030\001 \001(\tB)\340A\002\372A#\n!apigateway.g"
+ "oogleapis.com/Gateway\"\225\001\n\017ListApisReques"
+ "t\0229\n\006parent\030\001 \001(\tB)\340A\002\372A#\n!locations.goo"
+ "gleapis.com/Location\022\021\n\tpage_size\030\002 \001(\005\022"
+ "\022\n\npage_token\030\003 \001(\t\022\016\n\006filter\030\004 \001(\t\022\020\n\010o"
+ "rder_by\030\005 \001(\t\"y\n\020ListApisResponse\022-\n\004api"
+ "s\030\001 \003(\0132\037.google.cloud.apigateway.v1.Api"
+ "\022\027\n\017next_page_token\030\002 \001(\t\022\035\n\025unreachable"
+ "_locations\030\003 \003(\t\"D\n\rGetApiRequest\0223\n\004nam"
+ "e\030\001 \001(\tB%\340A\002\372A\037\n\035apigateway.googleapis.c"
+ "om/Api\"\225\001\n\020CreateApiRequest\0229\n\006parent\030\001 "
+ "\001(\tB)\340A\002\372A#\n!locations.googleapis.com/Lo"
+ "cation\022\023\n\006api_id\030\002 \001(\tB\003\340A\002\0221\n\003api\030\003 \001(\013"
+ "2\037.google.cloud.apigateway.v1.ApiB\003\340A\002\"v"
+ "\n\020UpdateApiRequest\022/\n\013update_mask\030\001 \001(\0132"
+ "\032.google.protobuf.FieldMask\0221\n\003api\030\002 \001(\013"
+ "2\037.google.cloud.apigateway.v1.ApiB\003\340A\002\"G"
+ "\n\020DeleteApiRequest\0223\n\004name\030\001 \001(\tB%\340A\002\372A\037"
+ "\n\035apigateway.googleapis.com/Api\"\227\001\n\025List"
+ "ApiConfigsRequest\0225\n\006parent\030\001 \001(\tB%\340A\002\372A"
+ "\037\n\035apigateway.googleapis.com/Api\022\021\n\tpage"
+ "_size\030\002 \001(\005\022\022\n\npage_token\030\003 \001(\t\022\016\n\006filte"
+ "r\030\004 \001(\t\022\020\n\010order_by\030\005 \001(\t\"\214\001\n\026ListApiCon"
+ "figsResponse\022:\n\013api_configs\030\001 \003(\0132%.goog"
+ "le.cloud.apigateway.v1.ApiConfig\022\027\n\017next"
+ "_page_token\030\002 \001(\t\022\035\n\025unreachable_locatio"
+ "ns\030\003 \003(\t\"\332\001\n\023GetApiConfigRequest\0229\n\004name"
+ "\030\001 \001(\tB+\340A\002\372A%\n#apigateway.googleapis.co"
+ "m/ApiConfig\022H\n\004view\030\003 \001(\0162:.google.cloud"
+ ".apigateway.v1.GetApiConfigRequest.Confi"
+ "gView\">\n\nConfigView\022\033\n\027CONFIG_VIEW_UNSPE"
+ "CIFIED\020\000\022\t\n\005BASIC\020\001\022\010\n\004FULL\020\002\"\253\001\n\026Create"
+ "ApiConfigRequest\0225\n\006parent\030\001 \001(\tB%\340A\002\372A\037"
+ "\n\035apigateway.googleapis.com/Api\022\032\n\rapi_c"
+ "onfig_id\030\002 \001(\tB\003\340A\002\022>\n\napi_config\030\003 \001(\0132"
+ "%.google.cloud.apigateway.v1.ApiConfigB\003"
+ "\340A\002\"\211\001\n\026UpdateApiConfigRequest\022/\n\013update"
+ "_mask\030\001 \001(\0132\032.google.protobuf.FieldMask\022"
+ ">\n\napi_config\030\002 \001(\0132%.google.cloud.apiga"
+ "teway.v1.ApiConfigB\003\340A\002\"S\n\026DeleteApiConf"
+ "igRequest\0229\n\004name\030\001 \001(\tB+\340A\002\372A%\n#apigate"
+ "way.googleapis.com/ApiConfig\"\205\003\n\021Operati"
+ "onMetadata\0224\n\013create_time\030\001 \001(\0132\032.google"
+ ".protobuf.TimestampB\003\340A\003\0221\n\010end_time\030\002 \001"
+ "(\0132\032.google.protobuf.TimestampB\003\340A\003\022\023\n\006t"
+ "arget\030\003 \001(\tB\003\340A\003\022\021\n\004verb\030\004 \001(\tB\003\340A\003\022\033\n\016s"
+ "tatus_message\030\005 \001(\tB\003\340A\003\022#\n\026requested_ca"
+ "ncellation\030\006 \001(\010B\003\340A\003\022\030\n\013api_version\030\007 \001"
+ "(\tB\003\340A\003\022R\n\013diagnostics\030\010 \003(\01328.google.cl"
+ "oud.apigateway.v1.OperationMetadata.Diag"
+ "nosticB\003\340A\003\032/\n\nDiagnostic\022\020\n\010location\030\001 "
+ "\001(\t\022\017\n\007message\030\002 \001(\tB\270\003\n\036com.google.clou"
+ "d.apigateway.v1P\001ZDgoogle.golang.org/gen"
+ "proto/googleapis/cloud/apigateway/v1;api"
+ "gateway\252\002\032Google.Cloud.ApiGateway.V1\312\002\032G"
+ "oogle\\Cloud\\ApiGateway\\V1\352\002\035Google::Clou"
+ "d::ApiGateway::V1\352AY\n!iam.googleapis.com"
+ "/ServiceAccount\0224projects/{project}/serv"
+ "iceAccounts/{service_account}\352AE\n/servic"
+ "emanagement.googleapis.com/ManagedServic"
+ "e\022\022services/{service}\352AO\n(servicemanagem"
+ "ent.googleapis.com/Service\022#services/{se"
+ "rvice}/configs/{config}b\006proto3"
};
// Build this file's descriptor against the descriptors of all imported .proto files
// (order must match the import order in apigateway.proto).
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
com.google.api.FieldBehaviorProto.getDescriptor(),
com.google.api.ResourceProto.getDescriptor(),
com.google.protobuf.FieldMaskProto.getDescriptor(),
com.google.protobuf.TimestampProto.getDescriptor(),
com.google.api.AnnotationsProto.getDescriptor(),
});
// Wire up per-message descriptors and reflective field-accessor tables; the
// getMessageTypes()/getNestedTypes() indices follow declaration order in the .proto file.
internal_static_google_cloud_apigateway_v1_Api_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_google_cloud_apigateway_v1_Api_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_Api_descriptor,
new java.lang.String[] {
"Name",
"CreateTime",
"UpdateTime",
"Labels",
"DisplayName",
"ManagedService",
"State",
});
internal_static_google_cloud_apigateway_v1_Api_LabelsEntry_descriptor =
internal_static_google_cloud_apigateway_v1_Api_descriptor.getNestedTypes().get(0);
internal_static_google_cloud_apigateway_v1_Api_LabelsEntry_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_Api_LabelsEntry_descriptor,
new java.lang.String[] {
"Key", "Value",
});
internal_static_google_cloud_apigateway_v1_ApiConfig_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_google_cloud_apigateway_v1_ApiConfig_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_ApiConfig_descriptor,
new java.lang.String[] {
"Name",
"CreateTime",
"UpdateTime",
"Labels",
"DisplayName",
"GatewayServiceAccount",
"ServiceConfigId",
"State",
"OpenapiDocuments",
"GrpcServices",
"ManagedServiceConfigs",
});
internal_static_google_cloud_apigateway_v1_ApiConfig_File_descriptor =
internal_static_google_cloud_apigateway_v1_ApiConfig_descriptor.getNestedTypes().get(0);
internal_static_google_cloud_apigateway_v1_ApiConfig_File_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_ApiConfig_File_descriptor,
new java.lang.String[] {
"Path", "Contents",
});
internal_static_google_cloud_apigateway_v1_ApiConfig_OpenApiDocument_descriptor =
internal_static_google_cloud_apigateway_v1_ApiConfig_descriptor.getNestedTypes().get(1);
internal_static_google_cloud_apigateway_v1_ApiConfig_OpenApiDocument_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_ApiConfig_OpenApiDocument_descriptor,
new java.lang.String[] {
"Document",
});
internal_static_google_cloud_apigateway_v1_ApiConfig_GrpcServiceDefinition_descriptor =
internal_static_google_cloud_apigateway_v1_ApiConfig_descriptor.getNestedTypes().get(2);
internal_static_google_cloud_apigateway_v1_ApiConfig_GrpcServiceDefinition_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_ApiConfig_GrpcServiceDefinition_descriptor,
new java.lang.String[] {
"FileDescriptorSet", "Source",
});
internal_static_google_cloud_apigateway_v1_ApiConfig_LabelsEntry_descriptor =
internal_static_google_cloud_apigateway_v1_ApiConfig_descriptor.getNestedTypes().get(3);
internal_static_google_cloud_apigateway_v1_ApiConfig_LabelsEntry_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_ApiConfig_LabelsEntry_descriptor,
new java.lang.String[] {
"Key", "Value",
});
internal_static_google_cloud_apigateway_v1_Gateway_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_google_cloud_apigateway_v1_Gateway_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_Gateway_descriptor,
new java.lang.String[] {
"Name",
"CreateTime",
"UpdateTime",
"Labels",
"DisplayName",
"ApiConfig",
"State",
"DefaultHostname",
});
internal_static_google_cloud_apigateway_v1_Gateway_LabelsEntry_descriptor =
internal_static_google_cloud_apigateway_v1_Gateway_descriptor.getNestedTypes().get(0);
internal_static_google_cloud_apigateway_v1_Gateway_LabelsEntry_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_Gateway_LabelsEntry_descriptor,
new java.lang.String[] {
"Key", "Value",
});
internal_static_google_cloud_apigateway_v1_ListGatewaysRequest_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_google_cloud_apigateway_v1_ListGatewaysRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_ListGatewaysRequest_descriptor,
new java.lang.String[] {
"Parent", "PageSize", "PageToken", "Filter", "OrderBy",
});
internal_static_google_cloud_apigateway_v1_ListGatewaysResponse_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_google_cloud_apigateway_v1_ListGatewaysResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_ListGatewaysResponse_descriptor,
new java.lang.String[] {
"Gateways", "NextPageToken", "UnreachableLocations",
});
internal_static_google_cloud_apigateway_v1_GetGatewayRequest_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_google_cloud_apigateway_v1_GetGatewayRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_GetGatewayRequest_descriptor,
new java.lang.String[] {
"Name",
});
internal_static_google_cloud_apigateway_v1_CreateGatewayRequest_descriptor =
getDescriptor().getMessageTypes().get(6);
internal_static_google_cloud_apigateway_v1_CreateGatewayRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_CreateGatewayRequest_descriptor,
new java.lang.String[] {
"Parent", "GatewayId", "Gateway",
});
internal_static_google_cloud_apigateway_v1_UpdateGatewayRequest_descriptor =
getDescriptor().getMessageTypes().get(7);
internal_static_google_cloud_apigateway_v1_UpdateGatewayRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_UpdateGatewayRequest_descriptor,
new java.lang.String[] {
"UpdateMask", "Gateway",
});
internal_static_google_cloud_apigateway_v1_DeleteGatewayRequest_descriptor =
getDescriptor().getMessageTypes().get(8);
internal_static_google_cloud_apigateway_v1_DeleteGatewayRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_DeleteGatewayRequest_descriptor,
new java.lang.String[] {
"Name",
});
internal_static_google_cloud_apigateway_v1_ListApisRequest_descriptor =
getDescriptor().getMessageTypes().get(9);
internal_static_google_cloud_apigateway_v1_ListApisRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_ListApisRequest_descriptor,
new java.lang.String[] {
"Parent", "PageSize", "PageToken", "Filter", "OrderBy",
});
internal_static_google_cloud_apigateway_v1_ListApisResponse_descriptor =
getDescriptor().getMessageTypes().get(10);
internal_static_google_cloud_apigateway_v1_ListApisResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_ListApisResponse_descriptor,
new java.lang.String[] {
"Apis", "NextPageToken", "UnreachableLocations",
});
internal_static_google_cloud_apigateway_v1_GetApiRequest_descriptor =
getDescriptor().getMessageTypes().get(11);
internal_static_google_cloud_apigateway_v1_GetApiRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_GetApiRequest_descriptor,
new java.lang.String[] {
"Name",
});
internal_static_google_cloud_apigateway_v1_CreateApiRequest_descriptor =
getDescriptor().getMessageTypes().get(12);
internal_static_google_cloud_apigateway_v1_CreateApiRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_CreateApiRequest_descriptor,
new java.lang.String[] {
"Parent", "ApiId", "Api",
});
internal_static_google_cloud_apigateway_v1_UpdateApiRequest_descriptor =
getDescriptor().getMessageTypes().get(13);
internal_static_google_cloud_apigateway_v1_UpdateApiRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_UpdateApiRequest_descriptor,
new java.lang.String[] {
"UpdateMask", "Api",
});
internal_static_google_cloud_apigateway_v1_DeleteApiRequest_descriptor =
getDescriptor().getMessageTypes().get(14);
internal_static_google_cloud_apigateway_v1_DeleteApiRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_DeleteApiRequest_descriptor,
new java.lang.String[] {
"Name",
});
internal_static_google_cloud_apigateway_v1_ListApiConfigsRequest_descriptor =
getDescriptor().getMessageTypes().get(15);
internal_static_google_cloud_apigateway_v1_ListApiConfigsRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_ListApiConfigsRequest_descriptor,
new java.lang.String[] {
"Parent", "PageSize", "PageToken", "Filter", "OrderBy",
});
internal_static_google_cloud_apigateway_v1_ListApiConfigsResponse_descriptor =
getDescriptor().getMessageTypes().get(16);
internal_static_google_cloud_apigateway_v1_ListApiConfigsResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_ListApiConfigsResponse_descriptor,
new java.lang.String[] {
"ApiConfigs", "NextPageToken", "UnreachableLocations",
});
internal_static_google_cloud_apigateway_v1_GetApiConfigRequest_descriptor =
getDescriptor().getMessageTypes().get(17);
internal_static_google_cloud_apigateway_v1_GetApiConfigRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_GetApiConfigRequest_descriptor,
new java.lang.String[] {
"Name", "View",
});
internal_static_google_cloud_apigateway_v1_CreateApiConfigRequest_descriptor =
getDescriptor().getMessageTypes().get(18);
internal_static_google_cloud_apigateway_v1_CreateApiConfigRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_CreateApiConfigRequest_descriptor,
new java.lang.String[] {
"Parent", "ApiConfigId", "ApiConfig",
});
internal_static_google_cloud_apigateway_v1_UpdateApiConfigRequest_descriptor =
getDescriptor().getMessageTypes().get(19);
internal_static_google_cloud_apigateway_v1_UpdateApiConfigRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_UpdateApiConfigRequest_descriptor,
new java.lang.String[] {
"UpdateMask", "ApiConfig",
});
internal_static_google_cloud_apigateway_v1_DeleteApiConfigRequest_descriptor =
getDescriptor().getMessageTypes().get(20);
internal_static_google_cloud_apigateway_v1_DeleteApiConfigRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_DeleteApiConfigRequest_descriptor,
new java.lang.String[] {
"Name",
});
internal_static_google_cloud_apigateway_v1_OperationMetadata_descriptor =
getDescriptor().getMessageTypes().get(21);
internal_static_google_cloud_apigateway_v1_OperationMetadata_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_OperationMetadata_descriptor,
new java.lang.String[] {
"CreateTime",
"EndTime",
"Target",
"Verb",
"StatusMessage",
"RequestedCancellation",
"ApiVersion",
"Diagnostics",
});
internal_static_google_cloud_apigateway_v1_OperationMetadata_Diagnostic_descriptor =
internal_static_google_cloud_apigateway_v1_OperationMetadata_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_apigateway_v1_OperationMetadata_Diagnostic_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_apigateway_v1_OperationMetadata_Diagnostic_descriptor,
new java.lang.String[] {
"Location", "Message",
});
// Re-parse the descriptor with an extension registry so custom options
// (field_behavior, resource, resource_reference) are retained as typed options.
com.google.protobuf.ExtensionRegistry registry =
com.google.protobuf.ExtensionRegistry.newInstance();
registry.add(com.google.api.FieldBehaviorProto.fieldBehavior);
registry.add(com.google.api.ResourceProto.resource);
registry.add(com.google.api.ResourceProto.resourceDefinition);
registry.add(com.google.api.ResourceProto.resourceReference);
com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor(
descriptor, registry);
// Force static initialization of all imported descriptor classes.
com.google.api.FieldBehaviorProto.getDescriptor();
com.google.api.ResourceProto.getDescriptor();
com.google.protobuf.FieldMaskProto.getDescriptor();
com.google.protobuf.TimestampProto.getDescriptor();
com.google.api.AnnotationsProto.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.runtime.instructions.spark;
import java.util.ArrayList;
import java.util.Iterator;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.api.java.function.PairFunction;
import scala.Tuple2;
import org.apache.sysml.hops.AggBinaryOp.SparkAggType;
import org.apache.sysml.runtime.DMLRuntimeException;
import org.apache.sysml.runtime.DMLUnsupportedOperationException;
import org.apache.sysml.runtime.controlprogram.context.ExecutionContext;
import org.apache.sysml.runtime.controlprogram.context.SparkExecutionContext;
import org.apache.sysml.runtime.instructions.InstructionUtils;
import org.apache.sysml.runtime.instructions.cp.CPOperand;
import org.apache.sysml.runtime.instructions.spark.data.LazyIterableIterator;
import org.apache.sysml.runtime.instructions.spark.data.PartitionedBroadcastMatrix;
import org.apache.sysml.runtime.instructions.spark.functions.IsBlockInRange;
import org.apache.sysml.runtime.instructions.spark.utils.RDDAggregateUtils;
import org.apache.sysml.runtime.instructions.spark.utils.SparkUtils;
import org.apache.sysml.runtime.matrix.MatrixCharacteristics;
import org.apache.sysml.runtime.matrix.data.MatrixBlock;
import org.apache.sysml.runtime.matrix.data.MatrixIndexes;
import org.apache.sysml.runtime.matrix.data.OperationsOnMatrixValues;
import org.apache.sysml.runtime.matrix.mapred.IndexedMatrixValue;
import org.apache.sysml.runtime.matrix.operators.Operator;
import org.apache.sysml.runtime.matrix.operators.SimpleOperator;
import org.apache.sysml.runtime.util.IndexRange;
import org.apache.sysml.runtime.util.UtilFunctions;
public class MatrixIndexingSPInstruction extends UnarySPInstruction
{
/*
 * This class implements the matrix indexing functionality inside CP.
 * Example instructions:
 * rangeReIndex:mVar1:Var2:Var3:Var4:Var5:mVar6
 * input=mVar1, output=mVar6,
 * bounds = (Var2,Var3,Var4,Var5)
 * rowindex_lower: Var2, rowindex_upper: Var3
 * colindex_lower: Var4, colindex_upper: Var5
 * leftIndex:mVar1:mVar2:Var3:Var4:Var5:Var6:mVar7
 * triggered by "mVar1[Var3:Var4, Var5:Var6] = mVar2"
 * the result is stored in mVar7
 *
 */
// Index-range operands (1-based, inclusive); resolved to scalar longs at execution time.
protected CPOperand rowLower, rowUpper, colLower, colUpper;
// Aggregation strategy for right indexing; remains null for left-indexing instructions.
protected SparkAggType _aggType = null;
/**
 * Constructor for right indexing (rangeReIndex) instructions.
 *
 * @param op      operator (unused simple operator placeholder)
 * @param in      input matrix operand
 * @param rl      row lower bound operand
 * @param ru      row upper bound operand
 * @param cl      column lower bound operand
 * @param cu      column upper bound operand
 * @param out     output matrix operand
 * @param aggtype Spark aggregation type applied after block slicing
 * @param opcode  instruction opcode
 * @param istr    original instruction string (for error reporting / lineage)
 */
public MatrixIndexingSPInstruction(Operator op, CPOperand in, CPOperand rl, CPOperand ru, CPOperand cl, CPOperand cu,
CPOperand out, SparkAggType aggtype, String opcode, String istr)
{
super(op, in, out, opcode, istr);
rowLower = rl;
rowUpper = ru;
colLower = cl;
colUpper = cu;
_aggType = aggtype;
}
/**
 * Constructor for left indexing (leftIndex/mapLeftIndex) instructions.
 * Note: no aggregation type is set here; _aggType stays null for left indexing.
 *
 * @param op       operator (unused simple operator placeholder)
 * @param lhsInput left-hand-side (target) matrix operand
 * @param rhsInput right-hand-side (source) matrix operand
 * @param rl       row lower bound operand
 * @param ru       row upper bound operand
 * @param cl       column lower bound operand
 * @param cu       column upper bound operand
 * @param out      output matrix operand
 * @param opcode   instruction opcode
 * @param istr     original instruction string (for error reporting / lineage)
 */
public MatrixIndexingSPInstruction(Operator op, CPOperand lhsInput, CPOperand rhsInput, CPOperand rl, CPOperand ru, CPOperand cl, CPOperand cu,
CPOperand out, String opcode, String istr)
{
super(op, lhsInput, rhsInput, out, opcode, istr);
rowLower = rl;
rowUpper = ru;
colLower = cl;
colUpper = cu;
}
/**
 * Parses a serialized instruction string into a MatrixIndexingSPInstruction.
 * Supported opcodes are rangeReIndex (right indexing) and
 * leftIndex/mapLeftIndex (left indexing); both forms carry exactly 8 parts.
 *
 * @param str serialized instruction string
 * @return parsed matrix indexing instruction
 * @throws DMLRuntimeException if the opcode is unknown or the operand count is wrong
 */
public static MatrixIndexingSPInstruction parseInstruction ( String str )
	throws DMLRuntimeException
{
	String[] fields = InstructionUtils.getInstructionPartsWithValueType(str);
	String opcode = fields[0];

	boolean rightIndexing = opcode.equalsIgnoreCase("rangeReIndex");
	boolean leftIndexing = opcode.equalsIgnoreCase("leftIndex")
		|| opcode.equalsIgnoreCase("mapLeftIndex");

	//guard: reject unknown opcodes before inspecting operands
	if( !rightIndexing && !leftIndexing )
		throw new DMLRuntimeException("Unknown opcode while parsing a MatrixIndexingSPInstruction: " + str);

	//guard: both instruction forms require exactly 8 parts
	if( fields.length != 8 )
		throw new DMLRuntimeException("Invalid number of operands in instruction: " + str);

	if( rightIndexing ) {
		// Example: rangeReIndex:mVar1:Var2:Var3:Var4:Var5:mVar6
		CPOperand input = new CPOperand(fields[1]);
		CPOperand rowLo = new CPOperand(fields[2]);
		CPOperand rowUp = new CPOperand(fields[3]);
		CPOperand colLo = new CPOperand(fields[4]);
		CPOperand colUp = new CPOperand(fields[5]);
		CPOperand output = new CPOperand(fields[6]);
		SparkAggType aggtype = SparkAggType.valueOf(fields[7]);
		return new MatrixIndexingSPInstruction(
			new SimpleOperator(null), input, rowLo, rowUp, colLo, colUp, output, aggtype, opcode, str);
	}

	// Example: leftIndex:mVar1:mvar2:Var3:Var4:Var5:Var6:mVar7
	CPOperand target = new CPOperand(fields[1]);
	CPOperand source = new CPOperand(fields[2]);
	CPOperand rowLo = new CPOperand(fields[3]);
	CPOperand rowUp = new CPOperand(fields[4]);
	CPOperand colLo = new CPOperand(fields[5]);
	CPOperand colUp = new CPOperand(fields[6]);
	CPOperand output = new CPOperand(fields[7]);
	return new MatrixIndexingSPInstruction(
		new SimpleOperator(null), target, source, rowLo, rowUp, colLo, colUp, output, opcode, str);
}
/**
 * Executes the indexing instruction on Spark: right indexing (rangeReIndex)
 * slices the requested block range out of the input RDD; left indexing
 * (leftIndex/mapLeftIndex) merges the RHS matrix into the LHS over the range.
 *
 * @param ec execution context (must be a SparkExecutionContext)
 * @throws DMLRuntimeException on invalid opcode, unknown dimensions, or range mismatch
 */
@Override
public void processInstruction(ExecutionContext ec)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
SparkExecutionContext sec = (SparkExecutionContext)ec;
String opcode = getOpcode();
//get indexing range (scalars resolved from the symbol table; 1-based, inclusive)
long rl = ec.getScalarInput(rowLower.getName(), rowLower.getValueType(), rowLower.isLiteral()).getLongValue();
long ru = ec.getScalarInput(rowUpper.getName(), rowUpper.getValueType(), rowUpper.isLiteral()).getLongValue();
long cl = ec.getScalarInput(colLower.getName(), colLower.getValueType(), colLower.isLiteral()).getLongValue();
long cu = ec.getScalarInput(colUpper.getName(), colUpper.getValueType(), colUpper.isLiteral()).getLongValue();
IndexRange ixrange = new IndexRange(rl, ru, cl, cu);
//right indexing
if( opcode.equalsIgnoreCase("rangeReIndex") )
{
//update and check output dimensions (output size follows directly from the range)
MatrixCharacteristics mcIn = sec.getMatrixCharacteristics(input1.getName());
MatrixCharacteristics mcOut = sec.getMatrixCharacteristics(output.getName());
mcOut.set(ru-rl+1, cu-cl+1, mcIn.getRowsPerBlock(), mcIn.getColsPerBlock());
checkValidOutputDimensions(mcOut);
//execute right indexing operation (partitioning-preserving if possible)
JavaPairRDD<MatrixIndexes,MatrixBlock> in1 = sec.getBinaryBlockRDDHandleForVariable( input1.getName() );
JavaPairRDD<MatrixIndexes,MatrixBlock> out = null;
if( isPartitioningPreservingRightIndexing(mcIn, ixrange) ) {
//mapPartitions with preservesPartitioning=true avoids a shuffle
out = in1.mapPartitionsToPair(
new SliceBlockPartitionFunction(ixrange, mcOut), true);
}
else{
//prune blocks outside the range, then slice; may produce multiple fragments per output block
out = in1.filter(new IsBlockInRange(rl, ru, cl, cu, mcOut))
.flatMapToPair(new SliceBlock(ixrange, mcOut));
//aggregation if required (merges partial output blocks by key)
if( _aggType != SparkAggType.NONE )
out = RDDAggregateUtils.mergeByKey(out);
}
//put output RDD handle into symbol table
sec.setRDDHandleForVariable(output.getName(), out);
sec.addLineageRDD(output.getName(), input1.getName());
}
//left indexing
else if ( opcode.equalsIgnoreCase("leftIndex") || opcode.equalsIgnoreCase("mapLeftIndex"))
{
JavaPairRDD<MatrixIndexes,MatrixBlock> in1 = sec.getBinaryBlockRDDHandleForVariable( input1.getName() );
PartitionedBroadcastMatrix broadcastIn2 = null;
JavaPairRDD<MatrixIndexes,MatrixBlock> in2 = null;
JavaPairRDD<MatrixIndexes,MatrixBlock> out = null;
//update and check output dimensions (output has the LHS dimensions)
MatrixCharacteristics mcOut = sec.getMatrixCharacteristics(output.getName());
MatrixCharacteristics mcLeft = ec.getMatrixCharacteristics(input1.getName());
mcOut.set(mcLeft.getRows(), mcLeft.getCols(), mcLeft.getRowsPerBlock(), mcLeft.getColsPerBlock());
checkValidOutputDimensions(mcOut);
//note: always matrix rhs, scalars are preprocessed via cast to 1x1 matrix
MatrixCharacteristics mcRight = ec.getMatrixCharacteristics(input2.getName());
//sanity check matching index range and rhs dimensions
if(!mcRight.dimsKnown()) {
throw new DMLRuntimeException("The right input matrix dimensions are not specified for MatrixIndexingSPInstruction");
}
if(!(ru-rl+1 == mcRight.getRows() && cu-cl+1 == mcRight.getCols())) {
throw new DMLRuntimeException("Invalid index range of leftindexing: ["+rl+":"+ru+","+cl+":"+cu+"] vs ["+mcRight.getRows()+"x"+mcRight.getCols()+"]." );
}
if(opcode.equalsIgnoreCase("mapLeftIndex"))
{
//broadcast variant: RHS is small enough to ship to every executor
broadcastIn2 = sec.getBroadcastForVariable( input2.getName() );
//partitioning-preserving mappartitions (key access required for broadcast loopkup)
out = in1.mapPartitionsToPair(
new LeftIndexPartitionFunction(broadcastIn2, ixrange, mcOut), true);
}
else {
// Zero-out LHS (clear the target range so RHS fragments can be merged in)
in1 = in1.mapToPair(new ZeroOutLHS(false, mcLeft.getRowsPerBlock(),
mcLeft.getColsPerBlock(), rl, ru, cl, cu));
// Slice RHS to merge for LHS (re-block RHS fragments to LHS block coordinates)
in2 = sec.getBinaryBlockRDDHandleForVariable( input2.getName() )
.flatMapToPair(new SliceRHSForLeftIndexing(rl, cl, mcLeft.getRowsPerBlock(), mcLeft.getColsPerBlock(), mcLeft.getRows(), mcLeft.getCols()));
out = RDDAggregateUtils.mergeByKey(in1.union(in2));
}
//register output and maintain lineage for both inputs (RDD or broadcast)
sec.setRDDHandleForVariable(output.getName(), out);
sec.addLineageRDD(output.getName(), input1.getName());
if( broadcastIn2 != null)
sec.addLineageBroadcast(output.getName(), input2.getName());
if(in2 != null)
sec.addLineageRDD(output.getName(), input2.getName());
}
else
throw new DMLRuntimeException("Invalid opcode (" + opcode +") encountered in MatrixIndexingSPInstruction.");
}
/**
 * Verifies that the output matrix characteristics carry fully known
 * dimensions after the indexing update.
 *
 * @param mcOut matrix characteristics of the instruction output
 * @throws DMLRuntimeException if the output dimensions are unknown
 */
private static void checkValidOutputDimensions(MatrixCharacteristics mcOut)
	throws DMLRuntimeException
{
	//guard clause: nothing to do when dimensions are fully specified
	if( mcOut.dimsKnown() )
		return;
	throw new DMLRuntimeException("MatrixIndexingSPInstruction: The updated output dimensions are invalid: " + mcOut);
}
/**
 * Checks if the given right indexing operation is partitioning-preserving,
 * i.e., every input block maps 1-1 to exactly one output block. This holds
 * for (a) selecting all rows of a single-column-block matrix, or
 * (b) selecting all columns of a single-row-block matrix.
 *
 * Fix: '&amp;&amp;' binds tighter than '||', so the previous expression
 * {@code dimsKnown() && A || B} guarded only alternative A with the
 * dimensions check; per the inline comments both alternatives require
 * known dimensions, so the check now guards the entire disjunction.
 *
 * @param mcIn matrix characteristics of the indexing input
 * @param ixrange selected index range (1-based, inclusive)
 * @return true if 1-1 row or column block indexing applies
 */
private boolean isPartitioningPreservingRightIndexing(MatrixCharacteristics mcIn, IndexRange ixrange)
{
	return ( mcIn.dimsKnown() &&
		( (ixrange.rowStart==1 && ixrange.rowEnd==mcIn.getRows() && mcIn.getCols()<=mcIn.getColsPerBlock() ) //1-1 column block indexing
		||(ixrange.colStart==1 && ixrange.colEnd==mcIn.getCols() && mcIn.getRows()<=mcIn.getRowsPerBlock() ))); //1-1 row block indexing
}
/**
 * Spark function used by left indexing: slices every block of the right-hand
 * side input so that the resulting pieces align with the block grid of the
 * left-hand side target region starting at (rl, cl), and emits each piece
 * under its LHS block index (embedded in an otherwise-empty LHS-sized block)
 * for a subsequent merge with the zeroed-out LHS.
 */
private static class SliceRHSForLeftIndexing implements PairFlatMapFunction<Tuple2<MatrixIndexes,MatrixBlock>, MatrixIndexes, MatrixBlock>
{
private static final long serialVersionUID = 5724800998701216440L;

//upper-left corner (1-based) of the left indexing target range
private long rl;
private long cl;
//block sizes of the LHS matrix
private int brlen;
private int bclen;
//overall LHS dimensions (needed to size boundary blocks correctly)
private long lhs_rlen;
private long lhs_clen;

public SliceRHSForLeftIndexing(long rl, long cl, int brlen, int bclen, long lhs_rlen, long lhs_clen) {
this.rl = rl;
this.cl = cl;
this.brlen = brlen;
this.bclen = bclen;
this.lhs_rlen = lhs_rlen;
this.lhs_clen = lhs_clen;
}

@Override
public Iterable<Tuple2<MatrixIndexes, MatrixBlock>> call(Tuple2<MatrixIndexes, MatrixBlock> rightKV)
throws Exception
{
ArrayList<Tuple2<MatrixIndexes, MatrixBlock>> retVal = new ArrayList<Tuple2<MatrixIndexes,MatrixBlock>>();

//global (1-based) cell range this RHS block occupies within the LHS
long start_lhs_globalRowIndex = rl + (rightKV._1.getRowIndex()-1)*brlen;
long start_lhs_globalColIndex = cl + (rightKV._1.getColumnIndex()-1)*bclen;
long end_lhs_globalRowIndex = start_lhs_globalRowIndex + rightKV._2.getNumRows() - 1;
long end_lhs_globalColIndex = start_lhs_globalColIndex + rightKV._2.getNumColumns() - 1;

//range of LHS block indexes touched by this RHS block (an RHS block can
//straddle up to four LHS blocks because rl/cl are arbitrary offsets)
long start_lhs_rowIndex = UtilFunctions.blockIndexCalculation(start_lhs_globalRowIndex, brlen);
long end_lhs_rowIndex = UtilFunctions.blockIndexCalculation(end_lhs_globalRowIndex, brlen);
long start_lhs_colIndex = UtilFunctions.blockIndexCalculation(start_lhs_globalColIndex, bclen);
long end_lhs_colIndex = UtilFunctions.blockIndexCalculation(end_lhs_globalColIndex, bclen);

for(long leftRowIndex = start_lhs_rowIndex; leftRowIndex <= end_lhs_rowIndex; leftRowIndex++) {
for(long leftColIndex = start_lhs_colIndex; leftColIndex <= end_lhs_colIndex; leftColIndex++) {

// Global cell range of the current LHS block covered by this RHS block
long lhs_rl = Math.max((leftRowIndex-1)*brlen+1, start_lhs_globalRowIndex);
long lhs_ru = Math.min(leftRowIndex*brlen, end_lhs_globalRowIndex);
long lhs_cl = Math.max((leftColIndex-1)*bclen+1, start_lhs_globalColIndex);
long lhs_cu = Math.min(leftColIndex*bclen, end_lhs_globalColIndex);

//same range expressed as local (in-block) cell offsets on the LHS side
int lhs_lrl = UtilFunctions.cellInBlockCalculation(lhs_rl, brlen);
int lhs_lru = UtilFunctions.cellInBlockCalculation(lhs_ru, brlen);
int lhs_lcl = UtilFunctions.cellInBlockCalculation(lhs_cl, bclen);
int lhs_lcu = UtilFunctions.cellInBlockCalculation(lhs_cu, bclen);

//corresponding global cell range on the RHS (shift back by rl/cl)
long rhs_rl = lhs_rl - rl + 1;
long rhs_ru = rhs_rl + (lhs_ru - lhs_rl);
long rhs_cl = lhs_cl - cl + 1;
long rhs_cu = rhs_cl + (lhs_cu - lhs_cl);

//and as local (in-block) cell offsets on the RHS side
int rhs_lrl = UtilFunctions.cellInBlockCalculation(rhs_rl, brlen);
int rhs_lru = UtilFunctions.cellInBlockCalculation(rhs_ru, brlen);
int rhs_lcl = UtilFunctions.cellInBlockCalculation(rhs_cl, bclen);
int rhs_lcu = UtilFunctions.cellInBlockCalculation(rhs_cu, bclen);

//slice the overlapping part out of the RHS block
MatrixBlock slicedRHSBlk = rightKV._2.sliceOperations(rhs_lrl, rhs_lru, rhs_lcl, rhs_lcu, new MatrixBlock());

//embed the slice into an empty block sized like the target LHS block
//(boundary blocks may be smaller than brlen x bclen)
int lbrlen = UtilFunctions.computeBlockSize(lhs_rlen, leftRowIndex, brlen);
int lbclen = UtilFunctions.computeBlockSize(lhs_clen, leftColIndex, bclen);
MatrixBlock resultBlock = new MatrixBlock(lbrlen, lbclen, false);
resultBlock = resultBlock.leftIndexingOperations(slicedRHSBlk, lhs_lrl, lhs_lru, lhs_lcl, lhs_lcu, null, false);
retVal.add(new Tuple2<MatrixIndexes, MatrixBlock>(new MatrixIndexes(leftRowIndex, leftColIndex), resultBlock));
}
}
return retVal;
}
}
/**
 * Spark function that zeroes out the region of each LHS block overlapping the
 * left indexing range [rl:ru, cl:cu] (mode passed through to zeroOutOperations
 * via 'complementary'); blocks outside the range are passed through unchanged.
 *
 * Fix: fields are now uniformly {@code private final} — in the original mixed
 * declarations ({@code private int brlen; int bclen;} and
 * {@code private long rl; long ru; long cl; long cu;}) bclen, ru, cl and cu
 * were accidentally package-private.
 */
private static class ZeroOutLHS implements PairFunction<Tuple2<MatrixIndexes,MatrixBlock>, MatrixIndexes,MatrixBlock>
{
	private static final long serialVersionUID = -3581795160948484261L;

	//zero-out mode flag, forwarded to zeroOutOperations
	private final boolean complementary;
	//block sizes of the LHS matrix
	private final int brlen;
	private final int bclen;
	//left indexing range, both as scalars (for the block-range test)
	//and as an IndexRange (for the zero-out range computation)
	private final IndexRange indexRange;
	private final long rl;
	private final long ru;
	private final long cl;
	private final long cu;

	public ZeroOutLHS(boolean complementary, int brlen, int bclen, long rl, long ru, long cl, long cu) {
		this.complementary = complementary;
		this.brlen = brlen;
		this.bclen = bclen;
		this.rl = rl;
		this.ru = ru;
		this.cl = cl;
		this.cu = cu;
		this.indexRange = new IndexRange(rl, ru, cl, cu);
	}

	@Override
	public Tuple2<MatrixIndexes, MatrixBlock> call(Tuple2<MatrixIndexes, MatrixBlock> kv)
		throws Exception
	{
		//pass through blocks that do not overlap the indexing range
		if( !UtilFunctions.isInBlockRange(kv._1(), brlen, bclen, rl, ru, cl, cu) ) {
			return kv;
		}

		//compute the in-block sub-range to zero out; an all -1 result
		//signals an internal error in the range computation
		IndexRange range = UtilFunctions.getSelectedRangeForZeroOut(new IndexedMatrixValue(kv._1, kv._2), brlen, bclen, indexRange);
		if(range.rowStart == -1 && range.rowEnd == -1 && range.colStart == -1 && range.colEnd == -1) {
			throw new Exception("Error while getting range for zero-out");
		}

		MatrixBlock zeroBlk = (MatrixBlock) kv._2.zeroOutOperations(new MatrixBlock(), range, complementary);
		return new Tuple2<MatrixIndexes, MatrixBlock>(kv._1, zeroBlk);
	}
}
/**
 * Partitioning-preserving Spark function for broadcast-based left indexing
 * (mapLeftIndex): for every LHS block overlapping the index range, the
 * corresponding piece of the broadcast RHS is sliced out and written into the
 * block; all other blocks are passed through unchanged. Implemented as a lazy
 * mapPartitions iterator to avoid materializing whole partitions.
 */
private static class LeftIndexPartitionFunction implements PairFlatMapFunction<Iterator<Tuple2<MatrixIndexes,MatrixBlock>>, MatrixIndexes, MatrixBlock>
{
private static final long serialVersionUID = 1757075506076838258L;

//broadcast handle of the (partitioned) right-hand side matrix
private PartitionedBroadcastMatrix _binput;
//left indexing range (1-based, inclusive)
private IndexRange _ixrange;
//block sizes of the output/LHS matrix
private int _brlen;
private int _bclen;

public LeftIndexPartitionFunction(PartitionedBroadcastMatrix binput, IndexRange ixrange, MatrixCharacteristics mc)
{
_binput = binput;
_ixrange = ixrange;
_brlen = mc.getRowsPerBlock();
_bclen = mc.getColsPerBlock();
}

@Override
public Iterable<Tuple2<MatrixIndexes, MatrixBlock>> call(Iterator<Tuple2<MatrixIndexes, MatrixBlock>> arg0)
throws Exception
{
return new LeftIndexPartitionIterator(arg0);
}

/**
 * Lazy per-element iterator computing the left indexing result for one
 * input pair at a time.
 */
private class LeftIndexPartitionIterator extends LazyIterableIterator<Tuple2<MatrixIndexes, MatrixBlock>>
{
public LeftIndexPartitionIterator(Iterator<Tuple2<MatrixIndexes, MatrixBlock>> in) {
super(in);
}

@Override
protected Tuple2<MatrixIndexes, MatrixBlock> computeNext(Tuple2<MatrixIndexes, MatrixBlock> arg)
throws Exception
{
//pass through blocks outside the left indexing range
if(!UtilFunctions.isInBlockRange(arg._1(), _brlen, _bclen, _ixrange)) {
return arg;
}

// Calculate global index of left hand side block
long lhs_rl = Math.max(_ixrange.rowStart, (arg._1.getRowIndex()-1)*_brlen + 1);
long lhs_ru = Math.min(_ixrange.rowEnd, arg._1.getRowIndex()*_brlen);
long lhs_cl = Math.max(_ixrange.colStart, (arg._1.getColumnIndex()-1)*_bclen + 1);
long lhs_cu = Math.min(_ixrange.colEnd, arg._1.getColumnIndex()*_bclen);

// Calculate global index of right hand side block
long rhs_rl = lhs_rl - _ixrange.rowStart + 1;
long rhs_ru = rhs_rl + (lhs_ru - lhs_rl);
long rhs_cl = lhs_cl - _ixrange.colStart + 1;
long rhs_cu = rhs_cl + (lhs_cu - lhs_cl);

// Provide global zero-based index to sliceOperations
MatrixBlock slicedRHSMatBlock = _binput.sliceOperations(rhs_rl, rhs_ru, rhs_cl, rhs_cu, new MatrixBlock());

// Provide local zero-based index to leftIndexingOperations
int lhs_lrl = UtilFunctions.cellInBlockCalculation(lhs_rl, _brlen);
int lhs_lru = UtilFunctions.cellInBlockCalculation(lhs_ru, _brlen);
int lhs_lcl = UtilFunctions.cellInBlockCalculation(lhs_cl, _bclen);
int lhs_lcu = UtilFunctions.cellInBlockCalculation(lhs_cu, _bclen);
MatrixBlock ret = arg._2.leftIndexingOperations(slicedRHSMatBlock, lhs_lrl, lhs_lru, lhs_lcl, lhs_lcu, new MatrixBlock(), false);
return new Tuple2<MatrixIndexes, MatrixBlock>(arg._1, ret);
}
}
}
/**
 * Spark function for right indexing: slices each input block w.r.t. the
 * given index range and emits the produced output blocks (zero, one, or
 * several per input block) with shifted block indexes.
 */
private static class SliceBlock implements PairFlatMapFunction<Tuple2<MatrixIndexes,MatrixBlock>, MatrixIndexes, MatrixBlock>
{
	private static final long serialVersionUID = 5733886476413136826L;

	private IndexRange _ixrange;
	private int _brlen;
	private int _bclen;

	public SliceBlock(IndexRange ixrange, MatrixCharacteristics mcOut) {
		_ixrange = ixrange;
		_brlen = mcOut.getRowsPerBlock();
		_bclen = mcOut.getColsPerBlock();
	}

	@Override
	public Iterable<Tuple2<MatrixIndexes, MatrixBlock>> call(Tuple2<MatrixIndexes, MatrixBlock> kv)
		throws Exception
	{
		//wrap the rdd pair into an indexed matrix value
		IndexedMatrixValue imv = SparkUtils.toIndexedMatrixBlock(kv);

		//slice w.r.t. the index range, collecting all produced blocks
		ArrayList<IndexedMatrixValue> slices = new ArrayList<IndexedMatrixValue>();
		OperationsOnMatrixValues.performSlice(imv, _ixrange, _brlen, _bclen, slices);

		//convert back into rdd pairs
		return SparkUtils.fromIndexedMatrixBlock(slices);
	}
}
/**
 * Partitioning-preserving variant of SliceBlock for 1-1 row/column block
 * indexing: each input block yields exactly one output block, so the slice
 * can run as a lazy mapPartitions iterator without changing the partitioning.
 */
private static class SliceBlockPartitionFunction implements PairFlatMapFunction<Iterator<Tuple2<MatrixIndexes,MatrixBlock>>, MatrixIndexes, MatrixBlock>
{
private static final long serialVersionUID = -8111291718258309968L;

//right indexing range (1-based, inclusive)
private IndexRange _ixrange;
//block sizes of the output matrix
private int _brlen;
private int _bclen;

public SliceBlockPartitionFunction(IndexRange ixrange, MatrixCharacteristics mcOut) {
_ixrange = ixrange;
_brlen = mcOut.getRowsPerBlock();
_bclen = mcOut.getColsPerBlock();
}

@Override
public Iterable<Tuple2<MatrixIndexes, MatrixBlock>> call(Iterator<Tuple2<MatrixIndexes, MatrixBlock>> arg0)
throws Exception
{
return new SliceBlockPartitionIterator(arg0);
}

//lazy per-element iterator; relies on the 1-1 mapping guaranteed by
//isPartitioningPreservingRightIndexing
private class SliceBlockPartitionIterator extends LazyIterableIterator<Tuple2<MatrixIndexes, MatrixBlock>>
{
public SliceBlockPartitionIterator(Iterator<Tuple2<MatrixIndexes, MatrixBlock>> in) {
super(in);
}

@Override
protected Tuple2<MatrixIndexes, MatrixBlock> computeNext(Tuple2<MatrixIndexes, MatrixBlock> arg)
throws Exception
{
IndexedMatrixValue in = SparkUtils.toIndexedMatrixBlock(arg);
ArrayList<IndexedMatrixValue> outlist = new ArrayList<IndexedMatrixValue>();
OperationsOnMatrixValues.performSlice(in, _ixrange, _brlen, _bclen, outlist);
assert(outlist.size() == 1); //1-1 row/column block indexing
return SparkUtils.fromIndexedMatrixBlock(outlist.get(0));
}
}
}
}
| |
package net.minecraft.block;
import com.google.common.collect.Lists;
import java.util.List;
import javax.annotation.Nullable;
import net.minecraft.block.material.EnumPushReaction;
import net.minecraft.block.material.Material;
import net.minecraft.block.properties.IProperty;
import net.minecraft.block.properties.PropertyBool;
import net.minecraft.block.state.BlockFaceShape;
import net.minecraft.block.state.BlockPistonStructureHelper;
import net.minecraft.block.state.BlockStateContainer;
import net.minecraft.block.state.IBlockState;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Blocks;
import net.minecraft.init.SoundEvents;
import net.minecraft.item.ItemStack;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.tileentity.TileEntityPiston;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.Mirror;
import net.minecraft.util.Rotation;
import net.minecraft.util.SoundCategory;
import net.minecraft.util.math.AxisAlignedBB;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;
/**
 * Piston base block (regular and sticky). Handles redstone-driven extension
 * and retraction via world block events and delegates the actual movement of
 * blocks to moving-piston tile entities.
 */
public class BlockPistonBase extends BlockDirectional
{
/** Whether the piston arm is currently extended. */
public static final PropertyBool EXTENDED = PropertyBool.create("extended");
//Base shapes while extended: the quarter slab towards FACING is occupied by the arm.
protected static final AxisAlignedBB PISTON_BASE_EAST_AABB = new AxisAlignedBB(0.0D, 0.0D, 0.0D, 0.75D, 1.0D, 1.0D);
protected static final AxisAlignedBB PISTON_BASE_WEST_AABB = new AxisAlignedBB(0.25D, 0.0D, 0.0D, 1.0D, 1.0D, 1.0D);
protected static final AxisAlignedBB PISTON_BASE_SOUTH_AABB = new AxisAlignedBB(0.0D, 0.0D, 0.0D, 1.0D, 1.0D, 0.75D);
protected static final AxisAlignedBB PISTON_BASE_NORTH_AABB = new AxisAlignedBB(0.0D, 0.0D, 0.25D, 1.0D, 1.0D, 1.0D);
protected static final AxisAlignedBB PISTON_BASE_UP_AABB = new AxisAlignedBB(0.0D, 0.0D, 0.0D, 1.0D, 0.75D, 1.0D);
protected static final AxisAlignedBB PISTON_BASE_DOWN_AABB = new AxisAlignedBB(0.0D, 0.25D, 0.0D, 1.0D, 1.0D, 1.0D);
/** This piston is the sticky one? */
private final boolean isSticky;

/**
 * @param isSticky true for the sticky piston variant, which attempts to
 *                 pull the block in front back on retraction
 */
public BlockPistonBase(boolean isSticky)
{
super(Material.PISTON);
//default: facing north, retracted
this.setDefaultState(this.blockState.getBaseState().withProperty(FACING, EnumFacing.NORTH).withProperty(EXTENDED, Boolean.valueOf(false)));
this.isSticky = isSticky;
this.setSoundType(SoundType.STONE);
this.setHardness(0.5F);
this.setCreativeTab(CreativeTabs.REDSTONE);
}

//Only a retracted piston (full cube) suffocates entities inside it.
public boolean causesSuffocation(IBlockState state)
{
return !((Boolean)state.getValue(EXTENDED)).booleanValue();
}

//Full cube when retracted; when extended, the base shape is the 3/4 slab
//opposite the facing (the remaining quarter belongs to the arm).
public AxisAlignedBB getBoundingBox(IBlockState state, IBlockAccess source, BlockPos pos)
{
if (((Boolean)state.getValue(EXTENDED)).booleanValue())
{
switch ((EnumFacing)state.getValue(FACING))
{
case DOWN:
return PISTON_BASE_DOWN_AABB;
case UP:
default:
return PISTON_BASE_UP_AABB;
case NORTH:
return PISTON_BASE_NORTH_AABB;
case SOUTH:
return PISTON_BASE_SOUTH_AABB;
case WEST:
return PISTON_BASE_WEST_AABB;
case EAST:
return PISTON_BASE_EAST_AABB;
}
}
else
{
return FULL_BLOCK_AABB;
}
}

/**
 * Determines if the block is solid enough on the top side to support other blocks, like redstone components.
 */
public boolean isTopSolid(IBlockState state)
{
//solid on top when retracted, or when the arm extends downwards
return !((Boolean)state.getValue(EXTENDED)).booleanValue() || state.getValue(FACING) == EnumFacing.DOWN;
}

//Collision box equals the (possibly slab-shaped) bounding box.
public void addCollisionBoxToList(IBlockState state, World worldIn, BlockPos pos, AxisAlignedBB entityBox, List<AxisAlignedBB> collidingBoxes, @Nullable Entity entityIn, boolean p_185477_7_)
{
addCollisionBoxToList(pos, entityBox, collidingBoxes, state.getBoundingBox(worldIn, pos));
}

/**
 * Used to determine ambient occlusion and culling when rebuilding chunks for render
 */
public boolean isOpaqueCube(IBlockState state)
{
return false;
}

/**
 * Called by ItemBlocks after a block is set in the world, to allow post-place logic
 */
public void onBlockPlacedBy(World worldIn, BlockPos pos, IBlockState state, EntityLivingBase placer, ItemStack stack)
{
//orient towards the placer, then immediately evaluate redstone state (server only)
worldIn.setBlockState(pos, state.withProperty(FACING, EnumFacing.getDirectionFromEntityLiving(pos, placer)), 2);
if (!worldIn.isRemote)
{
this.checkForMove(worldIn, pos, state);
}
}

/**
 * Called when a neighboring block was changed and marks that this state should perform any checks during a neighbor
 * change. Cases may include when redstone power is updated, cactus blocks popping off due to a neighboring solid
 * block, etc.
 */
public void neighborChanged(IBlockState state, World worldIn, BlockPos pos, Block blockIn, BlockPos fromPos)
{
if (!worldIn.isRemote)
{
this.checkForMove(worldIn, pos, state);
}
}

/**
 * Called after the block is set in the Chunk data, but before the Tile Entity is set
 */
public void onBlockAdded(World worldIn, BlockPos pos, IBlockState state)
{
//getTileEntity == null guards against re-triggering while a moving-piston
//tile entity from an in-progress move still occupies this position
if (!worldIn.isRemote && worldIn.getTileEntity(pos) == null)
{
this.checkForMove(worldIn, pos, state);
}
}

/**
 * Called by ItemBlocks just before a block is actually set in the world, to allow for adjustments to the
 * IBlockstate
 */
public IBlockState getStateForPlacement(World worldIn, BlockPos pos, EnumFacing facing, float hitX, float hitY, float hitZ, int meta, EntityLivingBase placer)
{
return this.getDefaultState().withProperty(FACING, EnumFacing.getDirectionFromEntityLiving(pos, placer)).withProperty(EXTENDED, Boolean.valueOf(false));
}

/**
 * Compares the piston's current extension state with its redstone power and
 * schedules a block event when they differ: event id 0 = extend (only if the
 * structure in front can actually move), id 1 = retract.
 */
private void checkForMove(World worldIn, BlockPos pos, IBlockState state)
{
EnumFacing enumfacing = (EnumFacing)state.getValue(FACING);
boolean flag = this.shouldBeExtended(worldIn, pos, enumfacing);
if (flag && !((Boolean)state.getValue(EXTENDED)).booleanValue())
{
if ((new BlockPistonStructureHelper(worldIn, pos, enumfacing, true)).canMove())
{
worldIn.addBlockEvent(pos, this, 0, enumfacing.getIndex());
}
}
else if (!flag && ((Boolean)state.getValue(EXTENDED)).booleanValue())
{
worldIn.addBlockEvent(pos, this, 1, enumfacing.getIndex());
}
}

/**
 * A piston should be extended when powered from any side except its facing,
 * from straight below, or via any non-down side of the block directly above.
 */
private boolean shouldBeExtended(World worldIn, BlockPos pos, EnumFacing facing)
{
//any side except the facing (the arm side cannot power the piston)
for (EnumFacing enumfacing : EnumFacing.values())
{
if (enumfacing != facing && worldIn.isSidePowered(pos.offset(enumfacing), enumfacing))
{
return true;
}
}
if (worldIn.isSidePowered(pos, EnumFacing.DOWN))
{
return true;
}
else
{
//also consider power reaching the position directly above
BlockPos blockpos = pos.up();
for (EnumFacing enumfacing1 : EnumFacing.values())
{
if (enumfacing1 != EnumFacing.DOWN && worldIn.isSidePowered(blockpos.offset(enumfacing1), enumfacing1))
{
return true;
}
}
return false;
}
}

/**
 * Called on server when World#addBlockEvent is called. If server returns true, then also called on the client. On
 * the Server, this may perform additional changes to the world, like pistons replacing the block with an extended
 * base. On the client, the update may involve replacing tile entities or effects such as sounds or particles
 */
public boolean eventReceived(IBlockState state, World worldIn, BlockPos pos, int id, int param)
{
EnumFacing enumfacing = (EnumFacing)state.getValue(FACING);
if (!worldIn.isRemote)
{
//re-validate against the current redstone state: the world may have
//changed between scheduling the event and processing it
boolean flag = this.shouldBeExtended(worldIn, pos, enumfacing);
if (flag && id == 1)
{
//powered again before retraction ran: just mark extended, cancel event
worldIn.setBlockState(pos, state.withProperty(EXTENDED, Boolean.valueOf(true)), 2);
return false;
}
if (!flag && id == 0)
{
//no longer powered: cancel the pending extension
return false;
}
}
if (id == 0) //extend
{
if (!this.doMove(worldIn, pos, enumfacing, true))
{
return false;
}
worldIn.setBlockState(pos, state.withProperty(EXTENDED, Boolean.valueOf(true)), 3);
worldIn.playSound((EntityPlayer)null, pos, SoundEvents.BLOCK_PISTON_EXTEND, SoundCategory.BLOCKS, 0.5F, worldIn.rand.nextFloat() * 0.25F + 0.6F);
}
else if (id == 1) //retract
{
//finalize any in-progress arm animation in front of the piston
TileEntity tileentity1 = worldIn.getTileEntity(pos.offset(enumfacing));
if (tileentity1 instanceof TileEntityPiston)
{
((TileEntityPiston)tileentity1).clearPistonTileEntity();
}
//replace the base itself with a retracting moving-piston tile entity
worldIn.setBlockState(pos, Blocks.PISTON_EXTENSION.getDefaultState().withProperty(BlockPistonMoving.FACING, enumfacing).withProperty(BlockPistonMoving.TYPE, this.isSticky ? BlockPistonExtension.EnumPistonType.STICKY : BlockPistonExtension.EnumPistonType.DEFAULT), 3);
worldIn.setTileEntity(pos, BlockPistonMoving.createTilePiston(this.getStateFromMeta(param), enumfacing, false, true));
if (this.isSticky)
{
//block two positions ahead is the pull candidate
BlockPos blockpos = pos.add(enumfacing.getFrontOffsetX() * 2, enumfacing.getFrontOffsetY() * 2, enumfacing.getFrontOffsetZ() * 2);
IBlockState iblockstate = worldIn.getBlockState(blockpos);
Block block = iblockstate.getBlock();
boolean flag1 = false;
if (block == Blocks.PISTON_EXTENSION)
{
TileEntity tileentity = worldIn.getTileEntity(blockpos);
if (tileentity instanceof TileEntityPiston)
{
TileEntityPiston tileentitypiston = (TileEntityPiston)tileentity;
if (tileentitypiston.getFacing() == enumfacing && tileentitypiston.isExtending())
{
//a block is still moving towards us; snap it into place instead of pulling
tileentitypiston.clearPistonTileEntity();
flag1 = true;
}
}
}
if (!flag1 && !iblockstate.getBlock().isAir(iblockstate, worldIn, blockpos) && canPush(iblockstate, worldIn, blockpos, enumfacing.getOpposite(), false, enumfacing) && (iblockstate.getMobilityFlag() == EnumPushReaction.NORMAL || block == Blocks.PISTON || block == Blocks.STICKY_PISTON))
{
//pull the candidate block back with the retracting arm
this.doMove(worldIn, pos, enumfacing, false);
}
}
else
{
//non-sticky: simply remove the arm block
worldIn.setBlockToAir(pos.offset(enumfacing));
}
worldIn.playSound((EntityPlayer)null, pos, SoundEvents.BLOCK_PISTON_CONTRACT, SoundCategory.BLOCKS, 0.5F, worldIn.rand.nextFloat() * 0.15F + 0.6F);
}
return true;
}

//Never a full cube for render/logic purposes (arm side may be open).
public boolean isFullCube(IBlockState state)
{
return false;
}

/**
 * Decodes the facing from the lower 3 bits of the metadata.
 *
 * @return the facing, or null for the invalid encodings 6 and 7
 */
@Nullable
public static EnumFacing getFacing(int meta)
{
int i = meta & 7;
return i > 5 ? null : EnumFacing.getFront(i);
}

/**
 * Checks if the piston can push the given BlockState.
 */
public static boolean canPush(IBlockState blockStateIn, World worldIn, BlockPos pos, EnumFacing facing, boolean destroyBlocks, EnumFacing p_185646_5_)
{
Block block = blockStateIn.getBlock();
if (block == Blocks.OBSIDIAN)
{
return false;
}
else if (!worldIn.getWorldBorder().contains(pos))
{
return false;
}
else if (pos.getY() >= 0 && (facing != EnumFacing.DOWN || pos.getY() != 0))
{
//cannot push below bedrock level or above the build height
if (pos.getY() <= worldIn.getHeight() - 1 && (facing != EnumFacing.UP || pos.getY() != worldIn.getHeight() - 1))
{
if (block != Blocks.PISTON && block != Blocks.STICKY_PISTON)
{
//unbreakable blocks (hardness -1) are immovable
if (blockStateIn.getBlockHardness(worldIn, pos) == -1.0F)
{
return false;
}
switch (blockStateIn.getMobilityFlag())
{
case BLOCK:
return false;
case DESTROY:
//destroyable blocks only count as movable when destruction is allowed
return destroyBlocks;
case PUSH_ONLY:
//push-only blocks move only in the piston's own direction
//(p_185646_5_ is the piston facing — see the sticky-pull caller)
return facing == p_185646_5_;
}
//NORMAL falls through to the tile entity check below
}
else if (((Boolean)blockStateIn.getValue(EXTENDED)).booleanValue())
{
//an extended piston cannot be moved
return false;
}
//blocks with tile entities are never movable
return !block.hasTileEntity(blockStateIn);
}
else
{
return false;
}
}
else
{
return false;
}
}

/**
 * Performs the actual move: destroys blocks marked for destruction, converts
 * all moved blocks into moving-piston tile entities shifted one position in
 * the move direction, and notifies neighbors afterwards.
 *
 * @param extending true when extending (push), false when retracting (pull)
 * @return false if the affected structure cannot move
 */
private boolean doMove(World worldIn, BlockPos pos, EnumFacing direction, boolean extending)
{
if (!extending)
{
//remove the piston head before computing the pulled structure
worldIn.setBlockToAir(pos.offset(direction));
}
BlockPistonStructureHelper blockpistonstructurehelper = new BlockPistonStructureHelper(worldIn, pos, direction, extending);
if (!blockpistonstructurehelper.canMove())
{
return false;
}
else
{
//snapshot the actual states of all blocks to be moved
List<BlockPos> list = blockpistonstructurehelper.getBlocksToMove();
List<IBlockState> list1 = Lists.<IBlockState>newArrayList();
for (int i = 0; i < list.size(); ++i)
{
BlockPos blockpos = list.get(i);
list1.add(worldIn.getBlockState(blockpos).getActualState(worldIn, blockpos));
}
List<BlockPos> list2 = blockpistonstructurehelper.getBlocksToDestroy();
//aiblockstate collects all removed states (filled back-to-front via --k,
//drained front-to-back via k++ for the neighbor notifications below)
int k = list.size() + list2.size();
IBlockState[] aiblockstate = new IBlockState[k];
//blocks are moved towards 'direction' on extend, away from it on retract
EnumFacing enumfacing = extending ? direction : direction.getOpposite();
//drop and clear all blocks marked for destruction
for (int j = list2.size() - 1; j >= 0; --j)
{
BlockPos blockpos1 = list2.get(j);
IBlockState iblockstate = worldIn.getBlockState(blockpos1);
// Forge: With our change to how snowballs are dropped this needs to disallow to mimic vanilla behavior.
float chance = iblockstate.getBlock() instanceof BlockSnow ? -1.0f : 1.0f;
iblockstate.getBlock().dropBlockAsItemWithChance(worldIn, blockpos1, iblockstate, chance, 0);
worldIn.setBlockState(blockpos1, Blocks.AIR.getDefaultState(), 4);
--k;
aiblockstate[k] = iblockstate;
}
//replace each moved block with a moving-piston tile entity one step ahead
for (int l = list.size() - 1; l >= 0; --l)
{
BlockPos blockpos3 = list.get(l);
IBlockState iblockstate2 = worldIn.getBlockState(blockpos3);
worldIn.setBlockState(blockpos3, Blocks.AIR.getDefaultState(), 2);
blockpos3 = blockpos3.offset(enumfacing);
worldIn.setBlockState(blockpos3, Blocks.PISTON_EXTENSION.getDefaultState().withProperty(FACING, direction), 4);
worldIn.setTileEntity(blockpos3, BlockPistonMoving.createTilePiston(list1.get(l), direction, extending, false));
--k;
aiblockstate[k] = iblockstate2;
}
BlockPos blockpos2 = pos.offset(direction);
if (extending)
{
//place the extending piston head as a moving-piston tile entity
BlockPistonExtension.EnumPistonType blockpistonextension$enumpistontype = this.isSticky ? BlockPistonExtension.EnumPistonType.STICKY : BlockPistonExtension.EnumPistonType.DEFAULT;
IBlockState iblockstate3 = Blocks.PISTON_HEAD.getDefaultState().withProperty(BlockPistonExtension.FACING, direction).withProperty(BlockPistonExtension.TYPE, blockpistonextension$enumpistontype);
IBlockState iblockstate1 = Blocks.PISTON_EXTENSION.getDefaultState().withProperty(BlockPistonMoving.FACING, direction).withProperty(BlockPistonMoving.TYPE, this.isSticky ? BlockPistonExtension.EnumPistonType.STICKY : BlockPistonExtension.EnumPistonType.DEFAULT);
worldIn.setBlockState(blockpos2, iblockstate1, 4);
worldIn.setTileEntity(blockpos2, BlockPistonMoving.createTilePiston(iblockstate3, direction, true, true));
}
//notify neighbors of all removed/moved blocks, in the order recorded above
for (int i1 = list2.size() - 1; i1 >= 0; --i1)
{
worldIn.notifyNeighborsOfStateChange(list2.get(i1), aiblockstate[k++].getBlock(), false);
}
for (int j1 = list.size() - 1; j1 >= 0; --j1)
{
worldIn.notifyNeighborsOfStateChange(list.get(j1), aiblockstate[k++].getBlock(), false);
}
if (extending)
{
worldIn.notifyNeighborsOfStateChange(blockpos2, Blocks.PISTON_HEAD, false);
}
return true;
}
}

/**
 * Convert the given metadata into a BlockState for this Block
 */
public IBlockState getStateFromMeta(int meta)
{
//bits 0-2: facing; bit 3: extended
return this.getDefaultState().withProperty(FACING, getFacing(meta)).withProperty(EXTENDED, Boolean.valueOf((meta & 8) > 0));
}

/**
 * Convert the BlockState into the correct metadata value
 */
public int getMetaFromState(IBlockState state)
{
int i = 0;
i = i | ((EnumFacing)state.getValue(FACING)).getIndex();
if (((Boolean)state.getValue(EXTENDED)).booleanValue())
{
i |= 8;
}
return i;
}

/**
 * Returns the blockstate with the given rotation from the passed blockstate. If inapplicable, returns the passed
 * blockstate.
 */
public IBlockState withRotation(IBlockState state, Rotation rot)
{
return state.withProperty(FACING, rot.rotate((EnumFacing)state.getValue(FACING)));
}

/**
 * Returns the blockstate with the given mirror of the passed blockstate. If inapplicable, returns the passed
 * blockstate.
 */
public IBlockState withMirror(IBlockState state, Mirror mirrorIn)
{
return state.withRotation(mirrorIn.toRotation((EnumFacing)state.getValue(FACING)));
}

protected BlockStateContainer createBlockState()
{
return new BlockStateContainer(this, new IProperty[] {FACING, EXTENDED});
}

/* ======================================== FORGE START =====================================*/
//Forge hook: disallow wrench-rotation while the piston is extended.
public boolean rotateBlock(World world, BlockPos pos, EnumFacing axis)
{
IBlockState state = world.getBlockState(pos);
return !state.getValue(EXTENDED) && super.rotateBlock(world, pos, axis);
}

//Solid on all faces except the open arm side while extended.
public BlockFaceShape getBlockFaceShape(IBlockAccess p_193383_1_, IBlockState p_193383_2_, BlockPos p_193383_3_, EnumFacing p_193383_4_)
{
p_193383_2_ = this.getActualState(p_193383_2_, p_193383_1_, p_193383_3_);
return p_193383_2_.getValue(FACING) != p_193383_4_.getOpposite() && ((Boolean)p_193383_2_.getValue(EXTENDED)).booleanValue() ? BlockFaceShape.UNDEFINED : BlockFaceShape.SOLID;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.auth;
import java.util.*;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.schema.SchemaConstants;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.cql3.statements.BatchStatement;
import org.apache.cassandra.cql3.statements.ModificationStatement;
import org.apache.cassandra.cql3.statements.SelectStatement;
import org.apache.cassandra.db.ConsistencyLevel;
import org.apache.cassandra.db.marshal.UTF8Type;
import org.apache.cassandra.exceptions.*;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.cql3.QueryOptions;
import org.apache.cassandra.cql3.QueryProcessor;
import org.apache.cassandra.cql3.UntypedResultSet;
import org.apache.cassandra.service.QueryState;
import org.apache.cassandra.transport.messages.ResultMessage;
import org.apache.cassandra.utils.ByteBufferUtil;
/**
* CassandraAuthorizer is an IAuthorizer implementation that keeps
* user permissions internally in C* using the system_auth.role_permissions
* table.
*/
public class CassandraAuthorizer implements IAuthorizer
{
private static final Logger logger = LoggerFactory.getLogger(CassandraAuthorizer.class);

// Column names of the system_auth.role_permissions table
private static final String ROLE = "role";
private static final String RESOURCE = "resource";
private static final String PERMISSIONS = "permissions";

// Prepared SELECT used by authorize(); presumably initialized during
// authorizer setup — TODO confirm (initialization is outside this excerpt)
private SelectStatement authorizeRoleStatement;
// No-arg constructor; presumably required for pluggable authorizer
// instantiation from configuration — confirm. Statement preparation
// happens elsewhere, not here.
public CassandraAuthorizer()
{
}
// Returns every permission on the resource granted to the user either directly
// or indirectly via roles granted to the user.
public Set<Permission> authorize(AuthenticatedUser user, IResource resource)
{
    // Superusers implicitly hold every permission applicable to the resource.
    if (user.isSuper())
        return resource.applicablePermissions();

    // Union the permissions granted to each of the user's roles.
    Set<Permission> granted = EnumSet.noneOf(Permission.class);
    for (RoleResource roleResource : user.getRoles())
        addPermissionsForRole(granted, resource, roleResource);
    return granted;
}
// Grants the permissions on the resource to the grantee role:
// adds them to the role_permissions set column ("+") and maintains the
// reverse (resource -> role) lookup table via addLookupEntry (defined
// elsewhere in this class — presumably an index-table insert; confirm).
public void grant(AuthenticatedUser performer, Set<Permission> permissions, IResource resource, RoleResource grantee)
throws RequestValidationException, RequestExecutionException
{
modifyRolePermissions(permissions, resource, grantee, "+");
addLookupEntry(resource, grantee);
}
// Revokes the permissions on the resource from the revokee role:
// removes them from the role_permissions set column ("-") and removes the
// reverse (resource -> role) lookup entry.
public void revoke(AuthenticatedUser performer, Set<Permission> permissions, IResource resource, RoleResource revokee)
throws RequestValidationException, RequestExecutionException
{
modifyRolePermissions(permissions, resource, revokee, "-");
removeLookupEntry(resource, revokee);
}
// Called when deleting a role with DROP ROLE query.
// Internal hook, so no permission checks are needed here.
// Executes a logged batch removing the granted permissions
// for the role as well as the entries from the reverse index
// table
public void revokeAllFrom(RoleResource revokee)
{
try
{
// Look up every resource on which the role holds permissions.
UntypedResultSet rows = process(String.format("SELECT resource FROM %s.%s WHERE role = '%s'",
SchemaConstants.AUTH_KEYSPACE_NAME,
AuthKeyspace.ROLE_PERMISSIONS,
escape(revokee.getRoleName())));

// One DELETE per resource from the reverse (resource -> role) index...
List<CQLStatement> statements = new ArrayList<>();
for (UntypedResultSet.Row row : rows)
{
statements.add(
QueryProcessor.getStatement(String.format("DELETE FROM %s.%s WHERE resource = '%s' AND role = '%s'",
SchemaConstants.AUTH_KEYSPACE_NAME,
AuthKeyspace.RESOURCE_ROLE_INDEX,
escape(row.getString("resource")),
escape(revokee.getRoleName())),
ClientState.forInternalCalls()).statement);
}

// ...plus the removal of the role's entire permissions row.
statements.add(QueryProcessor.getStatement(String.format("DELETE FROM %s.%s WHERE role = '%s'",
SchemaConstants.AUTH_KEYSPACE_NAME,
AuthKeyspace.ROLE_PERMISSIONS,
escape(revokee.getRoleName())),
ClientState.forInternalCalls()).statement);

executeLoggedBatch(statements);
}
catch (RequestExecutionException | RequestValidationException e)
{
// Best-effort cleanup: a failure here must not fail the DROP ROLE itself.
logger.warn("CassandraAuthorizer failed to revoke all permissions of {}: {}",
revokee.getRoleName(),
e.getMessage());
}
}
// Called after a resource is removed (DROP KEYSPACE, DROP TABLE, etc.).
// Execute a logged batch removing all the permissions for the resource
// as well as the index table entry
public void revokeAllOn(IResource droppedResource)
{
try
{
// Find every role holding a permission on the dropped resource via
// the reverse (resource -> role) index table.
UntypedResultSet rows = process(String.format("SELECT role FROM %s.%s WHERE resource = '%s'",
SchemaConstants.AUTH_KEYSPACE_NAME,
AuthKeyspace.RESOURCE_ROLE_INDEX,
escape(droppedResource.getName())));

// One DELETE per granted role from the permissions table...
List<CQLStatement> statements = new ArrayList<>();
for (UntypedResultSet.Row row : rows)
{
statements.add(QueryProcessor.getStatement(String.format("DELETE FROM %s.%s WHERE role = '%s' AND resource = '%s'",
SchemaConstants.AUTH_KEYSPACE_NAME,
AuthKeyspace.ROLE_PERMISSIONS,
escape(row.getString("role")),
escape(droppedResource.getName())),
ClientState.forInternalCalls()).statement);
}

// ...plus the removal of the index entry itself.
statements.add(QueryProcessor.getStatement(String.format("DELETE FROM %s.%s WHERE resource = '%s'",
SchemaConstants.AUTH_KEYSPACE_NAME,
AuthKeyspace.RESOURCE_ROLE_INDEX,
escape(droppedResource.getName())),
ClientState.forInternalCalls()).statement);

executeLoggedBatch(statements);
}
catch (RequestExecutionException | RequestValidationException e)
{
// Best-effort cleanup: log and continue, consistent with revokeAllFrom.
// (Fix: removed a redundant trailing 'return;' that ended this catch
// block — dead code in a void method.)
logger.warn("CassandraAuthorizer failed to revoke all permissions on {}: {}", droppedResource, e.getMessage());
}
}
// Executes the given statements as a single LOGGED batch so the
// permission-table and index-table mutations are applied atomically.
private void executeLoggedBatch(List<CQLStatement> statements)
throws RequestExecutionException, RequestValidationException
{
    // Only ModificationStatements may appear in a batch; any other statement
    // type in the input is silently filtered out here.
    BatchStatement batch = new BatchStatement(0,
                                              BatchStatement.Type.LOGGED,
                                              Lists.newArrayList(Iterables.filter(statements, ModificationStatement.class)),
                                              Attributes.none());
    QueryProcessor.instance.processBatch(batch,
                                         QueryState.forInternalCalls(),
                                         BatchQueryOptions.withoutPerStatementVariables(QueryOptions.DEFAULT),
                                         System.nanoTime());
}
// Add every permission on the resource granted to the role
// Queries role_permissions via the prepared authorizeRoleStatement (bound to
// (role, resource)) and accumulates the stored permission names into the
// caller-supplied set.
private void addPermissionsForRole(Set<Permission> permissions, IResource resource, RoleResource role)
throws RequestExecutionException, RequestValidationException
{
    // Bind values in the order the prepared statement expects: role, resource.
    QueryOptions options = QueryOptions.forInternalCalls(ConsistencyLevel.LOCAL_ONE,
                                                         Lists.newArrayList(ByteBufferUtil.bytes(role.getRoleName()),
                                                                            ByteBufferUtil.bytes(resource.getName())));
    ResultMessage.Rows rows = authorizeRoleStatement.execute(QueryState.forInternalCalls(), options, System.nanoTime());
    UntypedResultSet result = UntypedResultSet.create(rows.result);

    if (!result.isEmpty() && result.one().has(PERMISSIONS))
    {
        // Permissions are stored as a set<text> of Permission enum names.
        for (String perm : result.one().getSet(PERMISSIONS, UTF8Type.instance))
        {
            permissions.add(Permission.valueOf(perm));
        }
    }
}
// Adds or removes permissions from a role_permissions table (adds if op is "+", removes if op is "-")
// Permission enum names need no escaping; role and resource names do.
// NOTE(review): assumes 'permissions' is non-empty — an empty set would
// produce the literal {''} in the generated CQL. Confirm callers never
// pass an empty set here.
private void modifyRolePermissions(Set<Permission> permissions, IResource resource, RoleResource role, String op)
throws RequestExecutionException
{
    process(String.format("UPDATE %s.%s SET permissions = permissions %s {%s} WHERE role = '%s' AND resource = '%s'",
                          SchemaConstants.AUTH_KEYSPACE_NAME,
                          AuthKeyspace.ROLE_PERMISSIONS,
                          op,
                          "'" + StringUtils.join(permissions, "','") + "'",
                          escape(role.getRoleName()),
                          escape(resource.getName())));
}
// Removes an entry from the inverted index table (from resource -> role with defined permissions)
private void removeLookupEntry(IResource resource, RoleResource role) throws RequestExecutionException
{
    String resourceName = escape(resource.getName());
    String roleName = escape(role.getRoleName());
    String delete = String.format("DELETE FROM %s.%s WHERE resource = '%s' and role = '%s'",
                                  SchemaConstants.AUTH_KEYSPACE_NAME,
                                  AuthKeyspace.RESOURCE_ROLE_INDEX,
                                  resourceName,
                                  roleName);
    process(delete);
}
// Adds an entry to the inverted index table (from resource -> role with defined permissions)
private void addLookupEntry(IResource resource, RoleResource role) throws RequestExecutionException
{
    String resourceName = escape(resource.getName());
    String roleName = escape(role.getRoleName());
    String insert = String.format("INSERT INTO %s.%s (resource, role) VALUES ('%s','%s')",
                                  SchemaConstants.AUTH_KEYSPACE_NAME,
                                  AuthKeyspace.RESOURCE_ROLE_INDEX,
                                  resourceName,
                                  roleName);
    process(insert);
}
// 'of' can be null - in that case everyone's permissions have been requested. Otherwise only single user's.
// If the user requesting 'LIST PERMISSIONS' is not a superuser OR their username doesn't match 'of', we
// throw UnauthorizedException. So only a superuser can view everybody's permissions. Regular users are only
// allowed to see their own permissions.
public Set<PermissionDetails> list(AuthenticatedUser performer,
                                   Set<Permission> permissions,
                                   IResource resource,
                                   RoleResource grantee)
throws RequestValidationException, RequestExecutionException
{
    // Non-superusers may only list permissions of roles they themselves hold.
    if (!(performer.isSuper() || performer.isSystem()) && !performer.getRoles().contains(grantee))
        throw new UnauthorizedException(String.format("You are not authorized to view %s's permissions",
                                                      grantee == null ? "everyone" : grantee.getRoleName()));

    // grantee == null ("everyone"): buildListQuery omits the role restriction.
    if (null == grantee)
        return listPermissionsForRole(permissions, resource, grantee);

    // Include permissions inherited through the grantee's full role graph
    // (the 'true' flag requests transitive roles from the role manager).
    Set<RoleResource> roles = DatabaseDescriptor.getRoleManager().getRoles(grantee, true);
    Set<PermissionDetails> details = new HashSet<>();
    for (RoleResource role : roles)
        details.addAll(listPermissionsForRole(permissions, resource, role));
    return details;
}
// Reads the matching row(s) from role_permissions and expands each stored
// permission name into a PermissionDetails entry, keeping only permissions
// the caller asked about.
private Set<PermissionDetails> listPermissionsForRole(Set<Permission> permissions,
                                                      IResource resource,
                                                      RoleResource role)
throws RequestExecutionException
{
    Set<PermissionDetails> details = new HashSet<>();
    for (UntypedResultSet.Row row : process(buildListQuery(resource, role)))
    {
        // Skip rows without a permissions column.
        if (!row.has(PERMISSIONS))
            continue;

        for (String p : row.getSet(PERMISSIONS, UTF8Type.instance))
        {
            Permission permission = Permission.valueOf(p);
            if (!permissions.contains(permission))
                continue;
            details.add(new PermissionDetails(row.getString(ROLE),
                                              Resources.fromName(row.getString(RESOURCE)),
                                              permission));
        }
    }
    return details;
}
// Builds the SELECT used by listPermissionsForRole, restricting by resource
// and/or role when given. Restricting on resource alone requires
// ALLOW FILTERING since resource is not the partition key.
private String buildListQuery(IResource resource, RoleResource grantee)
{
    // 'vars' are substituted positionally into the assembled format string,
    // so additions here must stay in the same order as the '%s' placeholders.
    List<String> vars = Lists.newArrayList(SchemaConstants.AUTH_KEYSPACE_NAME, AuthKeyspace.ROLE_PERMISSIONS);
    List<String> conditions = new ArrayList<>();

    if (resource != null)
    {
        conditions.add("resource = '%s'");
        vars.add(escape(resource.getName()));
    }

    if (grantee != null)
    {
        conditions.add(ROLE + " = '%s'");
        vars.add(escape(grantee.getRoleName()));
    }

    String query = "SELECT " + ROLE + ", resource, permissions FROM %s.%s";

    if (!conditions.isEmpty())
        query += " WHERE " + StringUtils.join(conditions, " AND ");

    if (resource != null && grantee == null)
        query += " ALLOW FILTERING";

    return String.format(query, vars.toArray());
}
// The authorizer's own backing table must be protected from user modification.
public Set<DataResource> protectedResources()
{
    DataResource rolePermissionsTable = DataResource.table(SchemaConstants.AUTH_KEYSPACE_NAME, AuthKeyspace.ROLE_PERMISSIONS);
    return ImmutableSet.of(rolePermissionsTable);
}
// This authorizer has no configuration of its own to validate.
public void validateConfiguration() throws ConfigurationException
{
}
// Prepares the role_permissions SELECT used by addPermissionsForRole.
public void setup()
{
    authorizeRoleStatement = prepare(ROLE, AuthKeyspace.ROLE_PERMISSIONS);
}
// Parses the permissions-lookup SELECT for the given key column and table
// and returns it as an internal SelectStatement.
private SelectStatement prepare(String entityname, String permissionsTable)
{
    String query = String.format("SELECT permissions FROM %s.%s WHERE %s = ? AND resource = ?",
                                 SchemaConstants.AUTH_KEYSPACE_NAME,
                                 permissionsTable,
                                 entityname);
    CQLStatement parsed = QueryProcessor.getStatement(query, ClientState.forInternalCalls()).statement;
    return (SelectStatement) parsed;
}
// We only worry about one character ('). Make sure it's properly escaped.
// Doubles single quotes so the name can be safely embedded in a CQL string
// literal built by string formatting.
private String escape(String name)
{
    return StringUtils.replace(name, "'", "''");
}
// Convenience wrapper: runs the given CQL as an internal query at LOCAL_ONE.
private UntypedResultSet process(String query) throws RequestExecutionException
{
    return QueryProcessor.process(query, ConsistencyLevel.LOCAL_ONE);
}
}
| |
/* $Id: BufferedArrayInputStream.java 13086 2011-03-14 11:02:31Z ceriel $ */
package nl.esciencecenter.aether.io;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.ReadOnlyBufferException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * This is a complete implementation of <code>DataInputStream</code>.
 * It is built on top of an <code>InputStream</code>.
 * There is no need to put any buffering inbetween. This implementation
 * does all the buffering needed.
 */
public final class BufferedArrayInputStream extends DataInputStream {

    private static final boolean DEBUG = IOProperties.DEBUG;

    private static final Logger logger = LoggerFactory.getLogger(BufferedArrayInputStream.class);

    /** The buffer size. */
    private final int BUF_SIZE;

    /** The underlying <code>InputStream</code>. */
    private InputStream in;

    /** The buffer. */
    private byte[] buffer;

    // 'index' is the read offset of the first valid byte in 'buffer';
    // 'buffered_bytes' is the number of valid bytes from there on.
    private int index, buffered_bytes;

    /** Number of bytes read so far from the underlying layer. */
    private long bytes = 0;

    /** Object used to convert primitive types to bytes. */
    private Conversion conversion;

    /**
     * Constructor.
     * @param in the underlying <code>InStream</code>
     * @param bufSize the size of the input buffer in bytes
     */
    public BufferedArrayInputStream(InputStream in, int bufSize) {
        this.in = in;
        BUF_SIZE = bufSize;
        buffer = new byte[BUF_SIZE];
        conversion = Conversion.loadConversion(false);
    }

    /**
     * Constructor, using the default buffer size.
     * @param in the underlying <code>InStream</code>
     */
    public BufferedArrayInputStream(InputStream in) {
        this(in, IOProperties.BUFFER_SIZE);
    }

    /**
     * Returns the number of bytes delivered to the caller so far:
     * everything pulled from the underlying stream minus what is still
     * sitting unread in the buffer.
     */
    public long bytesRead() {
        return bytes - buffered_bytes;
    }

    /** Resets the byte counter so that bytesRead() starts again from 0. */
    public void resetBytesRead() {
        bytes = buffered_bytes;
    }

    private static final int min(int a, int b) {
        return (a > b) ? b : a;
    }

    /**
     * Reads one byte, InputStream-style: 0..255, or -1 at end of stream.
     */
    public final int read() throws IOException {
        try {
            int b = readByte();
            return (b & 0377); // mask to 0..255 (0377 octal == 0xFF)
        } catch (EOFException e) {
            return -1;
        }
    }

    private final void fillBuffer(int len) throws IOException {
        // This ensures that there are at least 'len' bytes in the buffer
        // PRECONDITION: 'index + buffered_bytes' should never be larger
        // than BUF_SIZE!!
        if (buffered_bytes >= len) {
            return;
        }
        if (buffered_bytes == 0) {
            index = 0;
        } else if (index + buffered_bytes > BUF_SIZE - len) {
            // not enough space for "len" more bytes: compact the buffer
            System.arraycopy(buffer, index, buffer, 0, buffered_bytes);
            index = 0;
        }
        while (buffered_bytes < len) {
            int n = in.read(buffer, index + buffered_bytes, BUF_SIZE
                    - (index + buffered_bytes));
            if (n < 0) {
                throw new java.io.EOFException("EOF encountered");
            }
            bytes += n;
            buffered_bytes += n;
        }
    }

    public final int available() throws IOException {
        return (buffered_bytes + in.available());
    }

    /**
     * Reads 'len' booleans into a[off..off+len). All readArray variants
     * follow the same pattern: drain whatever whole elements are buffered,
     * compact the leftover bytes, refill, and repeat until the remaining
     * elements fit in the buffer, then convert them in one go.
     */
    public void readArray(boolean[] a, int off, int len) throws IOException {
        if (DEBUG && logger.isDebugEnabled()) {
            logger.debug("readArray(boolean[" + off + " ... "
                    + (off + len) + "])");
        }

        int useable, converted;
        int to_convert = len * Constants.SIZEOF_BOOLEAN;

        while (buffered_bytes < to_convert) {
            // not enough data in the buffer
            if (buffered_bytes == 0) {
                index = 0;
                fillBuffer(min(BUF_SIZE, to_convert));
            } else {
                // first, copy the data we do have to 'a' .
                useable = buffered_bytes / Constants.SIZEOF_BOOLEAN;
                conversion.byte2boolean(buffer, index, a, off, useable);

                len -= useable;
                off += useable;

                converted = useable * Constants.SIZEOF_BOOLEAN;
                index += converted;
                buffered_bytes -= converted;
                to_convert -= converted;

                // second, copy the leftovers to the start of the buffer.
                for (int i = 0; i < buffered_bytes; i++) {
                    buffer[i] = buffer[index + i];
                }
                index = 0;

                // third, fill the buffer as far as possible.
                fillBuffer(min(BUF_SIZE, to_convert));
            }
        }

        // enough data in the buffer
        conversion.byte2boolean(buffer, index, a, off, len);
        buffered_bytes -= to_convert;
        index += to_convert;
    }

    public void readArray(byte[] a, int off, int len) throws IOException {
        if (DEBUG && logger.isDebugEnabled()) {
            logger.debug("readArray(byte[" + off + " ... " + (off + len)
                    + "])");
        }

        if (buffered_bytes >= len) {
            // data is already in the buffer.
            System.arraycopy(buffer, index, a, off, len);
            index += len;
            buffered_bytes -= len;
        } else {
            if (buffered_bytes != 0) {
                // first, copy the data we do have to 'a' .
                System.arraycopy(buffer, index, a, off, buffered_bytes);
            }
            // then read the remainder directly into 'a', bypassing the buffer
            int rd = buffered_bytes;
            index = 0;
            do {
                int n = in.read(a, off + rd, len - rd);
                if (n < 0) {
                    throw new java.io.EOFException("EOF encountered");
                }
                rd += n;
                bytes += n;
            } while (rd < len);

            buffered_bytes = 0;
        }
    }

    public void readArray(short[] a, int off, int len) throws IOException {
        if (DEBUG && logger.isDebugEnabled()) {
            // BUGFIX: message previously said "char[" for the short variant
            logger.debug("readArray(short[" + off + " ... " + (off + len)
                    + "])");
        }

        int useable, converted;
        int to_convert = len * Constants.SIZEOF_SHORT;

        while (buffered_bytes < to_convert) {
            // not enough data in the buffer
            if (buffered_bytes == 0) {
                index = 0;
                fillBuffer(min(BUF_SIZE, to_convert));
            } else {
                // first, copy the data we do have to 'a' .
                useable = buffered_bytes / Constants.SIZEOF_SHORT;
                conversion.byte2short(buffer, index, a, off, useable);

                len -= useable;
                off += useable;

                converted = useable * Constants.SIZEOF_SHORT;
                index += converted;
                buffered_bytes -= converted;
                to_convert -= converted;

                // second, copy the leftovers to the start of the buffer.
                for (int i = 0; i < buffered_bytes; i++) {
                    buffer[i] = buffer[index + i];
                }
                index = 0;

                // third, fill the buffer as far as possible.
                fillBuffer(min(BUF_SIZE, to_convert));
            }
        }

        // enough data in the buffer
        conversion.byte2short(buffer, index, a, off, len);
        buffered_bytes -= to_convert;
        index += to_convert;
    }

    public void readArray(char[] a, int off, int len) throws IOException {
        if (DEBUG && logger.isDebugEnabled()) {
            logger.debug("readArray(char[" + off + " ... " + (off + len)
                    + "])");
        }

        int useable, converted;
        int to_convert = len * Constants.SIZEOF_CHAR;

        while (buffered_bytes < to_convert) {
            // not enough data in the buffer
            if (buffered_bytes == 0) {
                index = 0;
                fillBuffer(min(BUF_SIZE, to_convert));
            } else {
                // first, copy the data we do have to 'a' .
                useable = buffered_bytes / Constants.SIZEOF_CHAR;
                conversion.byte2char(buffer, index, a, off, useable);

                len -= useable;
                off += useable;

                converted = useable * Constants.SIZEOF_CHAR;
                index += converted;
                buffered_bytes -= converted;
                to_convert -= converted;

                // second, copy the leftovers to the start of the buffer.
                for (int i = 0; i < buffered_bytes; i++) {
                    buffer[i] = buffer[index + i];
                }
                index = 0;

                // third, fill the buffer as far as possible.
                fillBuffer(min(BUF_SIZE, to_convert));
            }
        }

        // enough data in the buffer
        conversion.byte2char(buffer, index, a, off, len);
        buffered_bytes -= to_convert;
        index += to_convert;
    }

    public void readArray(int[] a, int off, int len) throws IOException {
        if (DEBUG && logger.isDebugEnabled()) {
            logger.debug("readArray(int[" + off + " ... " + (off + len)
                    + "])");
        }

        int useable, converted;
        int to_convert = len * Constants.SIZEOF_INT;

        while (buffered_bytes < to_convert) {
            // not enough data in the buffer
            if (buffered_bytes == 0) {
                index = 0;
                fillBuffer(min(BUF_SIZE, to_convert));
            } else {
                // first, copy the data we do have to 'a' .
                useable = buffered_bytes / Constants.SIZEOF_INT;
                conversion.byte2int(buffer, index, a, off, useable);

                len -= useable;
                off += useable;

                converted = useable * Constants.SIZEOF_INT;
                index += converted;
                buffered_bytes -= converted;
                to_convert -= converted;

                // second, copy the leftovers to the start of the buffer.
                for (int i = 0; i < buffered_bytes; i++) {
                    buffer[i] = buffer[index + i];
                }
                index = 0;

                // third, fill the buffer as far as possible.
                fillBuffer(min(BUF_SIZE, to_convert));
            }
        }

        // enough data in the buffer
        conversion.byte2int(buffer, index, a, off, len);
        buffered_bytes -= to_convert;
        index += to_convert;
    }

    public void readArray(long[] a, int off, int len) throws IOException {
        if (DEBUG && logger.isDebugEnabled()) {
            logger.debug("readArray(long[" + off + " ... " + (off + len)
                    + "])");
        }

        int useable, converted;
        int to_convert = len * Constants.SIZEOF_LONG;

        while (buffered_bytes < to_convert) {
            // not enough data in the buffer
            if (buffered_bytes == 0) {
                index = 0;
                fillBuffer(min(BUF_SIZE, to_convert));
            } else {
                // first, copy the data we do have to 'a' .
                useable = buffered_bytes / Constants.SIZEOF_LONG;
                conversion.byte2long(buffer, index, a, off, useable);

                len -= useable;
                off += useable;

                converted = useable * Constants.SIZEOF_LONG;
                index += converted;
                buffered_bytes -= converted;
                to_convert -= converted;

                // second, copy the leftovers to the start of the buffer.
                for (int i = 0; i < buffered_bytes; i++) {
                    buffer[i] = buffer[index + i];
                }
                index = 0;

                // third, fill the buffer as far as possible.
                fillBuffer(min(BUF_SIZE, to_convert));
            }
        }

        // enough data in the buffer
        conversion.byte2long(buffer, index, a, off, len);
        buffered_bytes -= to_convert;
        index += to_convert;
    }

    public void readArray(float[] a, int off, int len) throws IOException {
        if (DEBUG && logger.isDebugEnabled()) {
            logger.debug("readArray(float[" + off + " ... " + (off + len)
                    + "])");
        }

        int useable, converted;
        int to_convert = len * Constants.SIZEOF_FLOAT;

        while (buffered_bytes < to_convert) {
            // not enough data in the buffer
            if (buffered_bytes == 0) {
                index = 0;
                fillBuffer(min(BUF_SIZE, to_convert));
            } else {
                // first, copy the data we do have to 'a' .
                useable = buffered_bytes / Constants.SIZEOF_FLOAT;
                conversion.byte2float(buffer, index, a, off, useable);

                len -= useable;
                off += useable;

                converted = useable * Constants.SIZEOF_FLOAT;
                index += converted;
                buffered_bytes -= converted;
                to_convert -= converted;

                // second, copy the leftovers to the start of the buffer.
                for (int i = 0; i < buffered_bytes; i++) {
                    buffer[i] = buffer[index + i];
                }
                index = 0;

                // third, fill the buffer as far as possible.
                fillBuffer(min(BUF_SIZE, to_convert));
            }
        }

        // enough data in the buffer
        conversion.byte2float(buffer, index, a, off, len);
        buffered_bytes -= to_convert;
        index += to_convert;
    }

    public void readArray(double[] a, int off, int len) throws IOException {
        if (DEBUG && logger.isDebugEnabled()) {
            logger.debug("readArray(double[" + off + " ... "
                    + (off + len) + "])");
        }

        int useable, converted;
        int to_convert = len * Constants.SIZEOF_DOUBLE;

        while (buffered_bytes < to_convert) {
            // not enough data in the buffer
            if (buffered_bytes == 0) {
                index = 0;
                fillBuffer(min(BUF_SIZE, to_convert));
            } else {
                // first, copy the data we do have to 'a' .
                useable = buffered_bytes / Constants.SIZEOF_DOUBLE;
                conversion.byte2double(buffer, index, a, off, useable);

                len -= useable;
                off += useable;

                converted = useable * Constants.SIZEOF_DOUBLE;
                index += converted;
                buffered_bytes -= converted;
                to_convert -= converted;

                // second, copy the leftovers to the start of the buffer.
                for (int i = 0; i < buffered_bytes; i++) {
                    buffer[i] = buffer[index + i];
                }
                index = 0;

                // third, fill the buffer as far as possible.
                fillBuffer(min(BUF_SIZE, to_convert));
            }
        }

        // enough data in the buffer
        conversion.byte2double(buffer, index, a, off, len);
        buffered_bytes -= to_convert;
        index += to_convert;
    }

    public byte readByte() throws IOException {
        fillBuffer(1);
        buffered_bytes--;
        return buffer[index++];
    }

    public boolean readBoolean() throws IOException {
        fillBuffer(1);
        buffered_bytes--;
        return conversion.byte2boolean(buffer[index++]);
    }

    public char readChar() throws IOException {
        char v;
        fillBuffer(Constants.SIZEOF_CHAR);
        v = conversion.byte2char(buffer, index);
        index += Constants.SIZEOF_CHAR;
        buffered_bytes -= Constants.SIZEOF_CHAR;
        return v;
    }

    public short readShort() throws IOException {
        short v;
        fillBuffer(Constants.SIZEOF_SHORT);
        v = conversion.byte2short(buffer, index);
        index += Constants.SIZEOF_SHORT;
        buffered_bytes -= Constants.SIZEOF_SHORT;
        return v;
    }

    public int readInt() throws IOException {
        int v;
        fillBuffer(Constants.SIZEOF_INT);
        v = conversion.byte2int(buffer, index);
        index += Constants.SIZEOF_INT;
        buffered_bytes -= Constants.SIZEOF_INT;
        return v;
    }

    public long readLong() throws IOException {
        long v;
        fillBuffer(Constants.SIZEOF_LONG);
        v = conversion.byte2long(buffer, index);
        index += Constants.SIZEOF_LONG;
        buffered_bytes -= Constants.SIZEOF_LONG;
        return v;
    }

    public float readFloat() throws IOException {
        float v;
        fillBuffer(Constants.SIZEOF_FLOAT);
        v = conversion.byte2float(buffer, index);
        index += Constants.SIZEOF_FLOAT;
        buffered_bytes -= Constants.SIZEOF_FLOAT;
        return v;
    }

    public double readDouble() throws IOException {
        double v;
        fillBuffer(Constants.SIZEOF_DOUBLE);
        v = conversion.byte2double(buffer, index);
        index += Constants.SIZEOF_DOUBLE;
        buffered_bytes -= Constants.SIZEOF_DOUBLE;
        return v;
    }

    public int read(byte[] b) throws IOException {
        return read(b, 0, b.length);
    }

    public int read(byte[] a, int off, int len) throws IOException {
        if (DEBUG && logger.isDebugEnabled()) {
            logger.debug("read(byte[" + off + " ... " + (off + len)
                    + "])");
        }

        if (buffered_bytes >= len) {
            // data is already in the buffer.
            System.arraycopy(buffer, index, a, off, len);
            index += len;
            buffered_bytes -= len;
        } else {
            if (buffered_bytes != 0) {
                // first, copy the data we do have to 'a' .
                System.arraycopy(buffer, index, a, off, buffered_bytes);
            }
            int rd = buffered_bytes;
            index = 0;
            // NOTE(review): when EOF is hit before any byte was copied this
            // returns 0, not -1 as the InputStream contract would suggest —
            // confirm whether callers depend on this before changing it.
            do {
                int n = in.read(a, off + rd, len - rd);
                if (n < 0) {
                    len = rd;
                }
                else {
                    rd += n;
                    bytes += n;
                }
            } while (rd < len);

            buffered_bytes = 0;
        }
        return len;
    }

    public void close() throws IOException {
        in.close();
    }

    public int bufferSize() {
        return BUF_SIZE;
    }

    /**
     * Reads bytes into the given ByteBuffer, filling it from its current
     * position up to its limit.
     *
     * @param value destination buffer
     * @throws IOException on read error or EOF before the buffer is filled
     * @throws ReadOnlyBufferException if the buffer is read-only
     */
    public void readByteBuffer(ByteBuffer value) throws IOException,
            ReadOnlyBufferException {

        int len = value.limit() - value.position();

        if (buffered_bytes >= len) {
            // data is already in the buffer.
            value.put(buffer, index, len);
            index += len;
            buffered_bytes -= len;
        } else {
            if (buffered_bytes != 0) {
                // first, copy the data we do have to 'value'.
                value.put(buffer, index, buffered_bytes);
                len -= buffered_bytes;
                buffered_bytes = 0;
            }
            index = 0;
            if (value.hasArray()) {
                // Read the remainder directly into the buffer's backing array.
                // BUGFIX (two defects in the original single-read version):
                // 1. in.read() may return fewer bytes than requested, so we
                //    must loop until 'len' bytes have been read (or EOF);
                // 2. the destination offset must include the buffer's current
                //    position, not just arrayOffset(), or previously copied
                //    bytes are overwritten and the tail is left unfilled.
                byte[] array = value.array();
                int dest = value.arrayOffset() + value.position();
                int rd = 0;
                while (rd < len) {
                    int n = in.read(array, dest + rd, len - rd);
                    if (n < 0) {
                        throw new EOFException("EOF encountered");
                    }
                    rd += n;
                    bytes += n;
                }
                value.position(value.limit());
            } else {
                // No accessible backing array: stage through our own buffer.
                do {
                    int toread = Math.min(len, BUF_SIZE);
                    fillBuffer(toread);
                    if (len < buffered_bytes) {
                        toread = len;
                    } else {
                        toread = buffered_bytes;
                    }
                    value.put(buffer, index, toread);
                    len -= toread;
                    index += toread;
                    buffered_bytes -= toread;
                } while (len > 0);
            }
        }
    }
}
| |
package gov.nih.nci.cagrid.data.ui.table;
import gov.nih.nci.cagrid.common.portal.DocumentChangeAdapter;
import gov.nih.nci.cagrid.data.DataServiceConstants;
import gov.nih.nci.cagrid.data.cql.CQLQueryProcessor;
import gov.nih.nci.cagrid.introduce.beans.property.ServiceProperties;
import gov.nih.nci.cagrid.introduce.beans.property.ServicePropertiesProperty;
import gov.nih.nci.cagrid.introduce.common.CommonTools;
import gov.nih.nci.cagrid.introduce.common.FileFilters;
import gov.nih.nci.cagrid.introduce.common.ServiceInformation;
import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.Enumeration;
import java.util.Properties;
import java.util.Set;
import javax.swing.DefaultCellEditor;
import javax.swing.JTable;
import javax.swing.JTextField;
import javax.swing.event.DocumentEvent;
import javax.swing.table.DefaultTableModel;
import org.cagrid.grape.utils.CompositeErrorDialog;
/**
 * QueryProcessorParametersTable
 * Table for configuring and displaying query
 * processor parameters
 *
 * @author <A HREF="MAILTO:ervin@bmi.osu.edu">David W. Ervin</A>
 * @created Oct 10, 2006
 * @version $Id: QueryProcessorParametersTable.java,v 1.1 2007/07/12 17:20:52
 *          dervin Exp $
 */
public class QueryProcessorParametersTable extends JTable {

    private ServiceInformation serviceInfo;
    private JTextField editorTextField = null;
    private Set<String> propertiesFromEtc = null;

    public QueryProcessorParametersTable(ServiceInformation serviceInfo) {
        super(createModel());
        this.serviceInfo = serviceInfo;
        setDefaultEditor(Object.class, new DefaultCellEditor(getEditorTextField()));
        try {
            populateProperties();
        } catch (Throwable ex) {
            ex.printStackTrace();
            CompositeErrorDialog.showErrorDialog("Error populating query processor properties",
                ex.getMessage(), ex);
        }
    }

    /**
     * Lazily creates the text field used as the cell editor for the
     * "Current Value" column; every edit is pushed into the model and
     * committed to the service properties immediately.
     */
    private JTextField getEditorTextField() {
        if (editorTextField == null) {
            editorTextField = new JTextField();
            editorTextField.getDocument().addDocumentListener(new DocumentChangeAdapter() {
                public void documentEdited(DocumentEvent e) {
                    int row = getSelectedRow();
                    int column = getSelectedColumn();
                    // Guard: document events can fire when no cell is
                    // selected (getSelectedRow/Column return -1).
                    if (row < 0 || column < 0) {
                        return;
                    }
                    setValueAt(editorTextField.getText(), row, column);
                    try {
                        storeProperties();
                    } catch (Exception ex) {
                        ex.printStackTrace();
                        CompositeErrorDialog.showErrorDialog("Error storing query processor properties!",
                            ex.getMessage(), ex);
                    }
                }
            });
        }
        return editorTextField;
    }

    /**
     * Rebuilds the table from the currently selected query processor class:
     * each required parameter is listed with its default value and its
     * configured value (falling back to the default when not configured).
     *
     * @throws Throwable if the processor class cannot be loaded or instantiated
     */
    public void populateProperties() throws Throwable {
        clearTable();
        // get the selected class
        Class<?> selected = getQueryProcessorClass();
        if (selected != null) {
            // get an instance of the class
            CQLQueryProcessor proc = (CQLQueryProcessor) selected.newInstance();
            // get the default parameters
            Properties defaultProps = proc.getRequiredParameters();
            // get the parameters required to be from etc
            this.propertiesFromEtc = proc.getPropertiesFromEtc();
            // get any existing configured parameters
            Properties configuredProps = new Properties();
            ServiceProperties serviceProps = serviceInfo.getServiceProperties();
            if (serviceProps != null && serviceProps.getProperty() != null) {
                for (ServicePropertiesProperty property : serviceProps.getProperty()) {
                    String rawKey = property.getKey();
                    if (rawKey.startsWith(DataServiceConstants.QUERY_PROCESSOR_CONFIG_PREFIX)) {
                        // strip the prefix to recover the raw parameter name
                        String key = rawKey.substring(
                            DataServiceConstants.QUERY_PROCESSOR_CONFIG_PREFIX.length());
                        configuredProps.setProperty(key, property.getValue());
                    }
                }
            }
            // update the display of properties
            Enumeration<?> propKeys = defaultProps.keys();
            while (propKeys.hasMoreElements()) {
                String key = (String) propKeys.nextElement();
                String def = defaultProps.getProperty(key);
                String val = null;
                if (configuredProps.containsKey(key)) {
                    // property has been configured
                    val = configuredProps.getProperty(key);
                } else {
                    // fall back to the default
                    val = defaultProps.getProperty(key);
                }
                ((DefaultTableModel) getModel()).addRow(new String[]{key, def, val});
            }
        } else {
            clearTable();
        }
    }

    /** Reloads parameters after the processor class selection changes. */
    public void classChanged() {
        // clear the table
        clearTable();
        try {
            populateProperties();
            // commit the displayed properties to the service model
            storeProperties();
        } catch (Throwable ex) {
            ex.printStackTrace();
            CompositeErrorDialog.showErrorDialog("Error loading selected query processor", ex.getMessage(), ex);
        }
    }

    /** Only the "Current Value" column (index 2) is editable. */
    public boolean isCellEditable(int row, int column) {
        return column == 2;
    }

    public void clearTable() {
        while (getRowCount() != 0) {
            ((DefaultTableModel) getModel()).removeRow(0);
        }
    }

    /**
     * Returns the table contents as parameter-name to current-value pairs,
     * without the service-property key prefix.
     */
    public Properties getNonPrefixedConfiguredProperties() {
        Properties props = new Properties();
        for (int i = 0; i < getRowCount(); i++) {
            String key = (String) getValueAt(i, 0);
            String value = (String) getValueAt(i, 2);
            props.put(key, value);
        }
        return props;
    }

    private String getQpClassname() throws Exception {
        return CommonTools.getServicePropertyValue(serviceInfo.getServiceDescriptor(),
            DataServiceConstants.QUERY_PROCESSOR_CLASS_PROPERTY);
    }

    /**
     * Loads the configured query processor class from the service's lib
     * directory, or returns null when no real processor is configured
     * (empty class name, or the generated stub).
     */
    private Class<?> getQueryProcessorClass() throws Exception {
        String className = getQpClassname();
        if ((className != null) && (className.length() != 0)
            && !className.endsWith(DataServiceConstants.QUERY_PROCESSOR_STUB_NAME)) {
            File[] libs = getJarFiles();
            URL[] urls = new URL[libs.length];
            for (int i = 0; i < libs.length; i++) {
                // toURI().toURL() replaces the deprecated File.toURL(),
                // which did not escape special characters correctly
                urls[i] = libs[i].toURI().toURL();
            }
            ClassLoader loader = new URLClassLoader(urls);
            Class<?> qpClass = loader.loadClass(className);
            return qpClass;
        }
        return null;
    }

    private File[] getJarFiles() throws Exception {
        String libDir = serviceInfo.getBaseDirectory() + File.separator + "lib";
        File[] libArray = (new File(libDir)).listFiles(new FileFilters.JarFileFilter());
        // listFiles() returns null when the directory does not exist;
        // normalize to an empty array so callers need no null check
        return libArray == null ? new File[0] : libArray;
    }

    private void storeProperties() throws Exception {
        // set / add service properties to match the information in this table
        for (int i = 0; i < getRowCount(); i++) {
            String key = (String) getValueAt(i, 0);
            String userVal = (String) getValueAt(i, 2);
            String prefixedKey = DataServiceConstants.QUERY_PROCESSOR_CONFIG_PREFIX + key;
            boolean isFromEtc = propertiesFromEtc != null && propertiesFromEtc.contains(key);
            CommonTools.setServiceProperty(serviceInfo.getServiceDescriptor(),
                prefixedKey, userVal, isFromEtc);
        }
    }

    private static DefaultTableModel createModel() {
        DefaultTableModel model = new DefaultTableModel();
        model.addColumn("Parameter");
        model.addColumn("Default");
        model.addColumn("Current Value");
        return model;
    }
}
| |
/*
* Copyright 2014 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.web.service;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import com.navercorp.pinpoint.common.server.bo.AnnotationBo;
import com.navercorp.pinpoint.common.server.bo.Span;
import com.navercorp.pinpoint.common.server.bo.SpanBo;
import com.navercorp.pinpoint.common.server.bo.SpanEventBo;
import com.navercorp.pinpoint.common.service.AnnotationKeyRegistryService;
import com.navercorp.pinpoint.common.service.ServiceTypeRegistryService;
import com.navercorp.pinpoint.common.trace.AnnotationKeyMatcher;
import com.navercorp.pinpoint.common.trace.LoggingInfo;
import com.navercorp.pinpoint.web.calltree.span.CallTreeIterator;
import com.navercorp.pinpoint.web.calltree.span.CallTreeNode;
import com.navercorp.pinpoint.web.calltree.span.SpanAlign;
import com.navercorp.pinpoint.web.dao.TraceDao;
import com.navercorp.pinpoint.web.filter.Filter;
import com.navercorp.pinpoint.web.security.MetaDataFilter;
import com.navercorp.pinpoint.web.security.MetaDataFilter.MetaData;
import com.navercorp.pinpoint.web.vo.BusinessTransactions;
import com.navercorp.pinpoint.web.vo.Range;
import com.navercorp.pinpoint.web.vo.TransactionId;
import com.navercorp.pinpoint.web.vo.callstacks.Record;
import com.navercorp.pinpoint.web.vo.callstacks.RecordFactory;
import com.navercorp.pinpoint.web.vo.callstacks.RecordSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
*
* @author jaehong.kim
*/
@Service
public class TransactionInfoServiceImpl implements TransactionInfoService {
private final Logger logger = LoggerFactory.getLogger(this.getClass());
@Autowired
private TraceDao traceDao;
@Autowired
private AnnotationKeyMatcherService annotationKeyMatcherService;
@Autowired
private ServiceTypeRegistryService registry;
@Autowired
private AnnotationKeyRegistryService annotationKeyRegistryService;
@Autowired(required=false)
private MetaDataFilter metaDataFilter;
// Temporarily disabled Because We need to solve authentication problem inter system.
// @Value("#{pinpointWebProps['log.enable'] ?: false}")
// private boolean logLinkEnable;
// @Value("#{pinpointWebProps['log.button.name'] ?: ''}")
// private String logButtonName;
// @Value("#{pinpointWebProps['log.page.url'] ?: ''}")
// private String logPageUrl;
@Override
public BusinessTransactions selectBusinessTransactions(List<TransactionId> transactionIdList, String applicationName, Range range, Filter filter) {
    // Objects.requireNonNull throws the same NullPointerException (with the
    // same message) as the previous manual checks, more idiomatically.
    Objects.requireNonNull(transactionIdList, "transactionIdList must not be null");
    Objects.requireNonNull(applicationName, "applicationName must not be null");
    Objects.requireNonNull(filter, "filter must not be null");
    // TODO range is not used - check the logic again
    Objects.requireNonNull(range, "range must not be null");

    // Filter.NONE needs only root spans; any real filter must see all spans.
    List<List<SpanBo>> traceList;
    if (filter == Filter.NONE) {
        traceList = this.traceDao.selectSpans(transactionIdList);
    } else {
        traceList = this.traceDao.selectAllSpans(transactionIdList);
    }

    BusinessTransactions businessTransactions = new BusinessTransactions();
    for (List<SpanBo> trace : traceList) {
        if (!filter.include(trace)) {
            continue;
        }
        for (SpanBo spanBo : trace) {
            // show application's incoming requests
            if (applicationName.equals(spanBo.getApplicationId())) {
                businessTransactions.add(spanBo);
            }
        }
    }
    return businessTransactions;
}
@Override
public RecordSet createRecordSet(CallTreeIterator callTreeIterator, long focusTimestamp) {
    // Same NullPointerException/message as the previous manual check.
    Objects.requireNonNull(callTreeIterator, "callTreeIterator must not be null");

    RecordSet recordSet = new RecordSet();
    final List<SpanAlign> spanAlignList = callTreeIterator.values();

    // finds and marks the focusTimestamp.
    // focusTimestamp is needed to determine which span to use as reference when there are more than 2 spans making up a transaction.
    // for cases where focus cannot be found due to an error, a separate marker is needed.
    // TODO potential error - because server time is used, there may be more than 2 focusTime due to differences in server times.
    SpanBo focusTimeSpanBo = findFocusTimeSpanBo(spanAlignList, focusTimestamp);
    // FIXME patched temporarily for cases where focusTimeSpanBo is not found. Need a more complete solution.
    if (focusTimeSpanBo != null) {
        recordSet.setAgentId(focusTimeSpanBo.getAgentId());
        recordSet.setApplicationId(focusTimeSpanBo.getApplicationId());
        final String applicationName = getRpcArgument(focusTimeSpanBo);
        recordSet.setApplicationName(applicationName);
    }

    // find the startTime to use as reference
    long startTime = getStartTime(spanAlignList);
    recordSet.setStartTime(startTime);
    // find the endTime to use as reference
    long endTime = getEndTime(spanAlignList);
    recordSet.setEndTime(endTime);
    recordSet.setLoggingTransactionInfo(findIsLoggingTransactionInfo(spanAlignList));

    final SpanAlignPopulate spanAlignPopulate = new SpanAlignPopulate();
    List<Record> recordList = spanAlignPopulate.populateSpanRecord(callTreeIterator);
    logger.debug("RecordList:{}", recordList);

    if (focusTimeSpanBo != null) {
        // mark the record to be used as focus
        long beginTimeStamp = focusTimeSpanBo.getStartTime();
        markFocusRecord(recordList, focusTimeSpanBo);
        recordSet.setBeginTimestamp(beginTimeStamp);
    }

    recordSet.setRecordList(recordList);
    return recordSet;
}
/**
 * Returns {@code true} if at least one span in the list has its
 * logging-transaction-info flag set to {@code LoggingInfo.LOGGED}.
 */
private boolean findIsLoggingTransactionInfo(List<SpanAlign> spanAlignList) {
    for (SpanAlign align : spanAlignList) {
        if (!align.isSpan()) {
            continue;
        }
        if (align.getSpanBo().getLoggingTransactionInfo() == LoggingInfo.LOGGED.getCode()) {
            return true;
        }
    }
    return false;
}
/**
 * Marks the first record matching the focus span (same span id and same
 * begin timestamp) as focused. At most one record is marked.
 */
private void markFocusRecord(List<Record> recordList, final SpanBo focusTimeSpanBo) {
    final long focusBegin = focusTimeSpanBo.getStartTime();
    for (Record candidate : recordList) {
        final boolean sameSpan = focusTimeSpanBo.getSpanId() == candidate.getSpanId();
        if (sameSpan && candidate.getBegin() == focusBegin) {
            candidate.setFocused(true);
            break;
        }
    }
}
// private void addlogLink(RecordSet recordSet) {
// List<Record> records = recordSet.getRecordList();
// List<TransactionInfo> transactionInfoes = new LinkedList<TransactionInfo>();
//
// for (Iterator<Record> iterator = records.iterator(); iterator.hasNext();) {
// Record record = (Record) iterator.next();
//
// if(record.getTransactionId() == null) {
// continue;
// }
//
// TransactionInfo transactionInfo = new TransactionInfo(record.getTransactionId(), record.getSpanId());
//
// if (transactionInfoes.contains(transactionInfo)) {
// continue;
// };
//
// record.setLogPageUrl(logPageUrl);
// record.setLogButtonName(logButtonName);
//
// transactionInfoes.add(transactionInfo);
// }
// }
/**
 * Returns the absolute start time of the transaction, taken from the first
 * aligned element. Span events are timed relative to their parent span, so
 * their start elapsed is added to the parent span's start time.
 * Returns 0 for a null or empty list.
 */
private long getStartTime(List<SpanAlign> spanAlignList) {
    if (spanAlignList == null || spanAlignList.isEmpty()) {
        return 0;
    }
    final SpanAlign head = spanAlignList.get(0);
    if (!head.isSpan()) {
        // span-event: offset from the owning span's absolute start time
        return head.getSpanBo().getStartTime() + head.getSpanEventBo().getStartElapsed();
    }
    return head.getSpanBo().getStartTime();
}
/**
 * Returns the absolute end time of the transaction, taken from the first
 * aligned element. Returns 0 for a null or empty list.
 *
 * BUG FIX: the span branch previously returned only {@code getElapsed()}
 * (a duration), while the span-event branch returned an absolute timestamp
 * ({@code startTime + startElapsed + endElapsed}). The result feeds
 * {@code recordSet.setEndTime(...)}, which expects an absolute time, so the
 * span branch now returns {@code startTime + elapsed} for consistency.
 */
private long getEndTime(List<SpanAlign> spanAlignList) {
    if (spanAlignList == null || spanAlignList.isEmpty()) {
        return 0;
    }
    SpanAlign spanAlign = spanAlignList.get(0);
    if (spanAlign.isSpan()) {
        SpanBo spanBo = spanAlign.getSpanBo();
        // absolute end time = absolute start + duration
        return spanBo.getStartTime() + spanBo.getElapsed();
    } else {
        SpanEventBo spanEventBo = spanAlign.getSpanEventBo();
        long begin = spanAlign.getSpanBo().getStartTime() + spanEventBo.getStartElapsed();
        long elapsed = spanEventBo.getEndElapsed();
        return begin + elapsed;
    }
}
/**
 * Finds the span whose collector-accept time equals {@code focusTimestamp}.
 * When no span matches, falls back to the first span encountered; returns
 * {@code null} only when the list contains no spans at all.
 */
private SpanBo findFocusTimeSpanBo(List<SpanAlign> spanAlignList, long focusTimestamp) {
    SpanBo fallback = null;
    for (SpanAlign align : spanAlignList) {
        if (!align.isSpan()) {
            continue;
        }
        final SpanBo candidate = align.getSpanBo();
        if (candidate.getCollectorAcceptTime() == focusTimestamp) {
            return candidate;
        }
        if (fallback == null) {
            fallback = candidate;
        }
    }
    // return the first span when the focus span could not be found
    return fallback;
}
/** Resolves the display argument for an aligned element: RPC for spans, annotation value for span events. */
private String getArgument(final SpanAlign align) {
    return align.isSpan()
            ? getRpcArgument(align.getSpanBo())
            : getDisplayArgument(align.getSpanEventBo());
}
/** Returns the span's RPC string when present, otherwise its display-annotation value. */
private String getRpcArgument(SpanBo spanBo) {
    final String rpc = spanBo.getRpc();
    return (rpc != null) ? rpc : getDisplayArgument(spanBo);
}
/** Returns the matched display annotation's value as a string; empty string when absent or null-valued. */
private String getDisplayArgument(Span span) {
    final AnnotationBo displayArgument = getDisplayArgument0(span);
    return (displayArgument == null) ? "" : Objects.toString(displayArgument.getValue(), "");
}
/**
 * Finds the first annotation whose key is accepted by the service-type's
 * annotation-key matcher; returns {@code null} when the span has no
 * annotations or no matcher is registered for its service type.
 */
private AnnotationBo getDisplayArgument0(Span span) {
    // TODO needs a more generalized implementation for Arcus
    final List<AnnotationBo> annotations = span.getAnnotationBoList();
    if (annotations == null) {
        return null;
    }
    final AnnotationKeyMatcher matcher = annotationKeyMatcherService.findAnnotationKeyMatcher(span.getServiceType());
    if (matcher == null) {
        return null;
    }
    for (AnnotationBo candidate : annotations) {
        if (matcher.matches(candidate.getKey())) {
            return candidate;
        }
    }
    return null;
}
/**
 * Converts a transaction's call tree into the flat, ordered list of
 * {@link Record}s rendered by the call-tree view. Besides the primary
 * record per node, it appends child records for exceptions, annotations,
 * and the remote address (spans only).
 */
private class SpanAlignPopulate {
    private List<Record> populateSpanRecord(CallTreeIterator callTreeIterator) {
        if (callTreeIterator == null) {
            throw new NullPointerException("callTreeIterator must not be null");
        }
        // *2: rough capacity hint — most nodes emit at least one extra child record
        final List<Record> recordList = new ArrayList<>(callTreeIterator.size() * 2);
        final RecordFactory factory = new RecordFactory(registry, annotationKeyRegistryService);
        // annotation id has nothing to do with spanAlign's seq and thus may be incremented as long as they don't overlap.
        while (callTreeIterator.hasNext()) {
            final CallTreeNode node = callTreeIterator.next();
            if (node == null) {
                logger.warn("Corrupt CallTree found : {}", callTreeIterator.toString());
                throw new IllegalStateException("CallTree corrupted");
            }
            final SpanAlign align = node.getValue();
            // API-filtered nodes: spans still get a (filter-generated) record; events are dropped entirely
            if (metaDataFilter != null && metaDataFilter.filter(align, MetaData.API)) {
                if (align.isSpan()) {
                    Record record = metaDataFilter.createRecord(node, factory);
                    recordList.add(record);
                }
                continue;
            }
            // PARAM-filtered nodes keep their record but have the annotation replaced in place
            if (metaDataFilter != null && metaDataFilter.filter(align, MetaData.PARAM)) {
                metaDataFilter.replaceAnnotationBo(align, MetaData.PARAM);
            }
            final String argument = getArgument(align);
            final Record record = factory.get(node, argument);
            recordList.add(record);
            // add exception record.
            if(align.hasException()) {
                final Record exceptionRecord = factory.getException(record.getTab() + 1, record.getId(), align);
                if(exceptionRecord != null) {
                    recordList.add(exceptionRecord);
                }
            }
            // add annotation record.
            if(!align.getAnnotationBoList().isEmpty()) {
                final List<Record> annotations = factory.getAnnotations(record.getTab() + 1, record.getId(), align);
                recordList.addAll(annotations);
            }
            // add remote record.(span only)
            if (align.getRemoteAddr() != null) {
                final Record remoteAddressRecord = factory.getParameter(record.getTab() + 1, record.getId(), "REMOTE_ADDRESS", align.getRemoteAddr());
                recordList.add(remoteAddressRecord);
            }
        }
        return recordList;
    }
}
}
| |
package rfx.core.stream.kafka;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import kafka.api.FetchRequest;
import kafka.api.FetchRequestBuilder;
import kafka.api.OffsetRequest;
import kafka.api.PartitionOffsetRequestInfo;
import kafka.cluster.Broker;
import kafka.common.ErrorMapping;
import kafka.common.TopicAndPartition;
import kafka.javaapi.FetchResponse;
import kafka.javaapi.OffsetResponse;
import kafka.javaapi.PartitionMetadata;
import kafka.javaapi.TopicMetadata;
import kafka.javaapi.TopicMetadataRequest;
import kafka.javaapi.consumer.SimpleConsumer;
import kafka.message.MessageAndOffset;
import org.apache.commons.lang3.exception.ExceptionUtils;
import rfx.core.stream.configs.KafkaTopologyConfig;
import rfx.core.stream.message.KafkaDataPayload;
import rfx.core.util.LogUtil;
import rfx.core.util.StringPool;
import rfx.core.util.StringUtil;
import rfx.core.util.Utils;
/**
*
* the main class for connecting Kafka brokers and getting stream logs
*
* @author trieu
*
*/
public class KafkaDataSource {
    static final String TAG = KafkaDataSource.class.getSimpleName();
    // Human-readable explanation logged when Kafka reports OffsetOutOfRange.
    static String errorMsg = "OffsetOutOfRange: The requested offset is outside the range of offsets maintained by the server for the given topic/partition.";
    // Replica broker hosts captured by the most recent leader lookup; consulted on leader failover.
    private List<String> m_replicaBrokers = new ArrayList<String>();

    public KafkaDataSource() {
        m_replicaBrokers = new ArrayList<String>();
    }

    /**
     * Fetches a single batch of messages for the topic/partition described by
     * {@code kq}, starting from its recent read offset (or the earliest
     * available offset on first use). On OffsetOutOfRange it probes up to 100
     * subsequent offsets, then falls back to the latest offset. On success the
     * query's recent-read-offset is advanced past the returned batch.
     *
     * @param kq query descriptor (topic, partition, offset, fetch buffer size)
     * @return payload with the fetched messages; empty payload on failure
     */
    public KafkaDataPayload query(KafkaDataQuery kq) {
        int bufferSize = kq.getBufferForOneFetch();
        int a_partition = kq.getPartition();
        String a_topic = kq.getTopic();
        String clientName = kq.buildClientName();
        long beginOffset = kq.getRecentReadOffset(),messageOffset = beginOffset;
        List<KafkaData> kafkaData = new ArrayList<>();
        SimpleConsumer consumer = null;
        try {
            List<String> brokers = KafkaTopologyConfig.getBrokerList(a_topic);
            PartitionMetadata partitionMetadata = KafkaClusterUtil.findLeader(brokers, a_topic, a_partition);
            if (partitionMetadata == null) {
                LogUtil.e(TAG, "partitionMetadata == null, can't find metadata for Topic and Partition. Exiting");
                return new KafkaDataPayload(a_topic, a_partition);
            } else if(partitionMetadata.leader() == null){
                // retry once — the leader may have been mid-election on the first lookup
                partitionMetadata = KafkaClusterUtil.findLeader(brokers, a_topic, a_partition);
                if (partitionMetadata.leader() == null) {
                    LogUtil.e(TAG, "partitionMetadata.leader() == null, can't find metadata for Topic and Partition. Exiting");
                    return new KafkaDataPayload(a_topic, a_partition);
                }
            }
            Broker leader = partitionMetadata.leader();
            String leaderHost = leader.host();
            int leaderPort = leader.port();
            consumer = new SimpleConsumer(leaderHost, leaderPort, 100000, bufferSize , clientName);
            if(messageOffset < 0 ){
                //the first time request to kafka, we ask zookeeper for the earliest time our system got logs
                long whichtime = kafka.api.OffsetRequest.EarliestTime();
                messageOffset = KafkaClusterUtil.getLastOffset(consumer, a_topic, a_partition, whichtime , clientName);
            }
            FetchRequest req = new FetchRequestBuilder()
                    .clientId(clientName)
                    .addFetch(a_topic, a_partition, messageOffset, bufferSize)
                    .build();
            FetchResponse fetchResponse = consumer.fetch(req);
            if (fetchResponse.hasError()) {
                // Something went wrong!
                short code = fetchResponse.errorCode(a_topic, a_partition);
                // NOTE(review): 1 is the numeric value of OffsetOutOfRangeCode; the two tests are redundant
                if(code == 1 || code == ErrorMapping.OffsetOutOfRangeCode()){
                    //https://cwiki.apache.org/KAFKA/a-guide-to-the-kafka-protocol.html#AGuideToTheKafkaProtocol-OffsetAPI
                    LogUtil.e(TAG,"Error fetching data from the Broker:" + leaderHost + " Reason: " + errorMsg);
                    //try 100 times for searching valid offset
                    for (int i = 0; i < 100; i+=1) {
                        messageOffset++;//try to get next log
                        fetchResponse = consumer.fetch(new FetchRequestBuilder()
                                .clientId(clientName)
                                .addFetch(a_topic, a_partition, messageOffset, bufferSize)
                                .build());
                        if ( fetchResponse.hasError() ) {
                            Utils.sleep(100);
                        } else {
                            break;
                        }
                    }
                    //give up, find the latest
                    if (fetchResponse.hasError()) {
                        code = fetchResponse.errorCode(a_topic, a_partition);
                        if (code == ErrorMapping.OffsetOutOfRangeCode()) {
                            messageOffset = KafkaClusterUtil.getLastOffset(consumer,a_topic, a_partition, kafka.api.OffsetRequest.LatestTime(), clientName);
                            kq.setRecentReadOffset(messageOffset);
                            if (consumer != null) consumer.close();
                            LogUtil.e("UPDATE_READ_OFFSET", messageOffset + " from LatestTime at currentTimeMillis " + System.currentTimeMillis());
                            return new KafkaDataPayload(kafkaData, a_topic, a_partition, beginOffset, messageOffset);
                        }
                    }
                }
                // non-offset error (or probing failed): try to locate a new leader, then give up on this batch
                try {
                    leaderHost = KafkaClusterUtil.findNewLeader(leaderHost, a_topic, a_partition, leaderPort);
                } catch (Exception e) {
                    //resetOffsetToCurrent(consumer);
                    //FIXME
                } finally {
                    // NOTE(review): "consumer = null" is not guarded by the if — it always executes
                    if (consumer != null) consumer.close(); consumer = null;
                }
                return new KafkaDataPayload(a_topic, a_partition);
            }
            //get messages from valid fetchResponse
            for (MessageAndOffset messageAndOffset : fetchResponse.messageSet(a_topic, a_partition)) {
                ByteBuffer payload = messageAndOffset.message().payload();
                messageOffset = messageAndOffset.nextOffset();
                byte[] bytes = new byte[payload.limit()];
                payload.get(bytes);
                String data = new String(bytes, StringPool.UTF_8);
                kafkaData.add(new KafkaData(messageOffset, data));
            }
            if(kafkaData.size() > 0)
            {
                LogUtil.d("kafkaSpout, topic:"+a_topic+" seedMessage at offset " + messageOffset + " kafkaData.size(): " + kafkaData.size());
                kq.setRecentReadOffset(messageOffset);
            }
        } catch (Throwable e) {
            e.printStackTrace();
            LogUtil.e("KafkaDataSource", ExceptionUtils.getStackTrace(e));
        } finally {
            if (consumer != null) consumer.close();
        }
        return new KafkaDataPayload(kafkaData, a_topic, a_partition, beginOffset, messageOffset);
    }

    /**
     * Repeatedly fetches messages via the query's seed brokers until
     * {@code maxReads} is exhausted, no messages remain, or more than 5
     * consecutive fetch errors occur. Handles leader failover between
     * fetches. Messages are filtered through {@code kq.queryFilter} and only
     * offsets strictly greater than the starting offset are collected.
     *
     * @param kq query descriptor (seed brokers, topic, partition, offsets, limits)
     * @return payload with the collected messages; empty payload when no leader is found
     */
    public KafkaDataPayload queryFromSeedBrokers(KafkaDataQuery kq) {
        long a_maxReads = kq.getMaxReads();
        int bufferForOneFetch = kq.getBufferForOneFetch();
        int a_partition = kq.getPartition();
        String a_topic = kq.getTopic();
        long readOffset = kq.getRecentReadOffset();
        String clientName = kq.buildClientName();
        PartitionMetadata metadata = findLeader(kq.getSeedBrokers(), a_topic, a_partition);
        if (metadata == null) {
            LogUtil.e("KafkaDataSource.query", "Can't find metadata for Topic and Partition");
            return new KafkaDataPayload(a_topic, a_partition);
        }
        if (metadata.leader() == null) {
            LogUtil.e("KafkaDataSource.query", "Can't find Leader for Topic and Partition");
            return new KafkaDataPayload(a_topic, a_partition);
        }
        String leadBrokerHost = metadata.leader().host();
        int leadBrokerPort = metadata.leader().port();
        SimpleConsumer consumer = new SimpleConsumer(leadBrokerHost, leadBrokerPort,100000, 64 * 1024, clientName);
        if(readOffset < 0){
            // negative offset means "never read before": start at the earliest retained offset
            readOffset = getLastOffset(consumer, a_topic, a_partition, OffsetRequest.EarliestTime(), clientName);
        }
        long beginOffset = readOffset, messageOffset = readOffset;
        List<KafkaData> kafkaDataList = new ArrayList<>();
        int numErrors = 0;
        try {
            while (true) {
                if (consumer == null) {
                    consumer = new SimpleConsumer(leadBrokerHost, leadBrokerPort, 100000, 64 * 1024, clientName);
                }
                FetchRequestBuilder requestBuilder = new FetchRequestBuilder().clientId(clientName);
                FetchRequest req = requestBuilder.addFetch(a_topic, a_partition, readOffset, bufferForOneFetch).build();
                FetchResponse fetchResponse = consumer.fetch(req);
                if (fetchResponse.hasError()) {
                    numErrors++;
                    // Something went wrong!
                    short code = fetchResponse.errorCode(a_topic, a_partition);
                    LogUtil.e("KafkaDataSource.query", "Error fetching data from the Broker:"+ leadBrokerHost + " Reason: " + code);
                    if (numErrors > 5){
                        break;
                    }
                    if (code == ErrorMapping.OffsetOutOfRangeCode()) {
                        // We asked for an invalid offset. For simple case ask for the last element to reset
                        readOffset = getLastOffset(consumer, a_topic, a_partition,OffsetRequest.LatestTime(), clientName);
                        continue;
                    }
                    // other error: assume leadership moved and look the leader up again
                    consumer.close();
                    leadBrokerHost = findNewLeader(leadBrokerHost, a_topic, a_partition, leadBrokerPort);
                    continue;
                }
                numErrors = 0;
                long numRead = 0;
                for (MessageAndOffset messageAndOffset : fetchResponse.messageSet(a_topic, a_partition)) {
                    long currentOffset = messageAndOffset.offset();
                    if (currentOffset < readOffset) {
                        // Kafka may return messages before the requested offset for compressed sets
                        LogUtil.e("KafkaDataSource.query", "Found an old offset: " + currentOffset + " Expecting: " + readOffset);
                        continue;
                    }
                    readOffset = messageAndOffset.nextOffset();
                    ByteBuffer payload = messageAndOffset.message().payload();
                    byte[] bytes = new byte[payload.limit()];
                    payload.get(bytes);
                    messageOffset = messageAndOffset.offset();
                    if(messageOffset > beginOffset){
                        String data = new String(bytes, "UTF-8");
                        if(kq.queryFilter(messageOffset, data)){
                            kafkaDataList.add(new KafkaData(messageOffset, data));
                        }
                    }
                    numRead++;
                    if(a_maxReads > 0){
                        a_maxReads--;
                    }
                }
                if(a_maxReads <= 0 || numRead == 0){
                    break;
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
            LogUtil.e("KafkaDataSource.query", e.getMessage());
        } finally {
            if (consumer != null){
                consumer.close();
            }
        }
        return new KafkaDataPayload(kafkaDataList, a_topic, a_partition, beginOffset, messageOffset);
    }

    /**
     * Asks the broker for the single offset at or before {@code whichTime}
     * (EarliestTime/LatestTime sentinels included). Returns 0 on error.
     */
    public static long getLastOffset(SimpleConsumer consumer, String topic,
            int partition, long whichTime, String clientName) {
        TopicAndPartition topicAndPartition = new TopicAndPartition(topic,
                partition);
        Map<TopicAndPartition, PartitionOffsetRequestInfo> requestInfo = new HashMap<TopicAndPartition, PartitionOffsetRequestInfo>();
        requestInfo.put(topicAndPartition, new PartitionOffsetRequestInfo(whichTime, 1));
        kafka.javaapi.OffsetRequest request = new kafka.javaapi.OffsetRequest(requestInfo, kafka.api.OffsetRequest.CurrentVersion(),clientName);
        OffsetResponse response = consumer.getOffsetsBefore(request);
        if (response.hasError()) {
            LogUtil.e("KafkaDataSource.getLastOffset", "Error fetching data Offset Data the Broker. Reason: " + response.errorCode(topic, partition));
            return 0;
        }
        long[] offsets = response.offsets(topic, partition);
        return offsets[0];
    }

    /**
     * Polls the known replica brokers (up to 3 attempts, 1s apart) for the new
     * partition leader after a broker failure. The first attempt tolerates the
     * old leader reappearing, giving ZooKeeper time to converge.
     *
     * @throws Exception when no new leader is found after all attempts
     */
    private String findNewLeader(String a_oldLeader, String a_topic,
            int a_partition, int a_port) throws Exception {
        for (int i = 0; i < 3; i++) {
            boolean goToSleep = false;
            PartitionMetadata metadata = findLeader(m_replicaBrokers, a_port, a_topic, a_partition);
            if (metadata == null) {
                goToSleep = true;
            } else if (metadata.leader() == null) {
                goToSleep = true;
            } else if (a_oldLeader.equalsIgnoreCase(metadata.leader().host())
                    && i == 0) {
                // first time through if the leader hasn't changed give
                // ZooKeeper a second to recover
                // second time, assume the broker did recover before failover,
                // or it was a non-Broker issue
                //
                goToSleep = true;
            } else {
                return metadata.leader().host();
            }
            if (goToSleep) {
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException ie) {
                }
            }
        }
        LogUtil.e("KafkaDataSource.getLastOffset", "Unable to find new leader after Broker failure. Exiting");
        throw new Exception("Unable to find new leader after Broker failure. Exiting");
    }

    /**
     * Queries each seed broker (all on the same {@code a_port}) for topic
     * metadata and returns the metadata of the requested partition, updating
     * {@link #m_replicaBrokers} from the result. Returns {@code null} when no
     * broker answers.
     */
    private PartitionMetadata findLeader(List<String> a_seedBrokers,
            int a_port, String a_topic, int a_partition) {
        PartitionMetadata returnMetaData = null;
        for (String seed : a_seedBrokers) {
            SimpleConsumer consumer = null;
            try {
                consumer = new SimpleConsumer(seed, a_port, 100000, 64 * 1024,"leaderLookup");
                List<String> topics = new ArrayList<String>();
                topics.add(a_topic);
                TopicMetadataRequest req = new TopicMetadataRequest(topics);
                kafka.javaapi.TopicMetadataResponse resp = consumer.send(req);
                List<TopicMetadata> metaData = resp.topicsMetadata();
                for (TopicMetadata item : metaData) {
                    for (PartitionMetadata part : item.partitionsMetadata()) {
                        if (part.partitionId() == a_partition) {
                            returnMetaData = part;
                            break;
                        }
                    }
                }
            } catch (Exception e) {
                LogUtil.e("KafkaDataSource.getLastOffset", "Error communicating with Broker [" + seed
                        + "] to find Leader for [" + a_topic + ", "
                        + a_partition + "] Reason: " + e);
            } finally {
                if (consumer != null){
                    consumer.close();
                }
            }
        }
        if (returnMetaData != null) {
            m_replicaBrokers.clear();
            for (kafka.cluster.Broker replica : returnMetaData.replicas()) {
                m_replicaBrokers.add(replica.host());
            }
        }
        return returnMetaData;
    }

    /**
     * Variant of {@link #findLeader(List, int, String, int)} that takes seed
     * brokers as "host:port" strings instead of a shared port. Also refreshes
     * {@link #m_replicaBrokers} on success.
     */
    public PartitionMetadata findLeader(List<String> a_seedBrokers,String a_topic, int a_partition) {
        PartitionMetadata returnMetaData = null;
        for (String seed : a_seedBrokers) {
            SimpleConsumer consumer = null;
            String[] toks = seed.split(":");
            try {
                String host = StringUtil.safeString(toks[0]);
                int a_port = StringUtil.safeParseInt(toks[1]);
                consumer = new SimpleConsumer(host, a_port, 120 * 1000, 64 * 1024,"leaderLookup");
                List<String> topics = new ArrayList<String>();
                topics.add(a_topic);
                TopicMetadataRequest req = new TopicMetadataRequest(topics);
                kafka.javaapi.TopicMetadataResponse resp = consumer.send(req);
                List<TopicMetadata> metaData = resp.topicsMetadata();
                for (TopicMetadata item : metaData) {
                    for (PartitionMetadata part : item.partitionsMetadata()) {
                        if (part.partitionId() == a_partition) {
                            returnMetaData = part;
                            break;
                        }
                    }
                }
            } catch (Exception e) {
                LogUtil.e("KafkaDataSource.getLastOffset", "Error communicating with Broker [" + seed
                        + "] to find Leader for [" + a_topic + ", "
                        + a_partition + "] Reason: " + e);
            } finally {
                if (consumer != null){
                    consumer.close();
                }
            }
        }
        if (returnMetaData != null) {
            m_replicaBrokers.clear();
            for (kafka.cluster.Broker replica : returnMetaData.replicas()) {
                m_replicaBrokers.add(replica.host());
            }
        }
        return returnMetaData;
    }
}
| |
package text_editor;
import java.awt.BorderLayout;
import java.awt.CheckboxMenuItem;
import java.awt.Dialog;
import java.awt.FileDialog;
import java.awt.FlowLayout;
import java.awt.Frame;
import java.awt.Menu;
import java.awt.MenuBar;
import java.awt.MenuItem;
import java.awt.TextArea;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Calendar;
import java.util.GregorianCalendar;
//Texteditor class starts here
/**
 * A minimal AWT notepad: text area plus File/Edit/Tools/Help menus.
 * Implements New/Open/Save As/Exit, clipboard-like Cut/Copy/Paste (internal
 * buffer only), Delete, Select All, Time Stamp, and an About dialog.
 */
class Texteditor extends Frame implements ActionListener {

    /** Main editing area. */
    TextArea ta = new TextArea();
    // i: scratch index; len: byte length of the last opened file;
    // len1: char length of the last saved text; pos1: caret position for Paste
    int i, len1, len, pos1;
    // str: internal cut/copy buffer; s2..s9/s32: scratch path and content strings
    String str = "", s3 = "", s2 = "", s4 = "", s32 = "", s6 = "", s7 = "", s8 = "", s9 = "";
    String months[] = { "January", "February", "March", "April", "May", "June", "July", "August", "September",
            "October", "November", "December" };
    CheckboxMenuItem chkb = new CheckboxMenuItem("Word Wrap");

    /**
     * Builds the editor window: text area, menu bar and window-close handling.
     */
    public Texteditor() {
        MenuBar mb = new MenuBar();
        setLayout(new BorderLayout());
        add("Center", ta);
        setMenuBar(mb);
        Menu m1 = new Menu("File");
        Menu m2 = new Menu("Edit");
        Menu m3 = new Menu("Tools");
        Menu m4 = new Menu("Help");
        mb.add(m1);
        mb.add(m2);
        mb.add(m3);
        mb.add(m4);
        MenuItem mi1[] = {
                new MenuItem("New"), new MenuItem("Open"), new MenuItem("Save"),
                new MenuItem("Save As"), new MenuItem("Page Setup"),
                new MenuItem("Print"), new MenuItem("Exit")
        };
        MenuItem mi2[] = { new MenuItem("Delete"), new MenuItem("Cut"),
                new MenuItem("Copy"), new MenuItem("Paste"), new MenuItem("Find"),
                new MenuItem("Find Next"), new MenuItem("Replace"),
                new MenuItem("Go To"), new MenuItem("Select All"),
                new MenuItem("Time Stamp") };
        MenuItem mi3[] = { new MenuItem("Choose Font"), new MenuItem("Compile"),
                new MenuItem("Run") };
        MenuItem mi4[] = { new MenuItem("Help Topics"),
                new MenuItem("About Texteditor") };
        // BUG FIX: the original loops iterated "i < len1" where the field len1
        // is still 0 during construction, so NO menu items were ever added (and
        // would have thrown ArrayIndexOutOfBounds had len1 been larger than an
        // array). Iterate over each array's own length instead.
        for (int i = 0; i < mi1.length; i++) {
            m1.add(mi1[i]);
            mi1[i].addActionListener(this);
        }
        for (int i = 0; i < mi2.length; i++) {
            m2.add(mi2[i]);
            mi2[i].addActionListener(this);
        }
        m3.add(chkb);
        chkb.addActionListener(this);
        for (int i = 0; i < mi3.length; i++) {
            m3.add(mi3[i]);
            mi3[i].addActionListener(this);
        }
        for (int i = 0; i < mi4.length; i++) {
            m4.add(mi4[i]);
            mi4[i].addActionListener(this);
        }
        MyWindowsAdapter mw = new MyWindowsAdapter(this);
        addWindowListener(mw);
        setSize(500, 500);
        setTitle("untitled notepad");
        setVisible(true);
    }

    /** Dispatches menu commands by their action-command label. */
    public void actionPerformed(ActionEvent ae) {
        String arg = (String) ae.getActionCommand();
        if (arg.equals("New")) {
            // replace this window with a fresh, empty editor
            dispose();
            Texteditor t11 = new Texteditor();
            t11.setSize(500, 500);
            t11.setVisible(true);
        }
        try {
            if (arg.equals("Open")) {
                FileDialog fd1 = new FileDialog(this, "Select File", FileDialog.LOAD);
                fd1.setVisible(true);
                String s4 = "";
                s2 = fd1.getFile();
                s3 = fd1.getDirectory();
                s32 = s3 + s2;
                File f = new File(s32);
                FileInputStream fii = new FileInputStream(f);
                len = (int) f.length();
                try {
                    // BUG FIX: read "len" bytes (this file's length); the original
                    // looped to "len1", the length of the previously saved text.
                    for (int j = 0; j < len; j++) {
                        char s5 = (char) fii.read();
                        s4 = s4 + s5;
                    }
                } finally {
                    // FIX: the stream was never closed (resource leak)
                    fii.close();
                }
                ta.setText(s4);
            }
        } catch (IOException e) {
            // best-effort: a failed or cancelled open leaves the current text untouched
        }
        try {
            if (arg.equals("Save As")) {
                FileDialog dialog1 = new FileDialog(this, "Save As", FileDialog.SAVE);
                dialog1.setVisible(true);
                s7 = dialog1.getDirectory();
                s8 = dialog1.getFile();
                s9 = s7 + s8 + ".txt";
                s6 = ta.getText();
                len1 = s6.length();
                byte buf[] = s6.getBytes();
                File f1 = new File(s9);
                FileOutputStream fobj1 = new FileOutputStream(f1);
                try {
                    for (int k = 0; k < len1; k++) {
                        fobj1.write(buf[k]);
                    }
                } finally {
                    fobj1.close();
                }
                // BUG FIX: the title was previously updated on EVERY action
                // (showing "null Texteditor File" before any save); only update
                // it after an actual Save As.
                this.setTitle(s8 + " Texteditor File");
            }
        } catch (IOException e) {
            // best-effort: a failed or cancelled save keeps the old title
        }
        if (arg.equals("Exit")) {
            System.exit(0);
        }
        if (arg.equals("Cut")) {
            // copy selection into the internal buffer, then blank it out
            str = ta.getSelectedText();
            i = ta.getText().indexOf(str);
            ta.replaceRange(" ", i, i + str.length());
        }
        if (arg.equals("Copy")) {
            str = ta.getSelectedText();
        }
        if (arg.equals("Paste")) {
            pos1 = ta.getCaretPosition();
            ta.insert(str, pos1);
        }
        if (arg.equals("Delete")) {
            String msg = ta.getSelectedText();
            i = ta.getText().indexOf(msg);
            ta.replaceRange(" ", i, i + msg.length());
            msg = "";
        }
        if (arg.equals("Select All")) {
            String strText = ta.getText();
            int strLen = strText.length();
            ta.select(0, strLen);
        }
        if (arg.equals("Time Stamp")) {
            // insert "Time - h:m:s Date - d month yyyy" at the caret
            GregorianCalendar gcalendar = new GregorianCalendar();
            String h = String.valueOf(gcalendar.get(Calendar.HOUR));
            String m = String.valueOf(gcalendar.get(Calendar.MINUTE));
            String s = String.valueOf(gcalendar.get(Calendar.SECOND));
            String date = String.valueOf(gcalendar.get(Calendar.DATE));
            String mon = months[gcalendar.get(Calendar.MONTH)];
            String year = String.valueOf(gcalendar.get(Calendar.YEAR));
            String hms = "Time" + " - " + h + ":" + m + ":" + s + " Date" + " - " + date + " " + mon + " " + year + " ";
            int loc = ta.getCaretPosition();
            ta.insert(hms, loc);
        }
        if (arg.equals("About Texteditor")) {
            AboutDialog d1 = new AboutDialog(this, "About Texteditor");
            d1.setVisible(true);
            setSize(500, 500);
        }
    } // end of actionPerformed

    /** Disposes the owning editor window when the user closes it. */
    public class MyWindowsAdapter extends WindowAdapter {
        Texteditor tt;

        public MyWindowsAdapter(Texteditor ttt) {
            tt = ttt;
        }

        public void windowClosing(WindowEvent we) {
            tt.dispose();
        }
    } // end of inner window adapter
} // end of Texteditor
public class balls {
public static void main(String args[]) {
Texteditor to = new Texteditor();
}
}
/**
 * Modeless, fixed-size "About" dialog. Any action event it receives simply
 * disposes the dialog.
 */
class AboutDialog extends Dialog implements ActionListener {

    AboutDialog(Frame parent, String title) {
        super(parent, title, false); // false = modeless
        setResizable(false);
        setLayout(new FlowLayout(FlowLayout.LEFT));
        setSize(500, 300);
    }

    @Override
    public void actionPerformed(ActionEvent ae) {
        dispose();
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.threadpool;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool.Names;
import java.lang.reflect.Field;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.sameInstance;
/**
*/
public class UpdateThreadPoolSettingsTests extends ESTestCase {
/**
 * Configuring a thread pool with its own (correct) type in settings must be
 * accepted; the reported info should echo that type. The "same" pool is a
 * special case that reports no info at all.
 */
public void testCorrectThreadPoolTypePermittedInSettings() throws InterruptedException {
    String threadPoolName = randomThreadPoolName();
    ThreadPool.ThreadPoolType correctThreadPoolType = ThreadPool.THREAD_POOL_TYPES.get(threadPoolName);
    ThreadPool threadPool = null;
    try {
        threadPool = new ThreadPool(settingsBuilder()
                .put("node.name", "testCorrectThreadPoolTypePermittedInSettings")
                .put("threadpool." + threadPoolName + ".type", correctThreadPoolType.getType())
                .build());
        ThreadPool.Info info = info(threadPool, threadPoolName);
        if (ThreadPool.Names.SAME.equals(threadPoolName)) {
            assertNull(info); // we don't report on the "same" threadpool
        } else {
            // otherwise check we have the expected type
            assertEquals(info.getThreadPoolType(), correctThreadPoolType);
        }
    } finally {
        terminateThreadPoolIfNeeded(threadPool);
    }
}
/**
 * Configuring a thread pool with a WRONG type in node settings must be
 * rejected at ThreadPool construction with an IllegalArgumentException
 * naming both the attempted and the required type.
 */
public void testThreadPoolCanNotOverrideThreadPoolType() throws InterruptedException {
    String threadPoolName = randomThreadPoolName();
    ThreadPool.ThreadPoolType incorrectThreadPoolType = randomIncorrectThreadPoolType(threadPoolName);
    ThreadPool.ThreadPoolType correctThreadPoolType = ThreadPool.THREAD_POOL_TYPES.get(threadPoolName);
    ThreadPool threadPool = null;
    try {
        threadPool = new ThreadPool(
                settingsBuilder()
                        .put("node.name", "testThreadPoolCanNotOverrideThreadPoolType")
                        .put("threadpool." + threadPoolName + ".type", incorrectThreadPoolType.getType())
                        .build());
        // construction should have thrown; terminate only runs if it (wrongly) succeeded
        terminate(threadPool);
        fail("expected IllegalArgumentException");
    } catch (IllegalArgumentException e) {
        assertThat(
                e.getMessage(),
                is("setting threadpool." + threadPoolName + ".type to " + incorrectThreadPoolType.getType() + " is not permitted; must be " + correctThreadPoolType.getType()));
    } finally {
        terminateThreadPoolIfNeeded(threadPool);
    }
}
/**
 * The bulk and index pools must be capped at the bounded processor count:
 * an over-sized configuration (at startup or via a dynamic settings update)
 * is silently clipped, while a small size is applied as-is.
 */
public void testIndexingThreadPoolsMaxSize() throws InterruptedException {
    // FIX: removed an unused local ("String threadPoolName = randomThreadPoolName();")
    // that consumed randomness without affecting the test.
    for (String name : new String[] {ThreadPool.Names.BULK, ThreadPool.Names.INDEX}) {
        ThreadPool threadPool = null;
        try {
            int maxSize = EsExecutors.boundedNumberOfProcessors(Settings.EMPTY);
            // try to create a too-big (maxSize+1) thread pool
            threadPool = new ThreadPool(settingsBuilder()
                    .put("node.name", "testIndexingThreadPoolsMaxSize")
                    .put("threadpool." + name + ".size", maxSize+1)
                    .build());
            // confirm it clipped us at the maxSize:
            assertEquals(maxSize, ((ThreadPoolExecutor) threadPool.executor(name)).getMaximumPoolSize());
            ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
            threadPool.setClusterSettings(clusterSettings);
            // update it to a tiny size:
            clusterSettings.applySettings(
                    settingsBuilder()
                            .put("threadpool." + name + ".size", 1)
                            .build()
            );
            // confirm it worked:
            assertEquals(1, ((ThreadPoolExecutor) threadPool.executor(name)).getMaximumPoolSize());
            // try to update to too-big size:
            clusterSettings.applySettings(
                    settingsBuilder()
                            .put("threadpool." + name + ".size", maxSize+1)
                            .build()
            );
            // confirm it clipped us at the maxSize:
            assertEquals(maxSize, ((ThreadPoolExecutor) threadPool.executor(name)).getMaximumPoolSize());
        } finally {
            terminateThreadPoolIfNeeded(threadPool);
        }
    }
}
/**
 * A dynamic settings update must not be able to change a thread pool's type:
 * applySettings should throw, with the underlying cause naming both the
 * attempted and the required type.
 */
public void testUpdateSettingsCanNotChangeThreadPoolType() throws InterruptedException {
    String threadPoolName = randomThreadPoolName();
    ThreadPool.ThreadPoolType invalidThreadPoolType = randomIncorrectThreadPoolType(threadPoolName);
    ThreadPool.ThreadPoolType validThreadPoolType = ThreadPool.THREAD_POOL_TYPES.get(threadPoolName);
    ThreadPool threadPool = null;
    try {
        threadPool = new ThreadPool(settingsBuilder().put("node.name", "testUpdateSettingsCanNotChangeThreadPoolType").build());
        ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
        threadPool.setClusterSettings(clusterSettings);
        clusterSettings.applySettings(
                settingsBuilder()
                        .put("threadpool." + threadPoolName + ".type", invalidThreadPoolType.getType())
                        .build()
        );
        fail("expected IllegalArgumentException");
    } catch (IllegalArgumentException e) {
        // outer message comes from the settings framework, cause from the thread pool itself
        assertEquals("illegal value can't update [threadpool.] from [{}] to [{" + threadPoolName + ".type=" + invalidThreadPoolType.getType() + "}]", e.getMessage());
        assertThat(
                e.getCause().getMessage(),
                is("setting threadpool." + threadPoolName + ".type to " + invalidThreadPoolType.getType() + " is not permitted; must be " + validThreadPoolType.getType()));
    } finally {
        terminateThreadPoolIfNeeded(threadPool);
    }
}
/**
 * For a CACHED pool: dynamic keep_alive updates must take effect without
 * replacing the executor instance, and re-applying the same keep_alive must
 * be a no-op (same value, same executor).
 */
public void testCachedExecutorType() throws InterruptedException {
    String threadPoolName = randomThreadPool(ThreadPool.ThreadPoolType.CACHED);
    ThreadPool threadPool = null;
    try {
        Settings nodeSettings = Settings.settingsBuilder()
                .put("node.name", "testCachedExecutorType").build();
        threadPool = new ThreadPool(nodeSettings);
        ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
        threadPool.setClusterSettings(clusterSettings);
        assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.CACHED);
        assertThat(threadPool.executor(threadPoolName), instanceOf(EsThreadPoolExecutor.class));
        // apply an initial keep_alive of 10m
        Settings settings = clusterSettings.applySettings(settingsBuilder()
                .put("threadpool." + threadPoolName + ".keep_alive", "10m")
                .build());
        assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.CACHED);
        assertThat(threadPool.executor(threadPoolName), instanceOf(EsThreadPoolExecutor.class));
        // cached pools keep zero core threads
        assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getCorePoolSize(), equalTo(0));
        // Make sure keep alive value changed
        assertThat(info(threadPool, threadPoolName).getKeepAlive().minutes(), equalTo(10L));
        assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getKeepAliveTime(TimeUnit.MINUTES), equalTo(10L));
        // Make sure keep alive value reused
        assertThat(info(threadPool, threadPoolName).getKeepAlive().minutes(), equalTo(10L));
        assertThat(threadPool.executor(threadPoolName), instanceOf(EsThreadPoolExecutor.class));
        // Change keep alive
        Executor oldExecutor = threadPool.executor(threadPoolName);
        settings = clusterSettings.applySettings(settingsBuilder().put(settings).put("threadpool." + threadPoolName + ".keep_alive", "1m").build());
        // Make sure keep alive value changed
        assertThat(info(threadPool, threadPoolName).getKeepAlive().minutes(), equalTo(1L));
        assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getKeepAliveTime(TimeUnit.MINUTES), equalTo(1L));
        // Make sure executor didn't change
        assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.CACHED);
        assertThat(threadPool.executor(threadPoolName), sameInstance(oldExecutor));
        // Set the same keep alive
        settings = clusterSettings.applySettings(settingsBuilder().put(settings).put("threadpool." + threadPoolName + ".keep_alive", "1m").build());
        // Make sure keep alive value didn't change
        assertThat(info(threadPool, threadPoolName).getKeepAlive().minutes(), equalTo(1L));
        assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getKeepAliveTime(TimeUnit.MINUTES), equalTo(1L));
        // Make sure executor didn't change
        assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.CACHED);
        assertThat(threadPool.executor(threadPoolName), sameInstance(oldExecutor));
    } finally {
        terminateThreadPoolIfNeeded(threadPool);
    }
}
/**
 * Expected effective size of a fixed pool: the BULK and INDEX pools are capped
 * at the bounded processor count, every other pool takes the raw size.
 */
private static int getExpectedThreadPoolSize(Settings settings, String name, int size) {
    boolean processorBound = name.equals(ThreadPool.Names.BULK) || name.equals(ThreadPool.Names.INDEX);
    return processorBound ? Math.min(size, EsExecutors.boundedNumberOfProcessors(settings)) : size;
}
/**
 * A FIXED pool applies dynamic {@code size} updates in place: the executor
 * instance is reused, core == max == configured size, keep-alive is unused (0),
 * and resetting to empty settings keeps the previously configured size.
 */
public void testFixedExecutorType() throws InterruptedException {
    String threadPoolName = randomThreadPool(ThreadPool.ThreadPoolType.FIXED);
    ThreadPool threadPool = null;
    try {
        Settings nodeSettings = Settings.settingsBuilder()
                .put("node.name", "testFixedExecutorType").build();
        threadPool = new ThreadPool(nodeSettings);
        ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
        threadPool.setClusterSettings(clusterSettings);
        assertThat(threadPool.executor(threadPoolName), instanceOf(EsThreadPoolExecutor.class));
        Settings settings = clusterSettings.applySettings(settingsBuilder()
                .put("threadpool." + threadPoolName + ".size", "15")
                .build());
        // BULK/INDEX pools are capped by the processor count, hence the helper.
        int expectedSize = getExpectedThreadPoolSize(nodeSettings, threadPoolName, 15);
        assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.FIXED);
        assertThat(threadPool.executor(threadPoolName), instanceOf(EsThreadPoolExecutor.class));
        assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getCorePoolSize(), equalTo(expectedSize));
        assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getMaximumPoolSize(), equalTo(expectedSize));
        assertThat(info(threadPool, threadPoolName).getMin(), equalTo(expectedSize));
        assertThat(info(threadPool, threadPoolName).getMax(), equalTo(expectedSize));
        // keep alive does not apply to fixed thread pools
        assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getKeepAliveTime(TimeUnit.MINUTES), equalTo(0L));
        // Put old type back
        settings = clusterSettings.applySettings(Settings.EMPTY);
        assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.FIXED);
        // Make sure keep alive value is not used
        assertThat(info(threadPool, threadPoolName).getKeepAlive(), nullValue());
        // Make sure keep pool size value were reused
        assertThat(info(threadPool, threadPoolName).getMin(), equalTo(expectedSize));
        assertThat(info(threadPool, threadPoolName).getMax(), equalTo(expectedSize));
        assertThat(threadPool.executor(threadPoolName), instanceOf(EsThreadPoolExecutor.class));
        assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getCorePoolSize(), equalTo(expectedSize));
        assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getMaximumPoolSize(), equalTo(expectedSize));
        // Change size
        Executor oldExecutor = threadPool.executor(threadPoolName);
        settings = clusterSettings.applySettings(settingsBuilder().put(settings).put("threadpool." + threadPoolName + ".size", "10").build());
        expectedSize = getExpectedThreadPoolSize(nodeSettings, threadPoolName, 10);
        // Make sure size values changed
        assertThat(info(threadPool, threadPoolName).getMax(), equalTo(expectedSize));
        assertThat(info(threadPool, threadPoolName).getMin(), equalTo(expectedSize));
        assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getMaximumPoolSize(), equalTo(expectedSize));
        assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getCorePoolSize(), equalTo(expectedSize));
        // Make sure executor didn't change
        assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.FIXED);
        assertThat(threadPool.executor(threadPoolName), sameInstance(oldExecutor));
        // Change queue capacity
        settings = clusterSettings.applySettings(settingsBuilder().put(settings).put("threadpool." + threadPoolName + ".queue", "500")
                .build());
    } finally {
        terminateThreadPoolIfNeeded(threadPool);
    }
}
/**
 * A SCALING pool applies {@code keep_alive}/{@code min}/{@code size} updates on
 * the live executor: min/max and keep-alive change while the executor instance
 * itself is not replaced.
 */
public void testScalingExecutorType() throws InterruptedException {
    String threadPoolName = randomThreadPool(ThreadPool.ThreadPoolType.SCALING);
    ThreadPool threadPool = null;
    try {
        Settings nodeSettings = settingsBuilder()
                .put("threadpool." + threadPoolName + ".size", 10)
                .put("node.name", "testScalingExecutorType").build();
        threadPool = new ThreadPool(nodeSettings);
        ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
        threadPool.setClusterSettings(clusterSettings);
        // Expected state before any dynamic update: min 1, configured max 10, 5m keep-alive.
        assertThat(info(threadPool, threadPoolName).getMin(), equalTo(1));
        assertThat(info(threadPool, threadPoolName).getMax(), equalTo(10));
        assertThat(info(threadPool, threadPoolName).getKeepAlive().minutes(), equalTo(5L));
        assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.SCALING);
        assertThat(threadPool.executor(threadPoolName), instanceOf(EsThreadPoolExecutor.class));
        // Change settings that doesn't require pool replacement
        Executor oldExecutor = threadPool.executor(threadPoolName);
        clusterSettings.applySettings(settingsBuilder()
                .put("threadpool." + threadPoolName + ".keep_alive", "10m")
                .put("threadpool." + threadPoolName + ".min", "2")
                .put("threadpool." + threadPoolName + ".size", "15")
                .build());
        assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.SCALING);
        assertThat(threadPool.executor(threadPoolName), instanceOf(EsThreadPoolExecutor.class));
        assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getCorePoolSize(), equalTo(2));
        assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getMaximumPoolSize(), equalTo(15));
        assertThat(info(threadPool, threadPoolName).getMin(), equalTo(2));
        assertThat(info(threadPool, threadPoolName).getMax(), equalTo(15));
        // Make sure keep alive value changed
        assertThat(info(threadPool, threadPoolName).getKeepAlive().minutes(), equalTo(10L));
        assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getKeepAliveTime(TimeUnit.MINUTES), equalTo(10L));
        // The executor instance itself must survive the update.
        assertThat(threadPool.executor(threadPoolName), sameInstance(oldExecutor));
    } finally {
        terminateThreadPoolIfNeeded(threadPool);
    }
}
/**
 * Changing {@code queue_size} replaces the executor. The old executor must be
 * shutting down but still running already-submitted tasks, until
 * {@code shutdownNow()} interrupts them.
 */
public void testShutdownNowInterrupts() throws Exception {
    String threadPoolName = randomThreadPool(ThreadPool.ThreadPoolType.FIXED);
    ThreadPool threadPool = null;
    try {
        Settings nodeSettings = Settings.settingsBuilder()
                .put("threadpool." + threadPoolName + ".queue_size", 1000)
                .put("node.name", "testShutdownNowInterrupts").build();
        threadPool = new ThreadPool(nodeSettings);
        ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
        threadPool.setClusterSettings(clusterSettings);
        assertEquals(info(threadPool, threadPoolName).getQueueSize().getSingles(), 1000L);
        final CountDownLatch latch = new CountDownLatch(1);
        ThreadPoolExecutor oldExecutor = (ThreadPoolExecutor) threadPool.executor(threadPoolName);
        // Submit a task that blocks forever and only counts the latch down when interrupted.
        threadPool.executor(threadPoolName).execute(() -> {
            try {
                new CountDownLatch(1).await();
            } catch (InterruptedException ex) {
                latch.countDown();
                Thread.currentThread().interrupt();
            }
        }
        );
        clusterSettings.applySettings(settingsBuilder().put("threadpool." + threadPoolName + ".queue_size", 2000).build());
        // The queue-size update must have swapped in a brand-new executor...
        assertThat(threadPool.executor(threadPoolName), not(sameInstance(oldExecutor)));
        // ...while the old one is shutting down but still running the blocked task.
        assertThat(oldExecutor.isShutdown(), equalTo(true));
        assertThat(oldExecutor.isTerminating(), equalTo(true));
        assertThat(oldExecutor.isTerminated(), equalTo(false));
        threadPool.shutdownNow(); // should interrupt the thread
        latch.await(3, TimeUnit.SECONDS); // If this throws then ThreadPool#shutdownNow didn't interrupt
    } finally {
        terminateThreadPoolIfNeeded(threadPool);
    }
}
/**
 * Custom pools declared only in settings ("my_pool1"/"my_pool2") must be created
 * with the configured type/size/queue, and a dynamic size update must be
 * reflected in {@code threadPool.info()}.
 *
 * <p>Improvement: the original contained two near-identical copy-pasted
 * verification loops (differing only in my_pool2's expected size); they are now
 * a single parameterized helper, {@link #assertCustomPools}.</p>
 */
public void testCustomThreadPool() throws Exception {
    ThreadPool threadPool = null;
    try {
        Settings nodeSettings = Settings.settingsBuilder()
                .put("threadpool.my_pool1.type", "scaling")
                .put("threadpool.my_pool2.type", "fixed")
                .put("threadpool.my_pool2.size", "1")
                .put("threadpool.my_pool2.queue_size", "1")
                .put("node.name", "testCustomThreadPool").build();
        threadPool = new ThreadPool(nodeSettings);
        ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
        threadPool.setClusterSettings(clusterSettings);
        // Initial state: my_pool2 was configured with size 1.
        assertCustomPools(threadPool.info(), 1);
        // Updating my_pool2
        Settings settings = Settings.builder()
                .put("threadpool.my_pool2.size", "10")
                .build();
        clusterSettings.applySettings(settings);
        // After the dynamic update my_pool2 must report min == max == 10.
        assertCustomPools(threadPool.info(), 10);
    } finally {
        terminateThreadPoolIfNeeded(threadPool);
    }
}

/** Returns true when {@code name} matches one of the built-in pool names declared in {@code Names}. */
private static boolean isBuiltInThreadPool(String name) {
    for (Field field : Names.class.getFields()) {
        if (name.equalsIgnoreCase(field.getName())) {
            return true;
        }
    }
    return false;
}

/**
 * Asserts that both custom pools are present with their expected configuration
 * and that no unknown pool name exists.
 *
 * @param groups            snapshot from {@code threadPool.info()}
 * @param expectedPool2Size expected min/max size of the fixed pool "my_pool2"
 */
private void assertCustomPools(ThreadPoolInfo groups, int expectedPool2Size) {
    boolean foundPool1 = false;
    boolean foundPool2 = false;
    for (ThreadPool.Info info : groups) {
        if ("my_pool1".equals(info.getName())) {
            foundPool1 = true;
            assertEquals(info.getThreadPoolType(), ThreadPool.ThreadPoolType.SCALING);
        } else if ("my_pool2".equals(info.getName())) {
            foundPool2 = true;
            assertEquals(info.getThreadPoolType(), ThreadPool.ThreadPoolType.FIXED);
            assertThat(info.getMin(), equalTo(expectedPool2Size));
            assertThat(info.getMax(), equalTo(expectedPool2Size));
            // queue_size is never updated by this test and must stay at 1.
            assertThat(info.getQueueSize().singles(), equalTo(1L));
        } else if (!isBuiltInThreadPool(info.getName())) {
            fail("Unexpected pool name: " + info.getName());
        }
    }
    assertThat(foundPool1, is(true));
    assertThat(foundPool2, is(true));
}
/** Shuts the pool down via {@code terminate}; a {@code null} pool is a no-op. */
private void terminateThreadPoolIfNeeded(ThreadPool threadPool) throws InterruptedException {
    if (threadPool == null) {
        return;
    }
    terminate(threadPool);
}
/** Linear scan over the pool's info entries; returns {@code null} when no pool matches {@code name}. */
private ThreadPool.Info info(ThreadPool threadPool, String name) {
    for (ThreadPool.Info candidate : threadPool.info()) {
        if (candidate.getName().equals(name)) {
            return candidate;
        }
    }
    return null;
}
/** Picks any registered pool name at random. */
private String randomThreadPoolName() {
    String[] names = ThreadPool.THREAD_POOL_TYPES.keySet().toArray(new String[0]);
    return randomFrom(names);
}
/** Any type except the one actually registered for {@code threadPoolName} counts as "incorrect". */
private ThreadPool.ThreadPoolType randomIncorrectThreadPoolType(String threadPoolName) {
    Set<ThreadPool.ThreadPoolType> candidates = new HashSet<>(Arrays.asList(ThreadPool.ThreadPoolType.values()));
    candidates.remove(ThreadPool.THREAD_POOL_TYPES.get(threadPoolName));
    return randomFrom(candidates.toArray(new ThreadPool.ThreadPoolType[0]));
}
/** Restricts the registered pools to those of the requested type and picks one at random. */
private String randomThreadPool(ThreadPool.ThreadPoolType type) {
    return randomFrom(
            ThreadPool.THREAD_POOL_TYPES.entrySet().stream()
                    .filter(entry -> type.equals(entry.getValue()))
                    .map(Map.Entry::getKey)
                    .collect(Collectors.toList()));
}
}
| |
package br.com.ifceosp.clinic.view;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.FlowLayout;
import java.awt.Font;
import java.awt.Toolkit;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.DefaultComboBoxModel;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JDialog;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JTextField;
import javax.swing.border.EmptyBorder;
import br.com.ifceosp.clinic.bean.Medico;
import br.com.ifceosp.clinic.bean.Usuario;
import br.com.ifceosp.clinic.controller.Application;
import br.com.ifceosp.clinic.controller.DadoExistenteException;
import br.com.ifceosp.clinic.controller.FormatoNumericoException;
import br.com.ifceosp.clinic.controller.FormatoStringException;
import br.com.ifceosp.clinic.model.MedicoModel;
import br.com.ifceosp.clinic.model.UsuarioModel;
@SuppressWarnings("serial")
public class EditMedico extends JDialog {

    /** Root content panel; children are positioned absolutely (null layout). */
    private final JPanel painelContent = new JPanel();
    /** Doctor's signature text field. */
    private JTextField inputAssinatura;
    /** Doctor's CRM (registration number) text field. */
    private JTextField inputCrm;

    /**
     * Returns the index of {@code value} within {@code values}, or 0 when absent.
     * Falling back to the first option keeps the combo boxes usable when the
     * persisted value no longer matches any option — the previous code indexed
     * one past the array end and threw ArrayIndexOutOfBoundsException there.
     */
    private static int indexOfOrFirst(String[] values, String value) {
        for (int i = 0; i < values.length; i++) {
            if (values[i].equals(value)) {
                return i;
            }
        }
        return 0;
    }

    /**
     * Create the dialog pre-filled with the data of the doctor identified by {@code id}.
     *
     * @param id database id of the Medico record being edited
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    public EditMedico(Long id) {
        final Medico obj = new MedicoModel().localizar(id);
        if (obj == null) {
            new ErrorMessage("IMPOSSIVEL RECUPERAR DADOS DE USUARIO. TENTE NOVAMENTE!");
            Application.getInstanceEditMedico().dispose();
            // Bug fix: abort construction — every statement below dereferences obj
            // and would throw NullPointerException.
            return;
        }
        setTitle("Edicao de Medico");
        setIconImage(Toolkit.getDefaultToolkit().getImage(EditMedico.class.getResource("/images/clinic.png")));
        setDefaultCloseOperation(JDialog.DISPOSE_ON_CLOSE); // closing the window just disposes the dialog
        setBounds(100, 100, 435, 240);
        setLocationRelativeTo(Application.getInstanceClinic()); // center over the main application window
        getContentPane().setLayout(new BorderLayout());
        painelContent.setBorder(new EmptyBorder(5, 5, 5, 5));
        setModal(true);
        getContentPane().add(painelContent, BorderLayout.CENTER);
        painelContent.setLayout(null);
        {
            JLabel lblNomeDoUsuario = new JLabel("USUARIO:");
            lblNomeDoUsuario.setFont(new Font("Tahoma", Font.BOLD, 12));
            lblNomeDoUsuario.setBounds(52, 30, 68, 25);
            painelContent.add(lblNomeDoUsuario);
        }
        {
            inputAssinatura = new JTextField(obj.getAssinatura());
            inputAssinatura.setColumns(10);
            inputAssinatura.setBounds(130, 60, 279, 25);
            painelContent.add(inputAssinatura);
        }
        {
            JLabel lblUsuario = new JLabel("ASSINATURA:");
            lblUsuario.setFont(new Font("Tahoma", Font.BOLD, 12));
            lblUsuario.setBounds(29, 59, 91, 25);
            painelContent.add(lblUsuario);
        }
        JLabel lblSenha = new JLabel("SEXO:");
        lblSenha.setFont(new Font("Tahoma", Font.BOLD, 12));
        lblSenha.setBounds(80, 89, 40, 25);
        painelContent.add(lblSenha);
        JLabel lblRepetirSenha = new JLabel("CRM:");
        lblRepetirSenha.setFont(new Font("Tahoma", Font.BOLD, 12));
        lblRepetirSenha.setBounds(201, 89, 40, 25);
        painelContent.add(lblRepetirSenha);
        JLabel lblEspecializacao = new JLabel("ESPECIALIZACAO:");
        lblEspecializacao.setFont(new Font("Tahoma", Font.BOLD, 12));
        lblEspecializacao.setBounds(10, 119, 110, 25);
        painelContent.add(lblEspecializacao);
        final JComboBox combo_sexo = new JComboBox();
        String[] sexo = new String[] {"M", "F"};
        combo_sexo.setModel(new DefaultComboBoxModel(sexo));
        // Pre-select the stored sex; unknown values fall back to the first option.
        combo_sexo.setSelectedIndex(indexOfOrFirst(sexo, obj.getSexo()));
        combo_sexo.setBounds(130, 90, 46, 25);
        painelContent.add(combo_sexo);
        final JComboBox combo_usuario = new JComboBox();
        Usuario usumed = new UsuarioModel().localizar(obj.getIdUsuario());
        // List the doctor's current user first, then the other eligible users.
        DefaultComboBoxModel<Usuario> nomeUsuario = new DefaultComboBoxModel<Usuario>();
        nomeUsuario.addElement(usumed);
        for (Usuario u : new UsuarioModel().listar()) {
            if (u.getNivel() != 2) {
                nomeUsuario.addElement(u); // relies on Usuario#toString for the display text
            }
        }
        // NOTE(review): usumed can appear twice when its nivel != 2 — confirm this is intended.
        combo_usuario.setModel(nomeUsuario);
        combo_usuario.setBounds(130, 30, 279, 25);
        painelContent.add(combo_usuario);
        final JComboBox combo_especializacao = new JComboBox();
        String[] especializacao = new String[] {"Dentista", "Oftalmologista", "Endocrinologista"};
        combo_especializacao.setModel(new DefaultComboBoxModel(especializacao));
        // Pre-select the stored specialization; unknown values fall back to the first option.
        combo_especializacao.setSelectedIndex(indexOfOrFirst(especializacao, obj.getEspecializacao()));
        combo_especializacao.setBounds(130, 120, 279, 25);
        painelContent.add(combo_especializacao);
        inputCrm = new JTextField(obj.getCrm());
        inputCrm.setColumns(10);
        inputCrm.setBounds(237, 90, 172, 25);
        painelContent.add(inputCrm);
        {
            JPanel buttonPane = new JPanel();
            buttonPane.setBackground(new Color(204, 0, 0));
            buttonPane.setLayout(new FlowLayout(FlowLayout.RIGHT));
            getContentPane().add(buttonPane, BorderLayout.SOUTH);
            {
                JButton btnCancelar = new JButton("Cancelar");
                btnCancelar.addActionListener(new ActionListener() {
                    public void actionPerformed(ActionEvent event) {
                        Application.getInstanceEditMedico().dispose();
                    }
                });
                btnCancelar.setActionCommand("cancelar");
                buttonPane.add(btnCancelar);
            }
            {
                JButton btnCadastrar = new JButton("Editar");
                btnCadastrar.addActionListener(new ActionListener() {
                    public void actionPerformed(ActionEvent event) {
                        // Copy the form state back into the entity before persisting.
                        Usuario u = (Usuario) combo_usuario.getSelectedItem();
                        obj.setIdUsuario(u.getId());
                        obj.setSexo(combo_sexo.getSelectedItem().toString());
                        obj.setCrm(inputCrm.getText());
                        obj.setAssinatura(inputAssinatura.getText());
                        obj.setEspecializacao(combo_especializacao.getSelectedItem().toString());
                        try {
                            if (new MedicoModel().alterar(obj)) {
                                // Replace the edited row in the listing table.
                                Application.getInstanceListMedico().getTableModel().removeRow(Application.getInstanceListMedico().getTable().getSelectedRow());
                                String nome = new UsuarioModel().localizar(obj.getIdUsuario()).getNome();
                                Application.getInstanceListMedico().getTableModel().addRow(new Object[]{ obj.getId(), nome, obj.getCrm(), obj.getAssinatura(), obj.getEspecializacao() });
                                Application.getInstanceEditMedico().dispose();
                                new SuccessMessage("MEDICO EDITADO COM SUCESSO");
                            } else {
                                new ErrorMessage("ERRO AO TENTAR ALTERAR DADOS. TENTE NOVAMENTE!");
                            }
                        } catch (FormatoStringException e) {
                            e.showMessage();
                        } catch (DadoExistenteException e) {
                            e.showMessage();
                        } catch (FormatoNumericoException e) {
                            e.showMessage();
                        }
                    }
                });
                btnCadastrar.setActionCommand("editar");
                buttonPane.add(btnCadastrar);
                getRootPane().setDefaultButton(btnCadastrar); // Enter triggers "Editar"
            }
        }
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package git4idea.status;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.vcs.*;
import com.intellij.openapi.vcs.changes.Change;
import com.intellij.openapi.vcs.changes.ChangeListManager;
import com.intellij.openapi.vcs.changes.ContentRevision;
import com.intellij.openapi.vcs.changes.VcsDirtyScope;
import com.intellij.openapi.vfs.VirtualFile;
import git4idea.GitContentRevision;
import git4idea.GitRevisionNumber;
import git4idea.GitUtil;
import git4idea.changes.GitChangeUtils;
import git4idea.commands.Git;
import git4idea.commands.GitCommand;
import git4idea.commands.GitLineHandler;
import git4idea.util.StringScanner;
import org.jetbrains.annotations.NotNull;
import java.util.*;
/**
* <p>
* Collects changes from the Git repository in the specified {@link com.intellij.openapi.vcs.changes.VcsDirtyScope}
* using the older technique that is replaced by {@link GitNewChangesCollector} for Git later than 1.7.0 inclusive.
* This class is used for Git older than 1.7.0 not inclusive, that don't have {@code 'git status --porcelain'}.
* </p>
* <p>
* The method used by this class is less efficient and more error-prone than {@link GitNewChangesCollector} method.
* Thus this class is considered as a legacy code for Git 1.6.*. Read further for the implementation details and the ground for
* transferring to use {@code 'git status --porcelain'}.
* </p>
* <p>
* The following Git commands are called to get the changes, i.e. the state of the working tree combined with the state of index.
* <ul>
* <li>
* <b>{@code 'git update-index --refresh'}</b> (called on the whole repository) - probably unnecessary (especially before 'git diff'),
* but is left not to break some older Gits occasionally. See the following links for some details:
* <a href="http://us.generation-nt.com/answer/bug-596126-git-status-does-not-refresh-index-fixed-since-1-7-1-1-please-consider-upgrading-1-7-1-2-squeeze-help-200234171.html">
* gitk doesn't refresh the index statinfo</a>;
* <a href="http://thread.gmane.org/gmane.comp.version-control.git/144176/focus">
* "Most git porcelain silently refreshes stat-dirty index entries"</a>;
* <a href="https://git.wiki.kernel.org/index.php/GitFaq#Can_I_import_from_tar_files_.28archives.29.3">update-index to import from tar files</a>.
* </li>
* <li>
* <b>{@code 'git ls-files --unmerged'}</b> (called on the whole repository) - to get the list of unmerged files.
* It is not clear why it should be called on the whole repository. The decision to call it on the whole repository was made in
* <code>45687fe "<a href="http://youtrack.jetbrains.net/issue/IDEA-50573">IDEADEV-40577</a>: The ignored unmerged files are now reported"</code>,
* but neither the rollback & test, nor the analysis didn't recover the need for that. It is left however, since it is a legacy code.
* </li>
* <li>
* <b>{@code 'git ls-files --others --exclude-standard'}</b> (called on the dirty scope) - to get the list of unversioned files.
* Note that this command is the only way to get the list of unversioned files, besides {@code 'git status'}.
* </li>
* <li>
* <b>{@code 'git diff --name-status -M HEAD -- }</b> (called on the dirty scope) - to get all other changes (except unversioned and
* unmerged).
* Note that there is also no way to get all tracked changes by a single command (except {@code 'git status'}), since
* {@code 'git diff'} returns either only not-staged changes, either ({@code 'git diff HEAD'}) treats unmerged as modified.
* </li>
* </ul>
* </p>
* <p>
* <b>Performance measurement</b>
* was performed on a large repository (like IntelliJ IDEA), on a single machine, after several "warm-ups" when {@code 'git status'} duration
* stabilizes.
* For the whole repository:
* {@code 'git status'} takes ~ 1300 ms while these 4 commands take ~ 1870 ms
* ('update-index' ~ 270 ms, 'ls-files --unmerged' ~ 46 ms, 'ls files --others' ~ 820 ms, 'diff' ~ 650 ms)
* ; for a single file:
* {@code 'git status'} takes ~ 375 ms, these 4 commands take ~ 750 ms.
* </p>
* <p>
* The class is immutable: collect changes and get the instance from where they can be retrieved by {@link #collect}.
* </p>
*
* @author Constantine Plotnikov
* @author Kirill Likhodedov
*/
class GitOldChangesCollector extends GitChangesCollector {
  private final List<VirtualFile> myUnversioned = new ArrayList<>(); // Unversioned files
  private final Set<String> myUnmergedNames = new HashSet<>(); // Names of unmerged files
  private final List<Change> myChanges = new ArrayList<>(); // all changes
  /**
   * Collects the changes from git command line and returns the instance of GitNewChangesCollector from which these changes can be retrieved.
   * This may be lengthy.
   */
  @NotNull
  static GitOldChangesCollector collect(@NotNull Project project, @NotNull ChangeListManager changeListManager,
                                        @NotNull ProjectLevelVcsManager vcsManager, @NotNull AbstractVcs vcs,
                                        @NotNull VcsDirtyScope dirtyScope, @NotNull VirtualFile vcsRoot) throws VcsException {
    return new GitOldChangesCollector(project, changeListManager, vcsManager, vcs, dirtyScope, vcsRoot);
  }
  /** Files reported as untracked by 'git ls-files --others'. */
  @NotNull
  @Override
  Collection<VirtualFile> getUnversionedFiles() {
    return myUnversioned;
  }
  /** All collected changes: diff-with-HEAD changes plus merge conflicts. */
  @NotNull
  @Override
  Collection<Change> getChanges(){
    return myChanges;
  }
  // All collection happens eagerly here; after construction the instance is
  // effectively immutable and the getters above only expose computed results.
  private GitOldChangesCollector(@NotNull Project project, @NotNull ChangeListManager changeListManager,
                                 @NotNull ProjectLevelVcsManager vcsManager, @NotNull AbstractVcs vcs, @NotNull VcsDirtyScope dirtyScope,
                                 @NotNull VirtualFile vcsRoot) throws VcsException {
    super(project, changeListManager, vcsManager, vcs, dirtyScope, vcsRoot);
    updateIndex();                   // refresh stale stat info before diffing
    collectUnmergedAndUnversioned(); // fills myUnversioned and myUnmergedNames
    collectDiffChanges();            // must run after, it consumes myUnmergedNames
  }
  /**
   * Runs 'git update-index --refresh --ignore-missing' on the whole repository to
   * refresh stale stat info in the index (see the class javadoc for why this is kept).
   */
  private void updateIndex() throws VcsException {
    GitLineHandler handler = new GitLineHandler(myProject, myVcsRoot, GitCommand.UPDATE_INDEX);
    handler.addParameters("--refresh", "--ignore-missing");
    handler.setSilent(true);
    handler.setStdoutSuppressed(true);
    // NOTE(review): throwOnError(1) — presumably tolerates exit code 1; confirm against the Git command API.
    Git.getInstance().runCommand(handler).throwOnError(1);
  }
  /**
   * Collect diff with head
   *
   * @throws VcsException if there is a problem with running git
   */
  private void collectDiffChanges() throws VcsException {
    Collection<FilePath> dirtyPaths = dirtyPaths(true);
    if (dirtyPaths.isEmpty()) {
      return;
    }
    try {
      String output = GitChangeUtils.getDiffOutput(myProject, myVcsRoot, "HEAD", dirtyPaths);
      // myUnmergedNames is passed so already-collected conflicts are not duplicated.
      GitChangeUtils.parseChanges(myProject, myVcsRoot, null, GitChangeUtils.resolveReference(myProject, myVcsRoot, "HEAD"), output, myChanges,
                                  myUnmergedNames);
    }
    catch (VcsException ex) {
      if (!GitChangeUtils.isHeadMissing(ex)) {
        throw ex;
      }
      GitLineHandler handler = new GitLineHandler(myProject, myVcsRoot, GitCommand.LS_FILES);
      handler.addParameters("--cached");
      handler.setSilent(true);
      handler.setStdoutSuppressed(true);
      // During init diff does not works because HEAD
      // will appear only after the first commit.
      // In that case added files are cached in index.
      String output = Git.getInstance().runCommand(handler).getOutputOrThrow();
      if (output.length() > 0) {
        // Each line of 'ls-files --cached' output is a staged path; report it as ADDED.
        StringTokenizer tokenizer = new StringTokenizer(output, "\n\r");
        while (tokenizer.hasMoreTokens()) {
          final String s = tokenizer.nextToken();
          Change ch = new Change(null, GitContentRevision.createRevision(myVcsRoot, s, null, myProject, true), FileStatus.ADDED);
          myChanges.add(ch);
        }
      }
    }
  }
  /**
   * Collect unversioned and unmerged files
   *
   * @throws VcsException if there is a problem with running git
   */
  private void collectUnmergedAndUnversioned() throws VcsException {
    Collection<FilePath> dirtyPaths = dirtyPaths(false);
    if (dirtyPaths.isEmpty()) {
      return;
    }
    // prepare handler
    // Unmerged files are queried on the whole repository (no path limits) — see class javadoc.
    GitLineHandler handler = new GitLineHandler(myProject, myVcsRoot, GitCommand.LS_FILES);
    handler.addParameters("-v", "--unmerged");
    handler.setSilent(true);
    handler.setStdoutSuppressed(true);
    // run handler and collect changes
    parseFiles(Git.getInstance().runCommand(handler).getOutputOrThrow());
    // prepare handler
    handler = new GitLineHandler(myProject, myVcsRoot, GitCommand.LS_FILES);
    handler.addParameters("-v", "--others", "--exclude-standard");
    handler.setSilent(true);
    handler.setStdoutSuppressed(true);
    handler.endOptions();
    handler.addRelativePaths(dirtyPaths);
    if(handler.isLargeCommandLine()) {
      // Too many dirty paths would overflow the command line: fall back to
      // running 'ls-files --others' on the whole repository without path limits.
      handler = new GitLineHandler(myProject, myVcsRoot, GitCommand.LS_FILES);
      handler.addParameters("-v", "--others", "--exclude-standard");
      handler.setSilent(true);
      handler.setStdoutSuppressed(true);
      handler.endOptions();
    }
    // run handler and collect changes
    parseFiles(Git.getInstance().runCommand(handler).getOutputOrThrow());
  }
  /**
   * Parses 'git ls-files -v' output. Lines tagged '?' are recorded as unversioned
   * files; lines tagged 'M' (unmerged) are recorded as modify-modify merge
   * conflicts; any other tag is unexpected here and raises a VcsException.
   */
  private void parseFiles(String list) throws VcsException {
    for (StringScanner sc = new StringScanner(list); sc.hasMoreData();) {
      if (sc.isEol()) {
        sc.nextLine();
        continue;
      }
      // First character is the status tag; the following separator is skipped.
      char status = sc.peek();
      sc.skipChars(2);
      if ('?' == status) {
        VirtualFile file = myVcsRoot.findFileByRelativePath(GitUtil.unescapePath(sc.line()));
        // Only report files that actually belong to this VCS root.
        if (Comparing.equal(GitUtil.gitRootOrNull(file), myVcsRoot)) {
          myUnversioned.add(file);
        }
      }
      else { //noinspection HardCodedStringLiteral
        if ('M' == status) {
          sc.boundedToken('\t');
          String file = GitUtil.unescapePath(sc.line());
          VirtualFile vFile = myVcsRoot.findFileByRelativePath(file);
          if (!Comparing.equal(GitUtil.gitRootOrNull(vFile), myVcsRoot)) {
            continue;
          }
          // ls-files reports one line per conflict stage; only record each path once.
          if (!myUnmergedNames.add(file)) {
            continue;
          }
          // assume modify-modify conflict
          ContentRevision before = GitContentRevision.createRevision(myVcsRoot, file, new GitRevisionNumber("orig_head"), myProject,
                                                                     true);
          ContentRevision after = GitContentRevision.createRevision(myVcsRoot, file, null, myProject, true);
          myChanges.add(new Change(before, after, FileStatus.MERGED_WITH_CONFLICTS));
        }
        else {
          throw new VcsException("Unsupported type of the merge conflict detected: " + status);
        }
      }
    }
  }
}
| |
/*
* This file is part of "lunisolar-magma".
*
* (C) Copyright 2014-2022 Lunisolar (http://lunisolar.eu/).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.lunisolar.magma.func.function.from;
import eu.lunisolar.magma.func.*; // NOSONAR
import javax.annotation.Nonnull; // NOSONAR
import javax.annotation.Nullable; // NOSONAR
import java.util.Objects;// NOSONAR
import eu.lunisolar.magma.basics.meta.*; // NOSONAR
import eu.lunisolar.magma.basics.meta.functional.*; // NOSONAR
import eu.lunisolar.magma.basics.meta.functional.type.*; // NOSONAR
import eu.lunisolar.magma.basics.meta.functional.domain.*; // NOSONAR
import eu.lunisolar.magma.func.action.*; // NOSONAR
import eu.lunisolar.magma.func.consumer.*; // NOSONAR
import eu.lunisolar.magma.func.consumer.primitives.*; // NOSONAR
import eu.lunisolar.magma.func.consumer.primitives.bi.*; // NOSONAR
import eu.lunisolar.magma.func.consumer.primitives.obj.*; // NOSONAR
import eu.lunisolar.magma.func.consumer.primitives.tri.*; // NOSONAR
import eu.lunisolar.magma.func.function.*; // NOSONAR
import eu.lunisolar.magma.func.function.conversion.*; // NOSONAR
import eu.lunisolar.magma.func.function.from.*; // NOSONAR
import eu.lunisolar.magma.func.function.to.*; // NOSONAR
import eu.lunisolar.magma.func.operator.binary.*; // NOSONAR
import eu.lunisolar.magma.func.operator.ternary.*; // NOSONAR
import eu.lunisolar.magma.func.operator.unary.*; // NOSONAR
import eu.lunisolar.magma.func.predicate.*; // NOSONAR
import eu.lunisolar.magma.func.supplier.*; // NOSONAR
import org.testng.Assert;
import org.testng.annotations.*; //NOSONAR
import java.util.regex.Pattern; //NOSONAR
import java.text.ParseException; //NOSONAR
import eu.lunisolar.magma.basics.*; //NOSONAR
import eu.lunisolar.magma.basics.exceptions.*; //NOSONAR
import java.util.concurrent.atomic.AtomicInteger; //NOSONAR
import eu.lunisolar.magma.func.tuple.*; // NOSONAR
import java.util.function.*; // NOSONAR
/** The test obviously concentrate on the interface methods the function it self is very simple. */
public class LBiCharFunctionTest<R> {
// Messages used by the exception-propagation tests below.
private static final String ORIGINAL_MESSAGE = "Original message";
private static final String EXCEPTION_WAS_WRAPPED = "Exception was wrapped.";
private static final String NO_EXCEPTION_WERE_THROWN = "No exception were thrown.";
// Canned value returned by the happy-path fixture.
private Integer testValue = 100;
// Fixture that always returns testValue regardless of its char arguments.
private LBiCharFunction<Integer> sut = new LBiCharFunction<Integer>(){
    public @Nullable Integer applyX(char a1,char a2) {
        return testValue;
    }
};
// Fixture that always returns null, used to exercise the non-null guarantee.
private LBiCharFunction<Integer> sutNull = new LBiCharFunction<Integer>(){
    public @Nullable Integer applyX(char a1,char a2) {
        return null;
    }
};
// Fixture that throws a checked ParseException on every call.
private LBiCharFunction<Integer> sutAlwaysThrowing = LBiCharFunction.biCharFunc((a1,a2) -> {
    throw new ParseException(ORIGINAL_MESSAGE, 0);
});
// Fixture that throws an unchecked exception on every call.
private LBiCharFunction<Integer> sutAlwaysThrowingUnchecked = LBiCharFunction.biCharFunc((a1,a2) -> {
    throw new IndexOutOfBoundsException(ORIGINAL_MESSAGE);
});
@Test
public void testTheResult() throws Throwable {
Assert.assertEquals(sut.apply('\u0100','\u0100'), testValue);
}
@Test
public void testTupleCall() throws Throwable {
LCharPair domainObject = Tuple4U.charPair('\u0100','\u0100');
Object result = sut.tupleApply(domainObject);
Assert.assertEquals(result, testValue);
}
@Test
public void testNonNullApply() throws Throwable {
Assert.assertSame(sut.nonNullApply('\u0100','\u0100'), testValue);
}
@Test
public void testNestingApplyUnchecked() throws Throwable {
// then
try {
sutAlwaysThrowingUnchecked.nestingApply('\u0100','\u0100');
Assert.fail(NO_EXCEPTION_WERE_THROWN);
} catch (Exception e) {
Assert.assertEquals(e.getClass(), IndexOutOfBoundsException.class);
Assert.assertNull(e.getCause());
Assert.assertEquals(e.getMessage(), ORIGINAL_MESSAGE);
}
}
@Test
public void testShovingApplyUnchecked() throws Throwable {
// then
try {
sutAlwaysThrowingUnchecked.shovingApply('\u0100','\u0100');
Assert.fail(NO_EXCEPTION_WERE_THROWN);
} catch (Exception e) {
Assert.assertEquals(e.getClass(), IndexOutOfBoundsException.class);
Assert.assertNull(e.getCause());
Assert.assertEquals(e.getMessage(), ORIGINAL_MESSAGE);
}
}
@Test(expectedExceptions=NullPointerException.class, expectedExceptionsMessageRegExp="\\QEvaluated value by nonNullApply() method cannot be null (LBiCharFunction: R apply(char a1,char a2)).\\E")
public void testNonNullCapturesNull() throws Throwable {
sutNull.nonNullApply('\u0100','\u0100');
}
@Test
public void testFunctionalInterfaceDescription() throws Throwable {
Assert.assertEquals(sut.functionalInterfaceDescription(), "LBiCharFunction: R apply(char a1,char a2)");
}
@Test
public void testBiCharFuncMethod() throws Throwable {
Assert.assertTrue(LBiCharFunction.biCharFunc((a1,a2) -> testValue ) instanceof LBiCharFunction);
}
// <editor-fold desc="compose (functional)">
@Test
public void testCompose() throws Throwable {
final ThreadLocal<Boolean> mainFunctionCalled = ThreadLocal.withInitial(()-> false);
final AtomicInteger beforeCalls = new AtomicInteger(0);
//given (+ some assertions)
LBiCharFunction<Integer> sutO = (a1,a2) -> {
mainFunctionCalled.set(true);
Assert.assertEquals(a1, (Object) '\u0090');
Assert.assertEquals(a2, (Object) '\u0091');
return 100;
};
LCharUnaryOperator before1 = p0 -> {
Assert.assertEquals(p0, (Object) '\u0080');
beforeCalls.incrementAndGet();
return '\u0090';
};
LCharUnaryOperator before2 = p1 -> {
Assert.assertEquals(p1, (Object) '\u0081');
beforeCalls.incrementAndGet();
return '\u0091';
};
//when
LBiCharFunction<Integer> function = sutO.compose(before1,before2);
function.apply('\u0080','\u0081');
//then - finals
Assert.assertTrue(mainFunctionCalled.get());
Assert.assertEquals(beforeCalls.get(), 2);
}
@Test
public void testBiCharFuncCompose() throws Throwable {
final ThreadLocal<Boolean> mainFunctionCalled = ThreadLocal.withInitial(()-> false);
final AtomicInteger beforeCalls = new AtomicInteger(0);
//given (+ some assertions)
LBiCharFunction<Integer> sutO = (a1,a2) -> {
mainFunctionCalled.set(true);
Assert.assertEquals(a1, (Object) '\u0090');
Assert.assertEquals(a2, (Object) '\u0091');
return 100;
};
LToCharFunction<Integer> before1 = p0 -> {
Assert.assertEquals(p0, (Object) 80);
beforeCalls.incrementAndGet();
return '\u0090';
};
LToCharFunction<Integer> before2 = p1 -> {
Assert.assertEquals(p1, (Object) 81);
beforeCalls.incrementAndGet();
return '\u0091';
};
//when
LBiFunction<Integer,Integer,Integer> function = sutO.biCharFuncCompose(before1,before2);
function.apply(80,81);
//then - finals
Assert.assertTrue(mainFunctionCalled.get());
Assert.assertEquals(beforeCalls.get(), 2);
}
// </editor-fold>
// <editor-fold desc="then (functional)">
@Test
public void testThen0() throws Throwable {
final ThreadLocal<Boolean> mainFunctionCalled = ThreadLocal.withInitial(()-> false);
final ThreadLocal<Boolean> thenFunctionCalled = ThreadLocal.withInitial(()-> false);
//given (+ some assertions)
LBiCharFunction<Integer> sutO = (a1,a2) -> {
mainFunctionCalled.set(true);
Assert.assertEquals(a1, (Object) '\u0080');
Assert.assertEquals(a2, (Object) '\u0081');
return 90;
};
LFunction<Integer,Integer> thenFunction = p -> {
thenFunctionCalled.set(true);
// Integer
Assert.assertEquals(p, (Object) 90);
// Integer
return 100;
};
//when
LBiCharFunction<Integer> function = sutO.then(thenFunction);
Integer finalValue = function.apply('\u0080','\u0081');
//then - finals
Assert.assertEquals(finalValue, (Object) 100);
Assert.assertTrue(mainFunctionCalled.get());
Assert.assertTrue(thenFunctionCalled.get());
}
@Test
public void testThenConsume1() throws Throwable {
final ThreadLocal<Boolean> mainFunctionCalled = ThreadLocal.withInitial(()-> false);
final ThreadLocal<Boolean> thenFunctionCalled = ThreadLocal.withInitial(()-> false);
//given (+ some assertions)
LBiCharFunction<Integer> sutO = (a1,a2) -> {
mainFunctionCalled.set(true);
Assert.assertEquals(a1, (Object) '\u0080');
Assert.assertEquals(a2, (Object) '\u0081');
return 90;
};
LConsumer<Integer> thenFunction = p -> {
thenFunctionCalled.set(true);
// Integer
Assert.assertEquals(p, (Object) 90);
};
//when
LBiCharConsumer function = sutO.thenConsume(thenFunction);
function.accept('\u0080','\u0081');
//then - finals
Assert.assertTrue(mainFunctionCalled.get());
Assert.assertTrue(thenFunctionCalled.get());
}
@Test
public void testThenToChar2() throws Throwable {
final ThreadLocal<Boolean> mainFunctionCalled = ThreadLocal.withInitial(()-> false);
final ThreadLocal<Boolean> thenFunctionCalled = ThreadLocal.withInitial(()-> false);
//given (+ some assertions)
LBiCharFunction<Integer> sutO = (a1,a2) -> {
mainFunctionCalled.set(true);
Assert.assertEquals(a1, (Object) '\u0080');
Assert.assertEquals(a2, (Object) '\u0081');
return 90;
};
LToCharFunction<Integer> thenFunction = p -> {
thenFunctionCalled.set(true);
// Integer
Assert.assertEquals(p, (Object) 90);
// char
return '\u0100';
};
//when
LCharBinaryOperator function = sutO.thenToChar(thenFunction);
char finalValue = function.applyAsChar('\u0080','\u0081');
//then - finals
Assert.assertEquals(finalValue, (Object) '\u0100');
Assert.assertTrue(mainFunctionCalled.get());
Assert.assertTrue(thenFunctionCalled.get());
}
@Test
public void testThenToBool3() throws Throwable {
final ThreadLocal<Boolean> mainFunctionCalled = ThreadLocal.withInitial(()-> false);
final ThreadLocal<Boolean> thenFunctionCalled = ThreadLocal.withInitial(()-> false);
//given (+ some assertions)
LBiCharFunction<Integer> sutO = (a1,a2) -> {
mainFunctionCalled.set(true);
Assert.assertEquals(a1, (Object) '\u0080');
Assert.assertEquals(a2, (Object) '\u0081');
return 90;
};
LPredicate<Integer> thenFunction = p -> {
thenFunctionCalled.set(true);
// Integer
Assert.assertEquals(p, (Object) 90);
// boolean
return true;
};
//when
LBiCharPredicate function = sutO.thenToBool(thenFunction);
boolean finalValue = function.test('\u0080','\u0081');
//then - finals
Assert.assertEquals(finalValue, (Object) true);
Assert.assertTrue(mainFunctionCalled.get());
Assert.assertTrue(thenFunctionCalled.get());
}
// </editor-fold>
@Test(expectedExceptions = RuntimeException.class)
public void testShove() {
// given
LBiCharFunction<Integer> sutThrowing = LBiCharFunction.biCharFunc((a1,a2) -> {
throw new UnsupportedOperationException();
});
// when
sutThrowing.shovingApply('\u0100','\u0100');
}
@Test
public void testToString() throws Throwable {
Assert.assertTrue(sut.toString().startsWith(this.getClass().getName()+"$"));
Assert.assertTrue(String.format("%s", sut).contains("LBiCharFunction: R apply(char a1,char a2)"));
}
@Test
public void isThrowing() {
Assert.assertFalse(sut.isThrowing());
}
//<editor-fold desc="Variants">
private Integer variantLChar1Char0Func(char a2,char a1) {
return 100;
}
@Test
public void compilerSubstituteVariantLChar1Char0Func() {
LBiCharFunction lambda = LBiCharFunction./*<R>*/char1Char0Func(this::variantLChar1Char0Func);
Assert.assertTrue(lambda instanceof LBiCharFunction.LChar1Char0Func);
}
//</editor-fold>
}
// ---- concatenation artifact: boundary between two unrelated source files ----
/*
* The MIT License
*
* Copyright (c) 2004-2010, Sun Microsystems, Inc., Kohsuke Kawaguchi
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.model;
import com.trilead.ssh2.crypto.Base64;
import hudson.PluginWrapper;
import hudson.Util;
import hudson.Extension;
import hudson.node_monitors.ArchitectureMonitor.DescriptorImpl;
import hudson.util.Secret;
import static hudson.util.TimeUnit2.DAYS;
import jenkins.model.Jenkins;
import net.sf.json.JSONObject;
import org.apache.commons.io.output.ByteArrayOutputStream;
import org.kohsuke.stapler.StaplerRequest;
import javax.crypto.Cipher;
import javax.crypto.CipherOutputStream;
import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;
import javax.crypto.CipherInputStream;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.FilterOutputStream;
import java.io.OutputStream;
import java.io.FilterInputStream;
import java.io.InputStream;
import java.io.DataInputStream;
import java.security.GeneralSecurityException;
import java.security.Key;
import java.security.KeyFactory;
import java.security.PublicKey;
import java.security.interfaces.RSAKey;
import java.security.interfaces.RSAPublicKey;
import java.security.spec.X509EncodedKeySpec;
import java.util.ArrayList;
import java.util.List;
import java.util.zip.GZIPOutputStream;
/**
 * Collects anonymised usage statistics (node/executor counts, active plugin list,
 * job counts per type) and encrypts them with a hybrid RSA+AES scheme so a browser
 * can submit them to the project servers.
 *
 * @author Kohsuke Kawaguchi
 */
@Extension
public class UsageStatistics extends PageDecorator {
// hex-encoded X.509 image of the RSA public key used to wrap the AES session key
private final String keyImage;
/**
 * Lazily computed {@link PublicKey} representation of {@link #keyImage}.
 */
private volatile transient RSAPublicKey key;
/**
 * When was the last time we asked a browser to send the usage stats for us?
 */
private volatile transient long lastAttempt = -1;
public UsageStatistics() {
this(DEFAULT_KEY_BYTES);
}
/**
 * Creates an instance with a specific public key image.
 */
public UsageStatistics(String keyImage) {
this.keyImage = keyImage;
load();
}
/**
 * Returns true if it's time for us to check for new version.
 */
public boolean isDue() {
// user opted out. no data collection.
if(!Jenkins.getInstance().isUsageStatisticsCollected() || DISABLED) return false;
long now = System.currentTimeMillis();
// NOTE(review): unsynchronized check-then-set — concurrent callers may both see
// the window open; worst case is an extra submission, presumably acceptable.
if(now - lastAttempt > DAY) {
lastAttempt = now;
return true;
}
return false;
}
// Lazily decodes keyImage into an RSA public key. The computation is idempotent,
// so the unsynchronized lazy init on a volatile field is safe.
private RSAPublicKey getKey() {
try {
if (key == null) {
KeyFactory keyFactory = KeyFactory.getInstance("RSA");
key = (RSAPublicKey)keyFactory.generatePublic(new X509EncodedKeySpec(Util.fromHexString(keyImage)));
}
return key;
} catch (GeneralSecurityException e) {
throw new Error(e); // impossible
}
}
/**
 * Gets the encrypted usage stat data to be sent to the Hudson server.
 */
public String getStatData() throws IOException {
Jenkins h = Jenkins.getInstance();
JSONObject o = new JSONObject();
o.put("stat",1);
// install id: digest of the instance secret, not the secret itself
o.put("install", Util.getDigestOf(h.getSecretKey()));
o.put("servletContainer",h.servletContext.getServerInfo());
o.put("version", Jenkins.VERSION);
// one record per computer; the master additionally reports its JVM
List<JSONObject> nodes = new ArrayList<JSONObject>();
for( Computer c : h.getComputers() ) {
JSONObject n = new JSONObject();
if(c.getNode()==h) {
n.put("master",true);
n.put("jvm-vendor", System.getProperty("java.vm.vendor"));
n.put("jvm-version", System.getProperty("java.version"));
}
n.put("executors",c.getNumExecutors());
DescriptorImpl descriptor = h.getDescriptorByType(DescriptorImpl.class);
n.put("os", descriptor.get(c));
nodes.add(n);
}
o.put("nodes",nodes);
List<JSONObject> plugins = new ArrayList<JSONObject>();
for( PluginWrapper pw : h.getPluginManager().getPlugins() ) {
if(!pw.isActive()) continue; // treat disabled plugins as if they are uninstalled
JSONObject p = new JSONObject();
p.put("name",pw.getShortName());
p.put("version",pw.getVersion());
plugins.add(p);
}
o.put("plugins",plugins);
// job counts keyed by item-type class name
JSONObject jobs = new JSONObject();
List<TopLevelItem> items = h.getItems();
for (TopLevelItemDescriptor d : Items.all()) {
int cnt=0;
for (TopLevelItem item : items) {
if(item.getDescriptor()==d)
cnt++;
}
jobs.put(d.getJsonSafeClassName(),cnt);
}
o.put("jobs",jobs);
try {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
// json -> UTF-8 encode -> gzip -> encrypt -> base64 -> string
OutputStreamWriter w = new OutputStreamWriter(new GZIPOutputStream(new CombinedCipherOutputStream(baos,getKey(),"AES")), "UTF-8");
o.write(w);
w.close();
return new String(Base64.encode(baos.toByteArray()));
} catch (GeneralSecurityException e) {
throw new Error(e); // impossible
}
}
@Override
public boolean configure(StaplerRequest req, JSONObject json) throws FormException {
try {
// for backward compatibility reasons, this configuration is stored in Jenkins
// (inverted: checkbox present -> null = collect; absent -> true = opted out)
Jenkins.getInstance().setNoUsageStatistics(json.has("usageStatisticsCollected") ? null : true);
return true;
} catch (IOException e) {
throw new FormException(e,"usageStatisticsCollected");
}
}
/**
 * Asymmetric cipher is slow and in case of Sun RSA implementation it can only encrypt the first block.
 *
 * So first create a symmetric key, then place this key in the beginning of the stream by encrypting it
 * with the asymmetric cipher. The rest of the stream will be encrypted by a symmetric cipher.
 */
public static final class CombinedCipherOutputStream extends FilterOutputStream {
public CombinedCipherOutputStream(OutputStream out, Cipher asym, String algorithm) throws IOException, GeneralSecurityException {
super(out);
// create a new symmetric cipher key used for this stream
String keyAlgorithm = getKeyAlgorithm(algorithm);
SecretKey symKey = KeyGenerator.getInstance(keyAlgorithm).generateKey();
// place the symmetric key by encrypting it with asymmetric cipher
out.write(asym.doFinal(symKey.getEncoded()));
// the rest of the data will be encrypted by this symmetric cipher
Cipher sym = Secret.getCipher(algorithm);
// NOTE(review): when a mode needing an IV is used, the IV is the key bytes
// themselves — weak practice, but changing it would break the wire format the
// server-side decryption expects.
sym.init(Cipher.ENCRYPT_MODE,symKey, keyAlgorithm.equals(algorithm) ? null : new IvParameterSpec(symKey.getEncoded()));
super.out = new CipherOutputStream(out,sym);
}
public CombinedCipherOutputStream(OutputStream out, RSAKey key, String algorithm) throws IOException, GeneralSecurityException {
this(out,toCipher(key,Cipher.ENCRYPT_MODE),algorithm);
}
}
/**
 * The opposite of the {@link CombinedCipherOutputStream}.
 */
public static final class CombinedCipherInputStream extends FilterInputStream {
/**
 * @param keyLength
 * Block size of the asymmetric cipher, in bits. I thought I can get it from {@code asym.getBlockSize()}
 * but that doesn't work with Sun's implementation.
 */
public CombinedCipherInputStream(InputStream in, Cipher asym, String algorithm, int keyLength) throws IOException, GeneralSecurityException {
super(in);
String keyAlgorithm = getKeyAlgorithm(algorithm);
// first read the symmetric key cipher
byte[] symKeyBytes = new byte[keyLength/8];
new DataInputStream(in).readFully(symKeyBytes);
SecretKey symKey = new SecretKeySpec(asym.doFinal(symKeyBytes),keyAlgorithm);
// the rest of the data will be decrypted by this symmetric cipher
Cipher sym = Secret.getCipher(algorithm);
// IV mirrors the encrypting side: the symmetric key bytes (see NOTE above)
sym.init(Cipher.DECRYPT_MODE,symKey, keyAlgorithm.equals(algorithm) ? null : new IvParameterSpec(symKey.getEncoded()));
super.in = new CipherInputStream(in,sym);
}
public CombinedCipherInputStream(InputStream in, RSAKey key, String algorithm) throws IOException, GeneralSecurityException {
this(in,toCipher(key,Cipher.DECRYPT_MODE),algorithm,key.getModulus().bitLength());
}
}
// Strips the mode/padding suffix: "AES/CBC/PKCS5Padding" -> "AES".
private static String getKeyAlgorithm(String algorithm) {
int index = algorithm.indexOf('/');
return (index>0)?algorithm.substring(0,index):algorithm;
}
// Wraps an RSA key in a ready-to-use RSA Cipher for the given mode.
private static Cipher toCipher(RSAKey key, int mode) throws GeneralSecurityException {
Cipher cipher = Cipher.getInstance("RSA");
cipher.init(mode, (Key)key);
return cipher;
}
/**
 * Public key to encrypt the usage statistics
 */
private static final String DEFAULT_KEY_BYTES = "30819f300d06092a864886f70d010101050003818d0030818902818100c14970473bd90fd1f2d20e4fa6e36ea21f7d46db2f4104a3a8f2eb097d6e26278dfadf3fe9ed05bbbb00a4433f4b7151e6683a169182e6ff2f6b4f2bb6490b2cddef73148c37a2a7421fc75f99fb0fadab46f191806599a208652f4829fd6f76e13195fb81ff3f2fce15a8e9a85ebe15c07c90b34ebdb416bd119f0d74105f3b0203010001";
private static final long DAY = DAYS.toMillis(1);
// JVM-wide kill switch (-Dhudson.model.UsageStatistics.disabled=true);
// left non-final, presumably so it can be toggled at runtime — TODO confirm
public static boolean DISABLED = Boolean.getBoolean(UsageStatistics.class.getName()+".disabled");
}
// ---- concatenation artifact: boundary between two unrelated source files ----
package io.swagger.model;
import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import org.joda.time.DateTime;
/**
 * Order — model object for a pet-store purchase order.
 *
 * <p>Generated by swagger-codegen (SpringCodegen); improvements here are limited to
 * documentation and making the enum wire value immutable — the external contract
 * (JSON mapping, fluent setters, equals/hashCode/toString output) is unchanged.</p>
 */
@javax.annotation.Generated(value = "class io.swagger.codegen.languages.SpringCodegen", date = "2017-04-07T22:52:21.719Z")
public class Order {
    @JsonProperty("id")
    private Long id = null;

    @JsonProperty("petId")
    private Long petId = null;

    @JsonProperty("quantity")
    private Integer quantity = null;

    @JsonProperty("shipDate")
    private DateTime shipDate = null;

    /**
     * Order Status — lifecycle state of the order.
     */
    public enum StatusEnum {
        PLACED("placed"),
        APPROVED("approved"),
        DELIVERED("delivered");

        /** JSON wire value; final because it never changes after construction. */
        private final String value;

        StatusEnum(String value) {
            this.value = value;
        }

        /** Serializes as the wire value, e.g. {@code "placed"}. */
        @Override
        @JsonValue
        public String toString() {
            return String.valueOf(value);
        }

        /**
         * Resolves a wire value back to its constant.
         *
         * @param text the JSON string value
         * @return the matching constant, or {@code null} for unknown values
         *         (generated contract: unknown values deserialize to null rather than throw)
         */
        @JsonCreator
        public static StatusEnum fromValue(String text) {
            for (StatusEnum b : StatusEnum.values()) {
                if (String.valueOf(b.value).equals(text)) {
                    return b;
                }
            }
            return null;
        }
    }

    @JsonProperty("status")
    private StatusEnum status = null;

    @JsonProperty("complete")
    private Boolean complete = false;

    /** Fluent setter for {@link #id}. */
    public Order id(Long id) {
        this.id = id;
        return this;
    }

    /**
     * Get id
     * @return id
     **/
    @ApiModelProperty(value = "")
    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    /** Fluent setter for {@link #petId}. */
    public Order petId(Long petId) {
        this.petId = petId;
        return this;
    }

    /**
     * Get petId
     * @return petId
     **/
    @ApiModelProperty(value = "")
    public Long getPetId() {
        return petId;
    }

    public void setPetId(Long petId) {
        this.petId = petId;
    }

    /** Fluent setter for {@link #quantity}. */
    public Order quantity(Integer quantity) {
        this.quantity = quantity;
        return this;
    }

    /**
     * Get quantity
     * @return quantity
     **/
    @ApiModelProperty(value = "")
    public Integer getQuantity() {
        return quantity;
    }

    public void setQuantity(Integer quantity) {
        this.quantity = quantity;
    }

    /** Fluent setter for {@link #shipDate}. */
    public Order shipDate(DateTime shipDate) {
        this.shipDate = shipDate;
        return this;
    }

    /**
     * Get shipDate
     * @return shipDate
     **/
    @ApiModelProperty(value = "")
    public DateTime getShipDate() {
        return shipDate;
    }

    public void setShipDate(DateTime shipDate) {
        this.shipDate = shipDate;
    }

    /** Fluent setter for {@link #status}. */
    public Order status(StatusEnum status) {
        this.status = status;
        return this;
    }

    /**
     * Order Status
     * @return status
     **/
    @ApiModelProperty(value = "Order Status")
    public StatusEnum getStatus() {
        return status;
    }

    public void setStatus(StatusEnum status) {
        this.status = status;
    }

    /** Fluent setter for {@link #complete}. */
    public Order complete(Boolean complete) {
        this.complete = complete;
        return this;
    }

    /**
     * Get complete
     * @return complete
     **/
    @ApiModelProperty(value = "")
    public Boolean getComplete() {
        return complete;
    }

    public void setComplete(Boolean complete) {
        this.complete = complete;
    }

    @Override
    public boolean equals(java.lang.Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        Order order = (Order) o;
        // field-by-field, null-safe comparison over all serialized properties
        return Objects.equals(this.id, order.id) &&
            Objects.equals(this.petId, order.petId) &&
            Objects.equals(this.quantity, order.quantity) &&
            Objects.equals(this.shipDate, order.shipDate) &&
            Objects.equals(this.status, order.status) &&
            Objects.equals(this.complete, order.complete);
    }

    @Override
    public int hashCode() {
        // must stay consistent with equals(): same fields, same order
        return Objects.hash(id, petId, quantity, shipDate, status, complete);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("class Order {\n");
        sb.append(" id: ").append(toIndentedString(id)).append("\n");
        sb.append(" petId: ").append(toIndentedString(petId)).append("\n");
        sb.append(" quantity: ").append(toIndentedString(quantity)).append("\n");
        sb.append(" shipDate: ").append(toIndentedString(shipDate)).append("\n");
        sb.append(" status: ").append(toIndentedString(status)).append("\n");
        sb.append(" complete: ").append(toIndentedString(complete)).append("\n");
        sb.append("}");
        return sb.toString();
    }

    /**
     * Convert the given object to string with each line indented by 4 spaces
     * (except the first line).
     */
    private String toIndentedString(java.lang.Object o) {
        if (o == null) {
            return "null";
        }
        return o.toString().replace("\n", "\n ");
    }
}
// ---- concatenation artifact: boundary between two unrelated source files ----
/*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.batik.anim.dom;
import java.awt.geom.AffineTransform;
import java.util.List;
import org.apache.batik.dom.AbstractDocument;
import org.apache.batik.dom.svg.AbstractSVGMatrix;
import org.apache.batik.dom.svg.SVGContext;
import org.apache.batik.dom.svg.SVGOMAngle;
import org.apache.batik.dom.svg.SVGOMPoint;
import org.apache.batik.dom.svg.SVGOMRect;
import org.apache.batik.dom.svg.SVGOMTransform;
import org.apache.batik.dom.svg.SVGSVGContext;
import org.apache.batik.dom.svg.SVGTestsSupport;
import org.apache.batik.dom.svg.SVGZoomAndPanSupport;
import org.apache.batik.dom.util.XLinkSupport;
import org.apache.batik.dom.util.XMLSupport;
import org.apache.batik.dom.util.ListNodeList;
import org.apache.batik.util.DoublyIndexedTable;
import org.apache.batik.util.SVGTypes;
import org.w3c.dom.DOMException;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.css.CSSStyleDeclaration;
import org.w3c.dom.css.DocumentCSS;
import org.w3c.dom.css.ViewCSS;
import org.w3c.dom.events.DocumentEvent;
import org.w3c.dom.events.Event;
import org.w3c.dom.stylesheets.DocumentStyle;
import org.w3c.dom.stylesheets.StyleSheetList;
import org.w3c.dom.svg.SVGAngle;
import org.w3c.dom.svg.SVGAnimatedBoolean;
import org.w3c.dom.svg.SVGAnimatedLength;
import org.w3c.dom.svg.SVGAnimatedPreserveAspectRatio;
import org.w3c.dom.svg.SVGAnimatedRect;
import org.w3c.dom.svg.SVGElement;
import org.w3c.dom.svg.SVGException;
import org.w3c.dom.svg.SVGLength;
import org.w3c.dom.svg.SVGMatrix;
import org.w3c.dom.svg.SVGNumber;
import org.w3c.dom.svg.SVGPoint;
import org.w3c.dom.svg.SVGRect;
import org.w3c.dom.svg.SVGSVGElement;
import org.w3c.dom.svg.SVGStringList;
import org.w3c.dom.svg.SVGTransform;
import org.w3c.dom.svg.SVGViewSpec;
import org.w3c.dom.views.AbstractView;
import org.w3c.dom.views.DocumentView;
/**
* This class implements {@link org.w3c.dom.svg.SVGSVGElement}.
*
* @author <a href="mailto:stephane@hillion.org">Stephane Hillion</a>
* @version $Id$
*/
public class SVGOMSVGElement
extends SVGStylableElement
implements SVGSVGElement {
/**
 * Table mapping XML attribute names to TraitInformation objects.
 * Extends the superclass table with this element's animatable geometry,
 * preserveAspectRatio, viewBox and externalResourcesRequired traits.
 */
protected static DoublyIndexedTable xmlTraitInformation;
static {
DoublyIndexedTable t =
new DoublyIndexedTable(SVGStylableElement.xmlTraitInformation);
// geometry traits are animatable lengths; percentage values resolve against
// the matching viewport dimension (width for x/width, height for y/height)
t.put(null, SVG_X_ATTRIBUTE,
new TraitInformation(true, SVGTypes.TYPE_LENGTH, PERCENTAGE_VIEWPORT_WIDTH));
t.put(null, SVG_Y_ATTRIBUTE,
new TraitInformation(true, SVGTypes.TYPE_LENGTH, PERCENTAGE_VIEWPORT_HEIGHT));
t.put(null, SVG_WIDTH_ATTRIBUTE,
new TraitInformation(true, SVGTypes.TYPE_LENGTH, PERCENTAGE_VIEWPORT_WIDTH));
t.put(null, SVG_HEIGHT_ATTRIBUTE,
new TraitInformation(true, SVGTypes.TYPE_LENGTH, PERCENTAGE_VIEWPORT_HEIGHT));
// CDATA traits below left unregistered (presumably non-animatable) — kept for reference
// t.put(null, SVG_BASE_PROFILE_ATTRIBUTE,
// new TraitInformation(false, SVGTypes.TYPE_CDATA));
// t.put(null, SVG_CONTENT_SCRIPT_TYPE_ATTRIBUTE,
// new TraitInformation(false, SVGTypes.TYPE_CDATA));
// t.put(null, SVG_CONTENT_STYLE_TYPE_ATTRIBUTE,
// new TraitInformation(false, SVGTypes.TYPE_CDATA));
// t.put(null, SVG_VERSION_ATTRIBUTE,
// new TraitInformation(false, SVGTypes.TYPE_CDATA));
t.put(null, SVG_PRESERVE_ASPECT_RATIO_ATTRIBUTE,
new TraitInformation(true, SVGTypes.TYPE_PRESERVE_ASPECT_RATIO_VALUE));
t.put(null, SVG_VIEW_BOX_ATTRIBUTE,
new TraitInformation(true, SVGTypes.TYPE_RECT));
t.put(null, SVG_EXTERNAL_RESOURCES_REQUIRED_ATTRIBUTE,
new TraitInformation(true, SVGTypes.TYPE_BOOLEAN));
xmlTraitInformation = t;
}
/**
 * The attribute initializer.
 * Supplies default values for the seven attributes registered below
 * (xmlns declarations plus SVG defaults).
 */
protected static final AttributeInitializer attributeInitializer;
static {
// capacity 7 == number of addAttribute calls that follow
attributeInitializer = new AttributeInitializer(7);
attributeInitializer.addAttribute(XMLSupport.XMLNS_NAMESPACE_URI,
null,
"xmlns",
SVG_NAMESPACE_URI);
attributeInitializer.addAttribute(XMLSupport.XMLNS_NAMESPACE_URI,
"xmlns",
"xlink",
XLinkSupport.XLINK_NAMESPACE_URI);
attributeInitializer.addAttribute(null,
null,
SVG_PRESERVE_ASPECT_RATIO_ATTRIBUTE,
"xMidYMid meet");
attributeInitializer.addAttribute(null,
null,
SVG_ZOOM_AND_PAN_ATTRIBUTE,
SVG_MAGNIFY_VALUE);
attributeInitializer.addAttribute(null,
null,
SVG_VERSION_ATTRIBUTE,
SVG_VERSION);
attributeInitializer.addAttribute(null,
null,
SVG_CONTENT_SCRIPT_TYPE_ATTRIBUTE,
"text/ecmascript");
attributeInitializer.addAttribute(null,
null,
SVG_CONTENT_STYLE_TYPE_ATTRIBUTE,
"text/css");
}
// Live (animated) attribute values — all created by initializeLiveAttributes().
/**
 * The 'x' attribute value.
 */
protected SVGOMAnimatedLength x;
/**
 * The 'y' attribute value.
 */
protected SVGOMAnimatedLength y;
/**
 * The 'width' attribute value.
 */
protected SVGOMAnimatedLength width;
/**
 * The 'height' attribute value.
 */
protected SVGOMAnimatedLength height;
/**
 * The 'externalResourcesRequired' attribute value.
 */
protected SVGOMAnimatedBoolean externalResourcesRequired;
/**
 * The 'preserveAspectRatio' attribute value.
 */
protected SVGOMAnimatedPreserveAspectRatio preserveAspectRatio;
/**
 * The 'viewBox' attribute value.
 */
protected SVGOMAnimatedRect viewBox;
/**
 * Creates a new SVGOMSVGElement object.
 * Protected no-arg form for subclasses; note that live attributes are
 * NOT initialized by this constructor.
 */
protected SVGOMSVGElement() {
}
/**
 * Creates a new SVGOMSVGElement object.
 * @param prefix The namespace prefix.
 * @param owner The owner document.
 */
public SVGOMSVGElement(String prefix, AbstractDocument owner) {
super(prefix, owner);
// wire up the live (animated) attribute values for this element
initializeLiveAttributes();
}
/**
 * Initializes all live attributes for this element.
 * Chains to the superclass first, then adds this element's own live values.
 */
protected void initializeAllLiveAttributes() {
super.initializeAllLiveAttributes();
initializeLiveAttributes();
}
/**
 * Initializes the live attribute values of this element.
 * NOTE(review): width/height pass 'true' as the final flag where x/y pass
 * 'false' — the flag's meaning is defined by createLiveAnimatedLength;
 * TODO confirm (presumably "non-negative/required" semantics).
 */
private void initializeLiveAttributes() {
x = createLiveAnimatedLength
(null, SVG_X_ATTRIBUTE, SVG_SVG_X_DEFAULT_VALUE,
SVGOMAnimatedLength.HORIZONTAL_LENGTH, false);
y = createLiveAnimatedLength
(null, SVG_Y_ATTRIBUTE, SVG_SVG_Y_DEFAULT_VALUE,
SVGOMAnimatedLength.VERTICAL_LENGTH, false);
width =
createLiveAnimatedLength
(null, SVG_WIDTH_ATTRIBUTE, SVG_SVG_WIDTH_DEFAULT_VALUE,
SVGOMAnimatedLength.HORIZONTAL_LENGTH, true);
height =
createLiveAnimatedLength
(null, SVG_HEIGHT_ATTRIBUTE, SVG_SVG_HEIGHT_DEFAULT_VALUE,
SVGOMAnimatedLength.VERTICAL_LENGTH, true);
externalResourcesRequired =
createLiveAnimatedBoolean
(null, SVG_EXTERNAL_RESOURCES_REQUIRED_ATTRIBUTE, false);
preserveAspectRatio = createLiveAnimatedPreserveAspectRatio();
viewBox = createLiveAnimatedRect(null, SVG_VIEW_BOX_ATTRIBUTE, null);
}
/**
 * <b>DOM</b>: Implements {@link Node#getLocalName()}.
 * @return the constant {@code SVG_SVG_TAG}
 */
public String getLocalName() {
return SVG_SVG_TAG;
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#getX()}.
 *
 * @return the live animated value of the 'x' attribute
 */
public SVGAnimatedLength getX() {
    return this.x;
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#getY()}.
 *
 * @return the live animated value of the 'y' attribute
 */
public SVGAnimatedLength getY() {
    return this.y;
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#getWidth()}.
 *
 * @return the live animated value of the 'width' attribute
 */
public SVGAnimatedLength getWidth() {
    return this.width;
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#getHeight()}.
 *
 * @return the live animated value of the 'height' attribute
 */
public SVGAnimatedLength getHeight() {
    return this.height;
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#getContentScriptType()}.
 *
 * @return the raw value of the 'contentScriptType' attribute
 */
public String getContentScriptType() {
    return this.getAttributeNS(null, SVG_CONTENT_SCRIPT_TYPE_ATTRIBUTE);
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#setContentScriptType(String)}.
 *
 * @param type the new value for the 'contentScriptType' attribute
 */
public void setContentScriptType(String type) {
    this.setAttributeNS(null, SVG_CONTENT_SCRIPT_TYPE_ATTRIBUTE, type);
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#getContentStyleType()}.
 *
 * @return the raw value of the 'contentStyleType' attribute
 */
public String getContentStyleType() {
    return this.getAttributeNS(null, SVG_CONTENT_STYLE_TYPE_ATTRIBUTE);
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#setContentStyleType(String)}.
 *
 * @param type the new value for the 'contentStyleType' attribute
 */
public void setContentStyleType(String type) {
    this.setAttributeNS(null, SVG_CONTENT_STYLE_TYPE_ATTRIBUTE, type);
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#getViewport()}.
 *
 * @return a rectangle anchored at (0,0) spanning the current viewport size
 */
public SVGRect getViewport() {
    SVGContext context = getSVGContext();
    return new SVGOMRect(0, 0, context.getViewportWidth(), context.getViewportHeight());
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#getPixelUnitToMillimeterX()}.
 * X and Y directions share the same conversion factor in this implementation.
 */
public float getPixelUnitToMillimeterX() {
    return this.getSVGContext().getPixelUnitToMillimeter();
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#getPixelUnitToMillimeterY()}.
 * X and Y directions share the same conversion factor in this implementation.
 */
public float getPixelUnitToMillimeterY() {
    return this.getSVGContext().getPixelUnitToMillimeter();
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#getScreenPixelToMillimeterX()}.
 * Screen and user-unit conversions are identical here — both delegate to the
 * context's single pixel-to-millimeter factor.
 */
public float getScreenPixelToMillimeterX() {
    return this.getSVGContext().getPixelUnitToMillimeter();
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#getScreenPixelToMillimeterY()}.
 * Screen and user-unit conversions are identical here — both delegate to the
 * context's single pixel-to-millimeter factor.
 */
public float getScreenPixelToMillimeterY() {
    return this.getSVGContext().getPixelUnitToMillimeter();
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#getUseCurrentView()}.
 *
 * @throws UnsupportedOperationException always — not implemented yet
 */
public boolean getUseCurrentView() {
    // XXX not implemented
    throw new UnsupportedOperationException(
            "SVGSVGElement.getUseCurrentView is not implemented");
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#setUseCurrentView(boolean)}.
 *
 * @throws UnsupportedOperationException always — not implemented yet
 */
public void setUseCurrentView(boolean useCurrentView) throws DOMException {
    // XXX not implemented
    throw new UnsupportedOperationException(
            "SVGSVGElement.setUseCurrentView is not implemented");
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#getCurrentView()}.
 *
 * @throws UnsupportedOperationException always — not implemented yet
 */
public SVGViewSpec getCurrentView() {
    // XXX not implemented
    throw new UnsupportedOperationException(
            "SVGSVGElement.getCurrentView is not implemented");
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#getCurrentScale()}.
 * (The previous javadoc linked {@code getCurrentView()} by mistake.)
 *
 * @return the uniform scale factor of the screen transform (square root of its
 *         determinant), or 1 when no screen transform has been set
 */
public float getCurrentScale() {
    AffineTransform transform = getSVGContext().getScreenTransform();
    return (transform == null) ? 1 : (float) Math.sqrt(transform.getDeterminant());
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#setCurrentScale(float)}.
 *
 * <p>Rescales the screen transform so that its uniform scale factor becomes
 * {@code currentScale}, leaving the translation components untouched.
 *
 * <p>Fix: the original null-checked the transform only while computing the
 * current scale, then dereferenced it unconditionally, throwing a
 * NullPointerException when no screen transform was installed yet. A null
 * transform is now treated as the identity.
 *
 * @param currentScale the new zoom factor of the document
 */
public void setCurrentScale(float currentScale) throws DOMException {
    SVGContext context = getSVGContext();
    AffineTransform scrnTrans = context.getScreenTransform();
    if (scrnTrans == null) {
        // No transform installed: start from the identity, i.e. a plain
        // uniform scale by the requested factor.
        context.setScreenTransform(
            AffineTransform.getScaleInstance(currentScale, currentScale));
        return;
    }
    float scale = (float)Math.sqrt(scrnTrans.getDeterminant());
    float delta = currentScale / scale;
    // The way currentScale and currentTranslate are defined, changing the
    // scale has no effect on the translation.
    scrnTrans = new AffineTransform
        (scrnTrans.getScaleX()*delta, scrnTrans.getShearY()*delta,
         scrnTrans.getShearX()*delta, scrnTrans.getScaleY()*delta,
         scrnTrans.getTranslateX(), scrnTrans.getTranslateY());
    context.setScreenTransform(scrnTrans);
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#getCurrentTranslate()}.
 *
 * <p>The returned point is "live": every accessor reads the translation
 * components of the context's current screen transform, and the setters
 * install a new transform with the updated translation.
 */
public SVGPoint getCurrentTranslate() {
    return new SVGPoint() {
        // Fetches the transform lazily so the point always reflects the
        // context's current state rather than a snapshot.
        protected AffineTransform getScreenTransform() {
            SVGContext context = getSVGContext();
            return context.getScreenTransform();
        }
        public float getX() {
            AffineTransform scrnTrans = getScreenTransform();
            return (float)scrnTrans.getTranslateX();
        }
        public float getY() {
            AffineTransform scrnTrans = getScreenTransform();
            return (float)scrnTrans.getTranslateY();
        }
        // AffineTransform is immutable from our point of view: a fresh
        // transform is built with the new translation and installed.
        public void setX(float newX) {
            SVGContext context = getSVGContext();
            AffineTransform scrnTrans = context.getScreenTransform();
            scrnTrans = new AffineTransform
                (scrnTrans.getScaleX(), scrnTrans.getShearY(),
                 scrnTrans.getShearX(), scrnTrans.getScaleY(),
                 newX, scrnTrans.getTranslateY());
            context.setScreenTransform(scrnTrans);
        }
        public void setY(float newY) {
            SVGContext context = getSVGContext();
            AffineTransform scrnTrans = context.getScreenTransform();
            scrnTrans = new AffineTransform
                (scrnTrans.getScaleX(), scrnTrans.getShearY(),
                 scrnTrans.getShearX(), scrnTrans.getScaleY(),
                 scrnTrans.getTranslateX(), newY);
            context.setScreenTransform(scrnTrans);
        }
        // Applies the 2x3 matrix to the current translation and returns a
        // detached (non-live) point with the result.
        public SVGPoint matrixTransform(SVGMatrix mat) {
            AffineTransform scrnTrans = getScreenTransform();
            float x = (float)scrnTrans.getTranslateX();
            float y = (float)scrnTrans.getTranslateY();
            float newX = mat.getA() * x + mat.getC() * y + mat.getE();
            float newY = mat.getB() * x + mat.getD() * y + mat.getF();
            return new SVGOMPoint(newX, newY);
        }
    };
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#suspendRedraw(int)}.
 *
 * <p>The wait time is clamped to the range [0, 60000] milliseconds before
 * being handed to the context.
 */
public int suspendRedraw(int max_wait_milliseconds) {
    int clamped = Math.max(0, Math.min(max_wait_milliseconds, 60000));
    return ((SVGSVGContext) getSVGContext()).suspendRedraw(clamped);
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#unsuspendRedraw(int)}.
 *
 * @throws DOMException with code NOT_FOUND_ERR when the handle is unknown
 */
public void unsuspendRedraw(int suspend_handle_id) throws DOMException {
    boolean known =
        ((SVGSVGContext) getSVGContext()).unsuspendRedraw(suspend_handle_id);
    if (!known) {
        throw createDOMException
            (DOMException.NOT_FOUND_ERR, "invalid.suspend.handle",
             new Object[] {suspend_handle_id});
    }
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#unsuspendRedrawAll()}.
 * Delegates to the associated {@link SVGSVGContext}.
 */
public void unsuspendRedrawAll() {
    ((SVGSVGContext) getSVGContext()).unsuspendRedrawAll();
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#forceRedraw()}.
 * Delegates to the associated {@link SVGSVGContext}.
 */
public void forceRedraw() {
    ((SVGSVGContext) getSVGContext()).forceRedraw();
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#pauseAnimations()}.
 * Delegates to the associated {@link SVGSVGContext}.
 */
public void pauseAnimations() {
    ((SVGSVGContext) getSVGContext()).pauseAnimations();
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#unpauseAnimations()}.
 * Delegates to the associated {@link SVGSVGContext}.
 */
public void unpauseAnimations() {
    ((SVGSVGContext) getSVGContext()).unpauseAnimations();
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#animationsPaused()}.
 * Delegates to the associated {@link SVGSVGContext}.
 */
public boolean animationsPaused() {
    return ((SVGSVGContext) getSVGContext()).animationsPaused();
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#getCurrentTime()}.
 * Delegates to the associated {@link SVGSVGContext}.
 */
public float getCurrentTime() {
    return ((SVGSVGContext) getSVGContext()).getCurrentTime();
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#setCurrentTime(float)}.
 * Delegates to the associated {@link SVGSVGContext}.
 *
 * @param seconds the new document time
 */
public void setCurrentTime(float seconds) {
    ((SVGSVGContext) getSVGContext()).setCurrentTime(seconds);
}
/**
 * <b>DOM</b>: Implements {@link
 * SVGSVGElement#getIntersectionList(SVGRect,SVGElement)}.
 */
public NodeList getIntersectionList(SVGRect rect,
                                    SVGElement referenceElement) {
    // The context computes the hits; wrap the raw List as a DOM NodeList.
    SVGSVGContext context = (SVGSVGContext) getSVGContext();
    return new ListNodeList(context.getIntersectionList(rect, referenceElement));
}
/**
 * <b>DOM</b>: Implements {@link
 * SVGSVGElement#getEnclosureList(SVGRect,SVGElement)}.
 */
public NodeList getEnclosureList(SVGRect rect,
                                 SVGElement referenceElement) {
    // The context computes the enclosed elements; wrap them as a DOM NodeList.
    SVGSVGContext context = (SVGSVGContext) getSVGContext();
    return new ListNodeList(context.getEnclosureList(rect, referenceElement));
}
/**
 * <b>DOM</b>: Implements {@link
 * SVGSVGElement#checkIntersection(SVGElement,SVGRect)}.
 */
public boolean checkIntersection(SVGElement element, SVGRect rect) {
    return ((SVGSVGContext) getSVGContext()).checkIntersection(element, rect);
}
/**
 * <b>DOM</b>: Implements {@link
 * SVGSVGElement#checkEnclosure(SVGElement,SVGRect)}.
 */
public boolean checkEnclosure(SVGElement element, SVGRect rect) {
    return ((SVGSVGContext) getSVGContext()).checkEnclosure(element, rect);
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#deselectAll()}.
 * Delegates to the associated {@link SVGSVGContext}.
 */
public void deselectAll() {
    ((SVGSVGContext)getSVGContext()).deselectAll();
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#createSVGNumber()}.
 *
 * @return a detached, mutable number initialized to 0
 */
public SVGNumber createSVGNumber() {
    return new SVGNumber() {
        protected float number;
        public float getValue() {
            return number;
        }
        public void setValue(float f) {
            number = f;
        }
    };
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#createSVGLength()}.
 *
 * @return a new length associated with this element
 */
public SVGLength createSVGLength() {
    return new SVGOMLength(this);
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#createSVGAngle()}.
 *
 * @return a new detached angle object
 */
public SVGAngle createSVGAngle() {
    return new SVGOMAngle();
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#createSVGPoint()}.
 *
 * @return a new detached point at the origin
 */
public SVGPoint createSVGPoint() {
    return new SVGOMPoint(0, 0);
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#createSVGMatrix()}.
 *
 * @return a new detached matrix backed by an identity {@link AffineTransform}
 */
public SVGMatrix createSVGMatrix() {
    return new AbstractSVGMatrix() {
        protected final AffineTransform transform = new AffineTransform();
        protected AffineTransform getAffineTransform() {
            return transform;
        }
    };
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#createSVGRect()}.
 *
 * @return a new detached, all-zero rectangle
 */
public SVGRect createSVGRect() {
    return new SVGOMRect(0, 0, 0, 0);
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#createSVGTransform()}.
 *
 * @return a new detached matrix-type transform
 */
public SVGTransform createSVGTransform() {
    SVGOMTransform transform = new SVGOMTransform();
    transform.setType(SVGTransform.SVG_TRANSFORM_MATRIX);
    return transform;
}
/**
 * <b>DOM</b>: Implements {@link
 * SVGSVGElement#createSVGTransformFromMatrix(SVGMatrix)}.
 *
 * @param matrix the matrix the new transform is initialized from
 */
public SVGTransform createSVGTransformFromMatrix(SVGMatrix matrix) {
    SVGOMTransform transform = new SVGOMTransform();
    transform.setMatrix(matrix);
    return transform;
}
/**
 * <b>DOM</b>: Implements {@link SVGSVGElement#getElementById(String)}.
 *
 * <p>The lookup is scoped to this element's subtree via the owner document.
 */
public Element getElementById(String elementId) {
    return ownerDocument.getChildElementById(this, elementId);
}
// SVGLocatable ///////////////////////////////////////////////////////
/**
 * <b>DOM</b>: Implements {@link
 * org.w3c.dom.svg.SVGLocatable#getNearestViewportElement()}.
 * Delegates to {@link SVGLocatableSupport}.
 */
public SVGElement getNearestViewportElement() {
    return SVGLocatableSupport.getNearestViewportElement(this);
}
/**
 * <b>DOM</b>: Implements {@link
 * org.w3c.dom.svg.SVGLocatable#getFarthestViewportElement()}.
 * Delegates to {@link SVGLocatableSupport}.
 */
public SVGElement getFarthestViewportElement() {
    return SVGLocatableSupport.getFarthestViewportElement(this);
}
/**
 * <b>DOM</b>: Implements {@link
 * org.w3c.dom.svg.SVGLocatable#getBBox()}.
 * Delegates to {@link SVGLocatableSupport}.
 */
public SVGRect getBBox() {
    return SVGLocatableSupport.getBBox(this);
}
/**
 * <b>DOM</b>: Implements {@link
 * org.w3c.dom.svg.SVGLocatable#getCTM()}.
 * Delegates to {@link SVGLocatableSupport}.
 */
public SVGMatrix getCTM() {
    return SVGLocatableSupport.getCTM(this);
}
/**
 * <b>DOM</b>: Implements {@link
 * org.w3c.dom.svg.SVGLocatable#getScreenCTM()}.
 * Delegates to {@link SVGLocatableSupport}.
 */
public SVGMatrix getScreenCTM() {
    return SVGLocatableSupport.getScreenCTM(this);
}
/**
 * <b>DOM</b>: Implements {@link
 * org.w3c.dom.svg.SVGLocatable#getTransformToElement(SVGElement)}.
 * Delegates to {@link SVGLocatableSupport}.
 */
public SVGMatrix getTransformToElement(SVGElement element)
        throws SVGException {
    return SVGLocatableSupport.getTransformToElement(this, element);
}
// ViewCSS ////////////////////////////////////////////////////////////////
/**
 * <b>DOM</b>: Implements {@link
 * org.w3c.dom.views.AbstractView#getDocument()}.
 *
 * @return the owner document viewed as a {@link DocumentView}
 */
public DocumentView getDocument() {
    return (DocumentView)getOwnerDocument();
}
/**
 * <b>DOM</b>: Implements {@link
 * org.w3c.dom.css.ViewCSS#getComputedStyle(Element,String)}.
 * Forwards to the owner document's default view.
 */
public CSSStyleDeclaration getComputedStyle(Element elt,
                                            String pseudoElt) {
    DocumentView document = (DocumentView) getOwnerDocument();
    ViewCSS view = (ViewCSS) document.getDefaultView();
    return view.getComputedStyle(elt, pseudoElt);
}
// DocumentEvent /////////////////////////////////////////////////////////
/**
 * <b>DOM</b>: Implements {@link
 * org.w3c.dom.events.DocumentEvent#createEvent(String)}.
 * Forwards to the owner document.
 */
public Event createEvent(String eventType) throws DOMException {
    return ((DocumentEvent)getOwnerDocument()).createEvent(eventType);
}
/**
 * <b>DOM</b>: Implements
 * org.w3c.dom.events.DocumentEvent#canDispatch(String,String).
 * Forwards to the owner document.
 */
public boolean canDispatch(String namespaceURI, String type)
        throws DOMException {
    return ((AbstractDocument) getOwnerDocument()).canDispatch(namespaceURI, type);
}
// DocumentCSS ////////////////////////////////////////////////////////////
/**
 * <b>DOM</b>: Implements {@link
 * org.w3c.dom.stylesheets.DocumentStyle#getStyleSheets()}.
 * Forwards to the owner document.
 */
public StyleSheetList getStyleSheets() {
    return ((DocumentStyle)getOwnerDocument()).getStyleSheets();
}
/**
 * <b>DOM</b>: Implements {@link
 * org.w3c.dom.css.DocumentCSS#getOverrideStyle(Element,String)}.
 * Forwards to the owner document.
 */
public CSSStyleDeclaration getOverrideStyle(Element elt,
                                            String pseudoElt) {
    return ((DocumentCSS)getOwnerDocument()).getOverrideStyle(elt,
                                                              pseudoElt);
}
// SVGLangSpace support //////////////////////////////////////////////////
/**
 * <b>DOM</b>: Returns the xml:lang attribute value.
 * Resolution is delegated to {@link XMLSupport}.
 */
public String getXMLlang() {
    return XMLSupport.getXMLLang(this);
}
/**
 * <b>DOM</b>: Sets the xml:lang attribute value.
 *
 * @param lang the new language tag
 */
public void setXMLlang(String lang) {
    setAttributeNS(XML_NAMESPACE_URI, XML_LANG_QNAME, lang);
}
/**
 * <b>DOM</b>: Returns the xml:space attribute value.
 * Resolution is delegated to {@link XMLSupport}.
 */
public String getXMLspace() {
    return XMLSupport.getXMLSpace(this);
}
/**
 * <b>DOM</b>: Sets the xml:space attribute value.
 *
 * @param space the new whitespace handling value
 */
public void setXMLspace(String space) {
    setAttributeNS(XML_NAMESPACE_URI, XML_SPACE_QNAME, space);
}
// SVGZoomAndPan support ///////////////////////////////////////////////
/**
 * <b>DOM</b>: Implements {@link
 * org.w3c.dom.svg.SVGZoomAndPan#getZoomAndPan()}.
 * Delegates to {@link SVGZoomAndPanSupport}.
 */
public short getZoomAndPan() {
    return SVGZoomAndPanSupport.getZoomAndPan(this);
}
/**
 * <b>DOM</b>: Implements {@link
 * org.w3c.dom.svg.SVGZoomAndPan#setZoomAndPan(short)}.
 * Delegates to {@link SVGZoomAndPanSupport}.
 */
public void setZoomAndPan(short val) {
    SVGZoomAndPanSupport.setZoomAndPan(this, val);
}
// SVGFitToViewBox support ////////////////////////////////////////////
/**
 * <b>DOM</b>: Implements {@link
 * org.w3c.dom.svg.SVGFitToViewBox#getViewBox()}.
 *
 * @return the live animated value object cached on this element
 */
public SVGAnimatedRect getViewBox() {
    return viewBox;
}
/**
 * <b>DOM</b>: Implements {@link
 * org.w3c.dom.svg.SVGFitToViewBox#getPreserveAspectRatio()}.
 *
 * @return the live animated value object cached on this element
 */
public SVGAnimatedPreserveAspectRatio getPreserveAspectRatio() {
    return preserveAspectRatio;
}
// SVGExternalResourcesRequired support /////////////////////////////
/**
 * <b>DOM</b>: Implements {@link
 * org.w3c.dom.svg.SVGExternalResourcesRequired#getExternalResourcesRequired()}.
 *
 * @return the live animated value object cached on this element
 */
public SVGAnimatedBoolean getExternalResourcesRequired() {
    return externalResourcesRequired;
}
// SVGTests support ///////////////////////////////////////////////////
/**
 * <b>DOM</b>: Implements {@link
 * org.w3c.dom.svg.SVGTests#getRequiredFeatures()}.
 * Delegates to {@link SVGTestsSupport}.
 */
public SVGStringList getRequiredFeatures() {
    return SVGTestsSupport.getRequiredFeatures(this);
}
/**
 * <b>DOM</b>: Implements {@link
 * org.w3c.dom.svg.SVGTests#getRequiredExtensions()}.
 * Delegates to {@link SVGTestsSupport}.
 */
public SVGStringList getRequiredExtensions() {
    return SVGTestsSupport.getRequiredExtensions(this);
}
/**
 * <b>DOM</b>: Implements {@link
 * org.w3c.dom.svg.SVGTests#getSystemLanguage()}.
 * Delegates to {@link SVGTestsSupport}.
 */
public SVGStringList getSystemLanguage() {
    return SVGTestsSupport.getSystemLanguage(this);
}
/**
 * <b>DOM</b>: Implements {@link
 * org.w3c.dom.svg.SVGTests#hasExtension(String)}.
 * Delegates to {@link SVGTestsSupport}.
 */
public boolean hasExtension(String extension) {
    return SVGTestsSupport.hasExtension(this, extension);
}
/**
 * Returns the AttributeInitializer for this element type.
 *
 * @return the shared class-level initializer, or null if this element has
 *         no attribute with a default value
 */
protected AttributeInitializer getAttributeInitializer() {
    return attributeInitializer;
}
/**
 * Returns a new uninitialized instance of this object's class.
 *
 * @return a fresh {@link SVGOMSVGElement} built via its no-argument constructor
 */
protected Node newNode() {
    return new SVGOMSVGElement();
}
/**
 * Returns the table of TraitInformation objects for this element.
 *
 * @return the shared class-level trait table
 */
protected DoublyIndexedTable getTraitInformationTable() {
    return xmlTraitInformation;
}
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.collect.nestedset;
import static java.util.stream.Collectors.joining;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.devtools.build.lib.collect.compacthashset.CompactHashSet;
import java.util.AbstractCollection;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import javax.annotation.Nullable;
/**
* A list-like iterable that supports efficient nesting.
*
* @see NestedSetBuilder
*/
@SuppressWarnings("unchecked")
public final class NestedSet<E> implements Iterable<E> {
  // Traversal/ordering policy for this set; fixed at construction.
  private final Order order;
  // Either a single leaf element (a singleton set) or an Object[] mixing leaf
  // elements (E) and nested Object[] arrays belonging to transitive subsets.
  private final Object children;
  // Bit vector recording which edges of a depth-first traversal contributed
  // new elements (see walk/replay). LEAF_MEMO marks a flat, duplicate-free
  // array needing no traversal; null means "not flattened yet" (filled
  // lazily by lockedExpand()).
  private byte[] memo;
  private static final byte[] LEAF_MEMO = {};
  private static final Object[] EMPTY_CHILDREN = {};
  /**
   * Construct an empty NestedSet. Should only be called by Order's class initializer.
   */
  NestedSet(Order order) {
    this.order = order;
    this.children = EMPTY_CHILDREN;
    this.memo = LEAF_MEMO;
  }
  // Builds the internal children representation from direct members and
  // transitive subsets, arranging them according to the requested Order.
  NestedSet(Order order, Set<E> direct, Set<NestedSet<E>> transitive) {
    this.order = order;
    // The iteration order of these collections is the order in which we add the items.
    Collection<E> directOrder = direct;
    Collection<NestedSet<E>> transitiveOrder = transitive;
    // True if we visit the direct members before the transitive members.
    boolean preorder;
    switch(order) {
      case LINK_ORDER:
        // LINK_ORDER is produced by storing everything reversed and
        // un-reversing at toList() time.
        directOrder = ImmutableList.copyOf(direct).reverse();
        transitiveOrder = ImmutableList.copyOf(transitive).reverse();
        preorder = false;
        break;
      case STABLE_ORDER:
      case COMPILE_ORDER:
        preorder = false;
        break;
      case NAIVE_LINK_ORDER:
        preorder = true;
        break;
      default:
        throw new AssertionError(order);
    }
    // Remember children we extracted from one-element subsets.
    // Otherwise we can end up with two of the same child, which is a
    // problem for the fast path in toList().
    Set<E> alreadyInserted = ImmutableSet.of();
    // The candidate array of children.
    Object[] children = new Object[direct.size() + transitive.size()];
    int n = 0; // current position in children
    boolean leaf = true; // until we find otherwise
    for (int pass = 0; pass <= 1; ++pass) {
      if ((pass == 0) == preorder && !direct.isEmpty()) {
        for (E member : directOrder) {
          // Object[] is reserved for the internal representation, so it
          // cannot double as a member type.
          if (member instanceof Object[]) {
            throw new IllegalArgumentException("cannot store Object[] in NestedSet");
          }
          if (!alreadyInserted.contains(member)) {
            children[n++] = member;
          }
        }
        alreadyInserted = direct;
      } else if ((pass == 1) == preorder && !transitive.isEmpty()) {
        CompactHashSet<E> hoisted = CompactHashSet.create();
        for (NestedSet<E> subset : transitiveOrder) {
          Object c = subset.children;
          if (c instanceof Object[]) {
            Object[] a = (Object[]) c;
            // Invariant: a stored Object[] always has >= 2 entries; smaller
            // sets are dereferenced to a single item (see end of ctor).
            if (a.length < 2) {
              throw new AssertionError(a.length);
            }
            children[n++] = a;
            leaf = false;
          } else {
            // Singleton subset: hoist its one child, de-duplicating against
            // both the direct members and other hoisted children.
            if (!alreadyInserted.contains(c) && hoisted.add((E) c)) {
              children[n++] = c;
            }
          }
        }
        alreadyInserted = hoisted;
      }
    }
    // If we ended up wrapping exactly one item or one other set, dereference it.
    if (n == 1) {
      this.children = children[0];
    } else if (n == 0) {
      this.children = EMPTY_CHILDREN;
    } else if (n < children.length) {
      this.children = Arrays.copyOf(children, n);
    } else {
      this.children = children;
    }
    if (leaf) {
      // A flat, duplicate-free array: toList() can skip the traversal.
      this.memo = LEAF_MEMO;
    }
  }
  /**
   * Returns the ordering of this nested set.
   */
  public Order getOrder() {
    return order;
  }
  /**
   * Returns the internal item or array. For use by NestedSetVisitor and NestedSetView. Those two
   * classes also have knowledge of the internal implementation of NestedSet.
   */
  Object rawChildren() {
    return children;
  }
  /**
   * Returns true if the set is empty. Runs in O(1) time (i.e. does not flatten the set).
   */
  public boolean isEmpty() {
    // Identity comparison is intentional: the empty sentinel is unique.
    return children == EMPTY_CHILDREN;
  }
  /**
   * Returns true if the set has exactly one element.
   */
  private boolean isSingleton() {
    // Anything that is not the internal Object[] representation is a
    // single leaf element.
    return !(children instanceof Object[]);
  }
  /**
   * Returns a collection of all unique elements of this set (including subsets)
   * in an implementation-specified order as a {@code Collection}.
   *
   * <p>If you do not need a Collection and an Iterable is enough, use the
   * nested set itself as an Iterable.
   */
  public Collection<E> toCollection() {
    return toList();
  }
  /**
   * Returns a collection of all unique elements of this set (including subsets)
   * in an implementation-specified order as a {@code List}.
   *
   * <p>Use {@link #toCollection} when possible for better efficiency.
   */
  public List<E> toList() {
    if (isSingleton()) {
      return ImmutableList.of((E) children);
    }
    if (isEmpty()) {
      return ImmutableList.of();
    }
    // LINK_ORDER was stored reversed (see the constructor); undo that here.
    return order == Order.LINK_ORDER ? expand().reverse() : expand();
  }
  /**
   * Returns a collection of all unique elements of this set (including subsets)
   * in an implementation-specified order as a {@code Set}.
   *
   * <p>Use {@link #toCollection} when possible for better efficiency.
   */
  public Set<E> toSet() {
    return ImmutableSet.copyOf(toList());
  }
  /**
   * Returns true if this set is equal to {@code other} based on the top-level
   * elements and object identity (==) of direct subsets. As such, this function
   * can fail to equate {@code this} with another {@code NestedSet} that holds
   * the same elements. It will never fail to detect that two {@code NestedSet}s
   * are different, however.
   *
   * @param other the {@code NestedSet} to compare against.
   */
  public boolean shallowEquals(@Nullable NestedSet<? extends E> other) {
    if (this == other) {
      return true;
    }
    return other != null
        && order == other.order
        && (children.equals(other.children)
            || (!isSingleton() && !other.isSingleton()
                && Arrays.equals((Object[]) children, (Object[]) other.children)));
  }
  /**
   * Returns a hash code that produces a notion of identity that is consistent with
   * {@link #shallowEquals}. In other words, if two {@code NestedSet}s are equal according
   * to {@code #shallowEquals}, then they return the same {@code shallowHashCode}.
   *
   * <p>The main reason for having these separate functions instead of reusing
   * the standard equals/hashCode is to minimize accidental use, since they are
   * different from both standard Java objects and collection-like objects.
   */
  public int shallowHashCode() {
    return isSingleton()
        ? Objects.hash(order, children)
        : Objects.hash(order, Arrays.hashCode((Object[]) children));
  }
  @Override
  public String toString() {
    return isSingleton() ? "{" + children + "}" : childrenToString(children);
  }
  // TODO: this leaves LINK_ORDER backwards
  // Renders the raw children tree (leaves and nested arrays) as nested braces.
  private static String childrenToString(Object children) {
    if (children instanceof Object[]) {
      return Arrays.stream((Object[]) children)
          .map(NestedSet::childrenToString)
          .collect(joining(", ", "{", "}"));
    } else {
      return children.toString();
    }
  }
  // Function adapter over childrenToString.
  // NOTE(review): this private enum has no callers within this class —
  // candidate for removal once confirmed dead.
  private enum Stringer implements Function<Object, String> {
    INSTANCE;
    @Override public String apply(Object o) {
      return childrenToString(o);
    }
  }
  @Override
  public Iterator<E> iterator() {
    // TODO: would it help to have a proper lazy iterator? seems like it might reduce garbage.
    return toCollection().iterator();
  }
  /**
   * Implementation of {@link #toList}. Uses one of three strategies based on the value of
   * {@code this.memo}: wrap our direct items in a list, call {@link #lockedExpand} to perform
   * the initial {@link #walk}, or call {@link #replay} if we have a nontrivial memo.
   */
  private ImmutableList<E> expand() {
    // This value is only set in the constructor, so safe to test here with no lock.
    if (memo == LEAF_MEMO) {
      return ImmutableList.copyOf(new ArraySharingCollection<>((Object[]) children));
    }
    CompactHashSet<E> members = lockedExpand();
    if (members != null) {
      return ImmutableList.copyOf(members);
    }
    // A previous call already walked the graph; replay its recorded decisions.
    Object[] children = (Object[]) this.children;
    // TODO: We could record the exact size (inside memo, or by making order an int with two bits
    // for Order.ordinal()) and avoid an array copy here. It's not directly visible in profiles but
    // it would reduce garbage generated.
    ImmutableList.Builder<E> output = ImmutableList.builder();
    replay(output, children, memo, 0);
    return output.build();
  }
  // Hack to share our internal array with ImmutableList/ImmutableSet, or avoid
  // a copy in cases where we can preallocate an array of the correct size.
  private static final class ArraySharingCollection<E> extends AbstractCollection<E> {
    private final Object[] array;
    ArraySharingCollection(Object[] array) {
      this.array = array;
    }
    // Returns the backing array itself rather than a copy — that is the
    // whole point of this class; callers must not mutate it.
    @Override public Object[] toArray() {
      return array;
    }
    @Override public int size() {
      return array.length;
    }
    @Override public Iterator<E> iterator() {
      throw new UnsupportedOperationException();
    }
  }
  /**
   * If this is the first call for this object, fills {@code this.memo} and returns a set from
   * {@link #walk}. Otherwise returns null; the caller should use {@link #replay} instead.
   */
  private synchronized CompactHashSet<E> lockedExpand() {
    if (memo != null) {
      // An earlier call (possibly from another thread) already expanded this set.
      return null;
    }
    Object[] children = (Object[]) this.children;
    CompactHashSet<E> members = CompactHashSet.createWithExpectedSize(128);
    CompactHashSet<Object> sets = CompactHashSet.createWithExpectedSize(128);
    sets.add(children);
    // Start small (at most 8 bytes); walk() grows the bit vector by doubling.
    memo = new byte[Math.min((children.length + 7) / 8, 8)];
    int pos = walk(sets, members, children, 0);
    int bytes = (pos + 7) / 8;
    // Trim the memo only when that saves a meaningful amount of space.
    if (bytes <= memo.length - 16) {
      memo = Arrays.copyOf(memo, bytes);
    }
    return members;
  }
  /**
   * Perform a depth-first traversal of {@code children}, tracking visited
   * arrays in {@code sets} and visited leaves in {@code members}. We also
   * record which edges were taken in {@code this.memo} starting at {@code pos}.
   *
   * Returns the final value of {@code pos}.
   */
  private int walk(CompactHashSet<Object> sets, CompactHashSet<E> members,
                   Object[] children, int pos) {
    for (Object child : children) {
      // Grow the memo bit vector on demand; one bit per traversal step.
      if ((pos >> 3) >= memo.length) {
        memo = Arrays.copyOf(memo, memo.length * 2);
      }
      if (child instanceof Object[]) {
        if (sets.add(child)) {
          int prepos = pos;
          int presize = members.size();
          pos = walk(sets, members, (Object[]) child, pos + 1);
          if (presize < members.size()) {
            // The subtree contributed new members: mark this edge as taken.
            memo[prepos >> 3] |= (byte) (1 << (prepos & 7));
          } else {
            // We didn't find any new nodes, so don't mark this branch as taken.
            // Rewind pos. The rest of the array is still zeros because no one
            // deeper in the traversal set any bits.
            pos = prepos + 1;
          }
        } else {
          ++pos;
        }
      } else {
        if (members.add((E) child)) {
          // First time we see this leaf: record it in the memo.
          memo[pos >> 3] |= (byte) (1 << (pos & 7));
        }
        ++pos;
      }
    }
    return pos;
  }
  /**
   * Repeat a previous traversal of {@code children} performed by {@link #walk}
   * and recorded in {@code memo}, appending leaves to {@code output}.
   */
  private static <E> int replay(ImmutableList.Builder<E> output, Object[] children,
                                byte[] memo, int pos) {
    for (Object child : children) {
      // A set bit means walk() took this edge (new subtree or new leaf).
      if ((memo[pos >> 3] & (1 << (pos & 7))) != 0) {
        if (child instanceof Object[]) {
          pos = replay(output, (Object[]) child, memo, pos + 1);
        } else {
          output.add((E) child);
          ++pos;
        }
      } else {
        ++pos;
      }
    }
    return pos;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.jmeter.protocol.http.control;
import java.io.Serializable;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.http.conn.DnsResolver;
import org.apache.http.impl.conn.SystemDefaultDnsResolver;
import org.apache.jmeter.config.ConfigTestElement;
import org.apache.jmeter.engine.event.LoopIterationEvent;
import org.apache.jmeter.testelement.TestIterationListener;
import org.apache.jmeter.testelement.property.BooleanProperty;
import org.apache.jmeter.testelement.property.CollectionProperty;
import org.apache.jmeter.testelement.property.JMeterProperty;
import org.apache.jmeter.testelement.property.NullProperty;
import org.apache.jmeter.testelement.property.PropertyIterator;
import org.apache.jmeter.testelement.property.TestElementProperty;
import org.apache.jmeter.threads.JMeterContextService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xbill.DNS.ARecord;
import org.xbill.DNS.Cache;
import org.xbill.DNS.ExtendedResolver;
import org.xbill.DNS.Lookup;
import org.xbill.DNS.Record;
import org.xbill.DNS.Resolver;
import org.xbill.DNS.TextParseException;
import org.xbill.DNS.Type;
/**
* This config element provides the ability to control the DNS caching
* function flexibly. Depending on the option chosen in
* {@link org.apache.jmeter.protocol.http.gui.DNSCachePanel}, either the system
* resolver or a custom resolver can be used. The custom resolver uses the
* dnsjava library and can bypass both the OS and the JVM cache, allowing the
* paradigm "1 virtual user - 1 DNS cache" in performance tests.
*
* @since 2.12
*/
public class DNSCacheManager extends ConfigTestElement implements TestIterationListener, Serializable, DnsResolver {
    private static final long serialVersionUID = 2122L;
    private static final Logger log = LoggerFactory.getLogger(DNSCacheManager.class);
    public static final boolean DEFAULT_CLEAR_CACHE_EACH_ITER = false;
    //++ JMX tag values
    private static final String CLEAR_CACHE_EACH_ITER = "DNSCacheManager.clearEachIteration"; // $NON-NLS-1$
    private static final String SERVERS = "DNSCacheManager.servers"; // $NON-NLS-1$
    private static final String HOSTS = "DNSCacheManager.hosts"; // $NON-NLS-1$
    private static final String IS_CUSTOM_RESOLVER = "DNSCacheManager.isCustomResolver"; // $NON-NLS-1$
    //-- JMX tag values
    private static final boolean DEFAULT_IS_CUSTOM_RESOLVER = false;
    // dnsjava cache; configured in the constructor with zero capacity so every
    // lookup actually hits the resolver.
    private final transient Cache lookupCache;
    private final transient SystemDefaultDnsResolver systemDefaultDnsResolver;
    // host -> resolved addresses; may map a host to null (see resolve()).
    final Map<String, InetAddress[]> cache;
    transient Resolver resolver;
    private transient int timeoutMs;
    // Set when building the custom resolver fails; checked elsewhere before use.
    transient boolean initFailed;
    // ensure that the initial DNSServers are copied to the per-thread instances
public DNSCacheManager() {
setProperty(new CollectionProperty(SERVERS, new ArrayList<String>()));
this.systemDefaultDnsResolver = new SystemDefaultDnsResolver();
this.cache = new LinkedHashMap<>();
//disabling cache
lookupCache = new Cache();
lookupCache.setMaxCache(0);
lookupCache.setMaxEntries(0);
}
/**
 * {@inheritDoc}
 *
 * <p>Each clone gets its own {@link Resolver} so per-thread instances do not
 * share resolver state.
 */
@Override
public Object clone() {
    DNSCacheManager clonedManager = (DNSCacheManager) super.clone();
    clonedManager.resolver = createResolver();
    return clonedManager;
}
/**
 * Builds the custom dnsjava resolver from the configured server list.
 *
 * @return a round-robin {@link ExtendedResolver}, or null (with
 *         {@code initFailed} set) when a server name cannot be resolved
 */
private Resolver createResolver() {
    CollectionProperty dnsServers = getServers();
    try {
        String[] serverNames = new String[dnsServers.size()];
        int index = 0;
        for (JMeterProperty jMeterProperty : dnsServers) {
            serverNames[index++] = jMeterProperty.getStringValue();
        }
        ExtendedResolver result = new ExtendedResolver(serverNames);
        if (log.isDebugEnabled()) {
            log.debug("Using DNS Resolvers: {}", Arrays.asList(result.getResolvers()));
        }
        // resolvers will be chosen via round-robin
        result.setLoadBalance(true);
        return result;
    } catch (UnknownHostException uhe) {
        this.initFailed = true;
        log.warn("Failed to create Extended resolver: {}", uhe.getMessage(), uhe);
        return null;
    }
}
/**
 * Resolves an address using the system or the custom DNS resolver,
 * consulting the per-manager cache first.
 */
@Override
public InetAddress[] resolve(String host) throws UnknownHostException {
    // The cache may legitimately map a host to null, so containsKey() must be
    // used rather than a null check on get() alone (see the LinkedHashMap
    // javadoc on get()).
    if (cache.containsKey(host)) {
        InetAddress[] cached = cache.get(host);
        if (log.isDebugEnabled()) {
            log.debug("Cache hit thr#{}: {} => {}", JMeterContextService.getContext().getThreadNum(), host,
                    Arrays.toString(cached));
        }
        return cached;
    }
    if (isStaticHost(host)) {
        InetAddress[] staticAddresses = fromStaticHost(host);
        if (log.isDebugEnabled()) {
            log.debug("Cache miss thr#{}: (static) {} => {}", JMeterContextService.getContext().getThreadNum(), host,
                    Arrays.toString(staticAddresses));
        }
        cache.put(host, staticAddresses);
        return staticAddresses;
    }
    InetAddress[] addresses = requestLookup(host);
    if (log.isDebugEnabled()) {
        log.debug("Cache miss thr#{}: {} => {}", JMeterContextService.getContext().getThreadNum(), host,
                Arrays.toString(addresses));
    }
    cache.put(host, addresses);
    return addresses;
}
/**
 * Returns true when {@code host} is listed (case-insensitively) among the
 * statically configured host entries.
 */
private boolean isStaticHost(String host) {
    JMeterProperty hostsProperty = getProperty(HOSTS);
    if (hostsProperty instanceof NullProperty) {
        // Drop the stale property so later lookups short-circuit the same way.
        removeProperty(HOSTS);
        return false;
    }
    PropertyIterator entries = ((CollectionProperty) hostsProperty).iterator();
    while (entries.hasNext()) {
        TestElementProperty possibleEntry = (TestElementProperty) entries.next();
        if (log.isDebugEnabled()) {
            log.debug("Look for {} at {}: {}", host, possibleEntry.getObjectValue(), possibleEntry.getObjectValue().getClass());
        }
        StaticHost candidate = (StaticHost) possibleEntry.getObjectValue();
        if (candidate.getName().equalsIgnoreCase(host)) {
            if (log.isDebugEnabled()) {
                log.debug("Found static host: {} => {}", host, candidate.getAddress());
            }
            return true;
        }
    }
    log.debug("No static host found for {}", host);
    return false;
}
private InetAddress[] fromStaticHost(String host) {
JMeterProperty p = getProperty(HOSTS);
if (p instanceof NullProperty) {
removeProperty(HOSTS);
return new InetAddress[0];
}
CollectionProperty property = (CollectionProperty) p;
PropertyIterator iterator = property.iterator();
while (iterator.hasNext()) {
StaticHost entry = (StaticHost) ((TestElementProperty)iterator.next()).getObjectValue();
if (entry.getName().equals(host)) {
List<InetAddress> addresses = new ArrayList<>();
for (String address : Arrays.asList(entry.getAddress().split("\\s*,\\s*"))) {
try {
final InetAddress[] requestLookup = requestLookup(address);
if (requestLookup == null) {
addAsLiteralAddress(addresses, address);
} else {
addresses.addAll(Arrays.asList(requestLookup));
}
} catch (UnknownHostException e) {
addAsLiteralAddress(addresses, address);
log.warn("Couldn't resolve static address {} for host {}", address, host, e);
}
}
return addresses.toArray(new InetAddress[addresses.size()]);
}
}
return new InetAddress[0];
}
private void addAsLiteralAddress(List<InetAddress> addresses,
String address) {
try {
addresses.add(InetAddress.getByName(address));
} catch (UnknownHostException e) {
log.info("Couldn't convert {} as literal address to InetAddress", address, e);
}
}
/**
* Sends DNS request via system or custom DNS resolver
* @param host Host
* @return array of {@link InetAddress} or null if lookup did not return result
*/
private InetAddress[] requestLookup(String host) throws UnknownHostException {
InetAddress[] addresses = null;
if (isCustomResolver()) {
ExtendedResolver extendedResolver = getResolver();
if (extendedResolver != null) {
if(extendedResolver.getResolvers().length > 0) {
try {
Lookup lookup = new Lookup(host, Type.A);
lookup.setCache(lookupCache);
if (timeoutMs > 0) {
resolver.setTimeout(timeoutMs / 1000, timeoutMs % 1000);
}
lookup.setResolver(resolver);
Record[] records = lookup.run();
if (records == null || records.length == 0) {
throw new UnknownHostException("Failed to resolve host name: " + host);
}
addresses = new InetAddress[records.length];
for (int i = 0; i < records.length; i++) {
addresses[i] = ((ARecord) records[i]).getAddress();
}
} catch (TextParseException tpe) {
log.debug("Failed to create Lookup object: " + tpe);
}
return addresses;
}
} else {
throw new UnknownHostException("Could not resolve host:"+host
+", failed to initialize resolver"
+ " or no resolver found");
}
}
addresses = systemDefaultDnsResolver.resolve(host);
if (log.isDebugEnabled()) {
log.debug("Cache miss: {} Thread #{}, resolved with system resolver into {}", host,
JMeterContextService.getContext().getThreadNum(), Arrays.toString(addresses));
}
return addresses;
}
    /**
     * Lazily initializes the custom resolver; a failed initialization is
     * remembered via {@code initFailed} so it is not retried on every call.
     * @return ExtendedResolver if init succeeded or null otherwise
     */
    private ExtendedResolver getResolver() {
        // createResolver() presumably sets initFailed on failure — TODO confirm
        if(resolver == null && !initFailed) {
            resolver = createResolver();
        }
        return (ExtendedResolver) resolver;
    }
    /**
     * {@inheritDoc} Clean DNS cache if appropriate check-box was selected
     */
    @Override
    public void testIterationStart(LoopIterationEvent event) {
        // per-iteration reset, controlled by the "clear cache each iteration" option
        if (isClearEachIteration()) {
            this.cache.clear();
        }
    }
    /**
     * {@inheritDoc}
     * <p>Resets servers, static hosts, the DNS cache and the resolver state.</p>
     */
    @Override
    public void clear() {
        super.clear();
        clearServers(); // ensure data is set up OK initially
        clearHosts();
        this.cache.clear();
        this.initFailed = false;
        this.resolver = null; // lazily re-created by getResolver()
    }
    /**
     * Remove all the servers.
     */
    private void clearServers() {
        log.debug("Clear all servers from store");
        // replace the SERVERS property with an empty collection
        setProperty(new CollectionProperty(SERVERS, new ArrayList<String>()));
    }
    /**
     * Add DNS Server
     * @param dnsServer DNS Server address appended to the SERVERS collection
     */
    public void addServer(String dnsServer) {
        getServers().addItem(dnsServer);
    }
    /**
     * @return DNS Servers (the SERVERS collection property of this element)
     */
    public CollectionProperty getServers() {
        return (CollectionProperty) getProperty(SERVERS);
    }
    /**
     * Clear static hosts
     * <p>Also drops the resolution cache, since cached answers may have come
     * from removed static entries.</p>
     */
    private void clearHosts() {
        log.debug("Clear all hosts from store");
        removeProperty(HOSTS);
        cache.clear();
    }
    /**
     * Add static host
     * @param dnsHost DNS host
     * @param addresses Comma separated list of addresses
     */
    public void addHost(String dnsHost, String addresses) {
        getHosts().addItem(new StaticHost(dnsHost, addresses));
        // invalidate cached lookups so the new static entry takes effect
        cache.clear();
    }
    /**
     * @return the static hosts collection, creating an empty one when absent
     */
    public CollectionProperty getHosts() {
        if (getProperty(HOSTS) instanceof NullProperty) {
            setProperty(new CollectionProperty(HOSTS, new ArrayList<StaticHost>()));
        }
        return (CollectionProperty) getProperty(HOSTS);
    }
    /**
     * Clean DNS cache each iteration
     *
     * @return true if the DNS cache should be cleared at the start of each iteration
     */
    public boolean isClearEachIteration() {
        return this.getPropertyAsBoolean(CLEAR_CACHE_EACH_ITER, DEFAULT_CLEAR_CACHE_EACH_ITER);
    }
    /**
     * Clean DNS cache each iteration
     * <p>Stored as a {@code BooleanProperty} on this test element.</p>
     *
     * @param clear
     *            flag whether DNS cache should be cleared on each iteration
     */
    public void setClearEachIteration(boolean clear) {
        setProperty(new BooleanProperty(CLEAR_CACHE_EACH_ITER, clear));
    }
    /** @return true if the custom DNS resolver should be used instead of the system one */
    public boolean isCustomResolver() {
        return this.getPropertyAsBoolean(IS_CUSTOM_RESOLVER, DEFAULT_IS_CUSTOM_RESOLVER);
    }
    /** @param isCustomResolver whether to use the custom DNS resolver instead of the system one */
    public void setCustomResolver(boolean isCustomResolver) {
        this.setProperty(IS_CUSTOM_RESOLVER, isCustomResolver);
    }
    /**
     * Sets DNS resolution timeout.
     *
     * @param timeoutMs timeout in milliseconds
     */
    void setTimeoutMs(int timeoutMs) {
        // kept as a plain field, not a JMeter property
        this.timeoutMs = timeoutMs;
    }
    /**
     * Returns DNS resolution timeout in milliseconds.
     * <p>Package-private accessor for the plain {@code timeoutMs} field.</p>
     *
     * @return DNS resolution timeout in milliseconds
     */
    int getTimeoutMs() {
        return timeoutMs;
    }
}
| |
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.projectView.impl;
import com.intellij.ide.PsiCopyPasteManager;
import com.intellij.ide.projectView.BaseProjectTreeBuilder;
import com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode;
import com.intellij.ide.ui.customization.CustomizationUtil;
import com.intellij.ide.util.treeView.*;
import com.intellij.lang.LangBundle;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.ActionPlaces;
import com.intellij.openapi.actionSystem.IdeActions;
import com.intellij.openapi.actionSystem.impl.ActionMenu;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.colors.TextAttributesKey;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.ActionCallback;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.ui.ScrollPaneFactory;
import com.intellij.ui.TreeSpeedSearch;
import com.intellij.ui.stripe.ErrorStripe;
import com.intellij.ui.stripe.ErrorStripePainter;
import com.intellij.ui.stripe.TreeUpdater;
import com.intellij.util.EditSourceOnDoubleClickHandler;
import com.intellij.util.EditSourceOnEnterKeyHandler;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.ui.tree.TreeUtil;
import com.intellij.util.ui.update.Activatable;
import com.intellij.util.ui.update.UiNotifyConnector;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreePath;
import javax.swing.tree.TreeSelectionModel;
import java.awt.*;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.StringTokenizer;
public abstract class AbstractProjectViewPSIPane extends AbstractProjectViewPane {
  // created lazily in createComponent() when no legacy builder is available; nulled on dispose
  private AsyncProjectViewSupport myAsyncSupport;
  private JScrollPane myComponent;
  protected AbstractProjectViewPSIPane(@NotNull Project project) {
    super(project);
  }
  /**
   * Builds (or refreshes) the Swing component for this pane: the project
   * tree inside a scroll pane, optional error-stripe painting, and either a
   * legacy tree builder or the async view support as the model provider.
   */
  @NotNull
  @Override
  public JComponent createComponent() {
    if (myComponent != null) {
      // component already exists: just refresh its look-and-feel and reuse it
      SwingUtilities.updateComponentTreeUI(myComponent);
      return myComponent;
    }
    DefaultMutableTreeNode rootNode = new DefaultMutableTreeNode(null);
    DefaultTreeModel treeModel = new DefaultTreeModel(rootNode);
    myTree = createTree(treeModel);
    enableDnD();
    myComponent = ScrollPaneFactory.createScrollPane(myTree);
    if (Registry.is("error.stripe.enabled")) {
      // paint per-node error stripes next to the scroll bar
      ErrorStripePainter painter = new ErrorStripePainter(true);
      Disposer.register(this, new TreeUpdater<>(painter, myComponent, myTree) {
        @Override
        protected void update(ErrorStripePainter painter, int index, Object object) {
          if (object instanceof DefaultMutableTreeNode) {
            DefaultMutableTreeNode node = (DefaultMutableTreeNode)object;
            object = node.getUserObject();
          }
          super.update(painter, index, getStripe(object, myTree.isExpanded(index)));
        }
      });
    }
    myTreeStructure = createStructure();
    BaseProjectTreeBuilder treeBuilder = createBuilder(treeModel);
    if (treeBuilder != null) {
      installComparator(treeBuilder);
      setTreeBuilder(treeBuilder);
    }
    else {
      // no legacy builder: drive the tree with the async project view support
      myAsyncSupport = new AsyncProjectViewSupport(this, myProject, myTreeStructure, createComparator());
      myAsyncSupport.setModelTo(myTree);
    }
    initTree();
    // save/restore expanded paths when the tree component is hidden/shown
    Disposer.register(this, new UiNotifyConnector(myTree, new Activatable() {
      private boolean showing;
      @Override
      public void showNotify() {
        if (!showing) {
          showing = true;
          restoreExpandedPaths();
        }
      }
      @Override
      public void hideNotify() {
        if (showing) {
          showing = false;
          saveExpandedPaths();
        }
      }
    }));
    return myComponent;
  }
  @Override
  protected void installComparator(AbstractTreeBuilder builder, @NotNull Comparator<? super NodeDescriptor<?>> comparator) {
    if (myAsyncSupport != null) {
      myAsyncSupport.setComparator(comparator);
    }
    super.installComparator(builder, comparator);
  }
  @Override
  public final void dispose() {
    myAsyncSupport = null;
    myComponent = null;
    super.dispose();
  }
  /** Configures selection mode, edit/navigation handlers, speed search and the popup menu. */
  private void initTree() {
    myTree.getSelectionModel().setSelectionMode(TreeSelectionModel.DISCONTIGUOUS_TREE_SELECTION);
    myTree.getSelectionModel().addTreeSelectionListener(e -> onSelectionChanged());
    myTree.addFocusListener(new FocusListener() {
      // status-bar text depends on focus, so re-evaluate on focus changes
      void updateIfMultipleSelection() {
        if (myTree != null && myTree.getSelectionCount() > 1) {
          onSelectionChanged();
        }
      }
      @Override
      public void focusGained(FocusEvent e) {
        updateIfMultipleSelection();
      }
      @Override
      public void focusLost(FocusEvent e) {
        updateIfMultipleSelection();
      }
    });
    myTree.setRootVisible(false);
    myTree.setShowsRootHandles(true);
    myTree.expandPath(new TreePath(myTree.getModel().getRoot()));
    EditSourceOnDoubleClickHandler.install(myTree);
    EditSourceOnEnterKeyHandler.install(myTree);
    ToolTipManager.sharedInstance().registerComponent(myTree);
    TreeUtil.installActions(myTree);
    new MySpeedSearch(myTree);
    myTree.addKeyListener(new PsiCopyPasteManager.EscapeHandler());
    CustomizationUtil.installPopupHandler(myTree, IdeActions.GROUP_PROJECT_VIEW_POPUP, ActionPlaces.PROJECT_VIEW_POPUP);
  }
  /** Shows an "N elements selected" hint in the status bar for focused multi-selections. */
  protected void onSelectionChanged() {
    if (myTree != null && myTree.getSelectionModel() != null) {
      int count = myTree.getSelectionModel().getSelectionCount();
      String description = count > 1 && myTree.hasFocus() ? LangBundle.message("project.view.elements.selected", count) : null;
      ActionMenu.showDescriptionInStatusBar(true, myTree, description);
    }
  }
  /**
   * Rebuilds the whole tree, optionally restoring the previously expanded
   * and selected paths once the update completes.
   */
  @NotNull
  @Override
  public final ActionCallback updateFromRoot(boolean restoreExpandedPaths) {
    Runnable afterUpdate;
    final ActionCallback cb = new ActionCallback();
    AbstractTreeBuilder builder = getTreeBuilder();
    if (restoreExpandedPaths && builder != null) {
      List<Object> pathsToExpand = new ArrayList<>();
      List<Object> selectionPaths = new ArrayList<>();
      TreeBuilderUtil.storePaths(builder, (DefaultMutableTreeNode)myTree.getModel().getRoot(), pathsToExpand, selectionPaths, true);
      afterUpdate = () -> {
        if (myTree != null && !builder.isDisposed()) {
          myTree.clearSelection();
          TreeBuilderUtil.restorePaths(builder, pathsToExpand, selectionPaths, true);
        }
        cb.setDone();
      };
    }
    else {
      afterUpdate = cb.createSetDoneRunnable();
    }
    if (builder != null) {
      builder.addSubtreeToUpdate(builder.getRootNode(), afterUpdate);
    }
    else if (myAsyncSupport != null) {
      myAsyncSupport.updateAll(afterUpdate);
    }
    else {
      // neither builder nor async support: the pane is not initialized (or already disposed)
      return ActionCallback.REJECTED;
    }
    return cb;
  }
  @Override
  public void select(Object element, VirtualFile file, boolean requestFocus) {
    selectCB(element, file, requestFocus);
  }
  /** Selects the node for {@code element}/{@code file}; returns a callback for the async path. */
  @NotNull
  public ActionCallback selectCB(Object element, VirtualFile file, boolean requestFocus) {
    if (file != null) {
      AbstractTreeBuilder builder = getTreeBuilder();
      if (builder instanceof BaseProjectTreeBuilder) {
        beforeSelect().doWhenDone(() -> UIUtil.invokeLaterIfNeeded(() -> {
          if (!builder.isDisposed()) {
            ((BaseProjectTreeBuilder)builder).selectAsync(element, file, requestFocus);
          }
        }));
      }
      else if (myAsyncSupport != null) {
        return myAsyncSupport.select(myTree, element, file);
      }
    }
    return ActionCallback.DONE;
  }
  @NotNull
  public ActionCallback beforeSelect() {
    // actually, getInitialized().doWhenDone() should be called by builder internally
    // this will be done in 2017
    AbstractTreeBuilder builder = getTreeBuilder();
    if (builder == null) return ActionCallback.DONE;
    return builder.getInitialized();
  }
  protected BaseProjectTreeBuilder createBuilder(@NotNull DefaultTreeModel treeModel) {
    return new ProjectTreeBuilder(myProject, myTree, treeModel, null, (ProjectAbstractTreeStructureBase)myTreeStructure) {
      @Override
      protected AbstractTreeUpdater createUpdater() {
        return createTreeUpdater(this);
      }
    };
  }
  @NotNull
  protected abstract ProjectAbstractTreeStructureBase createStructure();
  @NotNull
  protected abstract ProjectViewTree createTree(@NotNull DefaultTreeModel treeModel);
  @NotNull
  protected abstract AbstractTreeUpdater createTreeUpdater(@NotNull AbstractTreeBuilder treeBuilder);
  /**
   * @param object an object that represents a node in the project tree
   * @param expanded {@code true} if the corresponding node is expanded,
   * {@code false} if it is collapsed
   * @return the error stripe to paint for the corresponding node, or {@code null} if nothing should be painted
   */
  protected ErrorStripe getStripe(Object object, boolean expanded) {
    if (expanded && object instanceof PsiDirectoryNode) return null;
    if (object instanceof PresentableNodeDescriptor) {
      PresentableNodeDescriptor node = (PresentableNodeDescriptor)object;
      TextAttributesKey key = node.getPresentation().getTextAttributesKey();
      TextAttributes attributes = key == null ? null : EditorColorsManager.getInstance().getSchemeForCurrentUITheme().getAttributes(key);
      Color color = attributes == null ? null : attributes.getErrorStripeColor();
      if (color != null) return ErrorStripe.create(color, 1);
    }
    return null;
  }
  /** Speed search that matches dotted (package-like) directory names token-by-token. */
  protected static final class MySpeedSearch extends TreeSpeedSearch {
    MySpeedSearch(JTree tree) {
      super(tree);
    }
    @Override
    protected boolean isMatchingElement(Object element, String pattern) {
      Object userObject = ((DefaultMutableTreeNode)((TreePath)element).getLastPathComponent()).getUserObject();
      if (userObject instanceof PsiDirectoryNode) {
        String str = getElementText(element);
        if (str == null) return false;
        if (pattern.indexOf('.') >= 0) {
          // pattern contains a dot: compare against the whole name
          return compare(str, pattern);
        }
        StringTokenizer tokenizer = new StringTokenizer(str, ".");
        while (tokenizer.hasMoreTokens()) {
          String token = tokenizer.nextToken();
          if (compare(token, pattern)) {
            return true;
          }
        }
        return false;
      }
      else {
        return super.isMatchingElement(element, pattern);
      }
    }
  }
  @Override
  AsyncProjectViewSupport getAsyncSupport() {
    return myAsyncSupport;
  }
  @ApiStatus.Internal
  @NotNull
  public AsyncProjectViewSupport createAsyncSupport(@NotNull Disposable parent, @NotNull Comparator<NodeDescriptor<?>> comparator) {
    return new AsyncProjectViewSupport(parent, myProject, createStructure(), comparator);
  }
}
| |
/**
* Copyright (c) 2015 Salesforce Marketing Cloud.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.exacttarget.jb4a.sdkexplorer;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import com.exacttarget.etpushsdk.ETPush;
import com.exacttarget.jb4a.sdkexplorer.utils.Utils;
import org.json.JSONObject;
import java.util.Calendar;
import java.util.Iterator;
public class SDK_ExplorerDisplayMessageActivity extends BaseActivity {
private static final String TAG = Utils.formatTag(SDK_ExplorerDisplayMessageActivity.class.getSimpleName()) ;
private int currentPage = CONSTS.DISPLAY_MESSAGE_ACTIVITY;
private long payloadReceived = -1;
private String payloadStr = "";
private String messageTitle = "";
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.notification_layout);
if (savedInstanceState == null) {
Bundle extras = getIntent().getExtras();
if (extras == null) {
// get fields from last push received (saved by SDK_ExplorerNotificationReceiver)
SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(SDK_ExplorerApp.context());
payloadReceived = sp.getLong(CONSTS.KEY_PUSH_RECEIVED_DATE, -1);
payloadStr = sp.getString(CONSTS.KEY_PUSH_RECEIVED_PAYLOAD, "");
messageTitle = getString(R.string.display_last_message_activity_title);
} else {
payloadReceived = extras.getLong(CONSTS.KEY_PUSH_RECEIVED_DATE, -1);
payloadStr = extras.getString(CONSTS.KEY_PUSH_RECEIVED_PAYLOAD);
if (payloadStr == null) {
payloadStr = "";
}
messageTitle = getString(R.string.display_current_message_activity_title);
}
prepareDisplay(true);
} else {
payloadReceived = savedInstanceState.getLong(CONSTS.KEY_PUSH_RECEIVED_DATE, -1);
payloadStr = savedInstanceState.getString(CONSTS.KEY_PUSH_RECEIVED_PAYLOAD);
if (payloadStr == null) {
payloadStr = "";
}
messageTitle = savedInstanceState.getString("messageTitle");
if (messageTitle == null) {
messageTitle = "";
}
prepareDisplay(false);
}
}
@Override
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
outState.putInt(CONSTS.KEY_CURRENT_PAGE, currentPage);
outState.putLong(CONSTS.KEY_PUSH_RECEIVED_DATE, payloadReceived);
outState.putString(CONSTS.KEY_PUSH_RECEIVED_PAYLOAD, payloadStr);
outState.putString("messageTitle", messageTitle);
}
@Override
protected void onResume() {
super.onResume();
Utils.setActivityTitle(this, messageTitle);
getActionBar().setDisplayHomeAsUpEnabled(true);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.global_menu, menu);
return super.onCreateOptionsMenu(menu);
}
@Override
public boolean onPrepareOptionsMenu(Menu menu) {
Utils.prepareMenu(currentPage, menu);
return super.onPrepareOptionsMenu(menu);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
Boolean result = Utils.selectMenuItem(this, currentPage, item);
return result != null ? result : super.onOptionsItemSelected(item);
}
@Override
public void onBackPressed() {
super.onBackPressed();
}
private void prepareDisplay(boolean firstOpen) {
StringBuilder sb = new StringBuilder();
if (payloadReceived == -1 | payloadStr == null) {
// nothing to show since no push notification has been received since last installation.
sb.append("<b>No Push notifications have been received since this app was installed.</b> ");
} else {
// show previous payload
sb.append("<b>Payload Sent on: ");
// show date received
Calendar payloadReceivedDate = Calendar.getInstance();
payloadReceivedDate.setTimeInMillis(payloadReceived);
android.text.format.DateFormat df = new android.text.format.DateFormat();
sb.append(df.format("yyyy-MM-dd hh:mm:ss", payloadReceivedDate.getTime()));
sb.append("</b> ");
// convert JSON String of saved payload back to bundle to display
JSONObject jo = null;
try {
jo = new JSONObject(payloadStr);
} catch (Exception e) {
if (ETPush.getLogLevel() <= Log.ERROR) {
Log.e(TAG, e.getMessage(), e);
}
}
if (jo != null) {
sb.append("<b>Payload Sent with Message</b> ");
sb.append("<br/><br/>");
sb.append("<i>Key/Value pairs:</i> ");
Iterator<String> iterator = jo.keys();
while (iterator.hasNext()) {
String key = iterator.next();
try {
Object value = jo.get(key);
sb.append("<br/> ");
sb.append("<u>");
sb.append(key);
sb.append("</u>");
sb.append(" : ");
sb.append(value);
} catch (Exception e) {
if (ETPush.getLogLevel() <= Log.ERROR) {
Log.e(TAG, e.getMessage(), e);
}
}
}
try {
sb.append("<br/><br/>");
sb.append("<i>Custom Keys (Discount Code):</i> ");
String payloadDiscountStr = "";
if (jo.has(CONSTS.KEY_PAYLOAD_DISCOUNT)) {
payloadDiscountStr = jo.getString(CONSTS.KEY_PAYLOAD_DISCOUNT);
}
if (!payloadDiscountStr.equals("")) {
// have an actual discount code
// CUSTOM KEYS
sb.append(payloadDiscountStr);
if (firstOpen) {
// if the Activity was refreshed, then don't flow to the discount screen.
Intent intent = new Intent(SDK_ExplorerDisplayMessageActivity.this, SDK_ExplorerDiscountActivity.class);
intent.putExtra(CONSTS.KEY_PUSH_RECEIVED_PAYLOAD, payloadStr);
startActivity(intent);
}
} else {
sb.append("n/a");
sb.append("<br/>");
sb.append("NOTE: No discount_code key was sent with this message.");
}
} catch (Exception e) {
sb.append("Problem displaying Custom Keys (Discount Code). Check logcat.");
if (ETPush.getLogLevel() <= Log.ERROR) {
Log.e(TAG, e.getMessage(), e);
}
}
} else {
// show current push notification received, but payload is null
sb.append("<b>Problem parsing payload from last push notification. Check logcat.</b> ");
}
}
Utils.setWebView(this, R.id.notificationWV, sb, false);
}
}
| |
/*
* Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.cache.impl;
import com.hazelcast.config.CacheConfig;
import com.hazelcast.core.ExecutionCallback;
import com.hazelcast.core.HazelcastInstanceAware;
import com.hazelcast.logging.ILogger;
import com.hazelcast.nio.Address;
import com.hazelcast.nio.serialization.Data;
import com.hazelcast.spi.AbstractDistributedObject;
import com.hazelcast.spi.ExecutionService;
import com.hazelcast.spi.NodeEngine;
import com.hazelcast.spi.OperationFactory;
import com.hazelcast.spi.OperationService;
import com.hazelcast.spi.partition.IPartitionService;
import com.hazelcast.spi.serialization.SerializationService;
import com.hazelcast.util.executor.CompletableFutureTask;
import javax.cache.CacheException;
import javax.cache.integration.CompletionListener;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import static com.hazelcast.cache.impl.CacheProxyUtil.validateResults;
import static com.hazelcast.util.ExceptionUtil.rethrow;
/**
* Abstract class providing cache open/close operations and {@link NodeEngine}, {@link CacheService} and
* {@link SerializationService} accessor which will be used by implementation of {@link com.hazelcast.cache.ICache}
* in server or embedded mode.
*
* @param <K> the type of key.
* @param <V> the type of value.
* @see com.hazelcast.cache.impl.CacheProxy
*/
abstract class AbstractCacheProxyBase<K, V>
        extends AbstractDistributedObject<ICacheService>
        implements ICacheInternal<K, V> {
    // seconds to wait for each pending loadAll future when closing the cache
    private static final int TIMEOUT = 10;
    protected final ILogger logger;
    protected final CacheConfig<K, V> cacheConfig;
    protected final String name;
    protected final String nameWithPrefix;
    protected final ICacheService cacheService;
    protected final SerializationService serializationService;
    protected final CacheOperationProvider operationProvider;
    protected final IPartitionService partitionService;
    private final NodeEngine nodeEngine;
    // futures of loadAll tasks still running; close() waits for all of them
    private final CopyOnWriteArrayList<Future> loadAllTasks = new CopyOnWriteArrayList<Future>();
    private final AtomicBoolean isClosed = new AtomicBoolean(false);
    private final AtomicBoolean isDestroyed = new AtomicBoolean(false);
    AbstractCacheProxyBase(CacheConfig<K, V> cacheConfig, NodeEngine nodeEngine, ICacheService cacheService) {
        super(nodeEngine, cacheService);
        this.name = cacheConfig.getName();
        this.nameWithPrefix = cacheConfig.getNameWithPrefix();
        this.cacheConfig = cacheConfig;
        this.nodeEngine = nodeEngine;
        this.logger = nodeEngine.getLogger(getClass());
        this.partitionService = nodeEngine.getPartitionService();
        this.cacheService = cacheService;
        this.serializationService = nodeEngine.getSerializationService();
        this.operationProvider =
                cacheService.getCacheOperationProvider(nameWithPrefix, cacheConfig.getInMemoryFormat());
    }
    // injects the HazelcastInstance into listeners/callbacks that implement HazelcastInstanceAware
    void injectDependencies(Object obj) {
        if (obj instanceof HazelcastInstanceAware) {
            ((HazelcastInstanceAware) obj).setHazelcastInstance(nodeEngine.getHazelcastInstance());
        }
    }
    @Override
    public String getName() {
        return name;
    }
    @Override
    protected String getDistributedObjectName() {
        return nameWithPrefix;
    }
    @Override
    public String getPrefixedName() {
        return nameWithPrefix;
    }
    @Override
    public String getServiceName() {
        return ICacheService.SERVICE_NAME;
    }
    /**
     * Re-opens a previously closed cache; a destroyed cache cannot be reopened.
     */
    @Override
    public void open() {
        if (isDestroyed.get()) {
            throw new IllegalStateException("Cache is already destroyed! Cannot be reopened");
        }
        isClosed.compareAndSet(true, false);
    }
    /**
     * Closes the cache: waits up to TIMEOUT seconds for each outstanding
     * loadAll task, then closes the registered listeners. The first exception
     * caught while waiting is rethrown as a CacheException after cleanup.
     */
    @Override
    public void close() {
        // only the first close() call proceeds; later calls are no-ops
        if (!isClosed.compareAndSet(false, true)) {
            return;
        }
        Exception caughtException = null;
        for (Future f : loadAllTasks) {
            try {
                f.get(TIMEOUT, TimeUnit.SECONDS);
            } catch (Exception e) {
                // remember only the first failure, but log every one
                if (caughtException == null) {
                    caughtException = e;
                }
                getNodeEngine().getLogger(getClass()).warning("Problem while waiting for loadAll tasks to complete", e);
            }
        }
        loadAllTasks.clear();
        closeListeners();
        if (caughtException != null) {
            throw new CacheException("Problem while waiting for loadAll tasks to complete", caughtException);
        }
    }
    @Override
    protected boolean preDestroy() {
        close();
        // only the first destroy attempt proceeds
        if (!isDestroyed.compareAndSet(false, true)) {
            return false;
        }
        isClosed.set(true);
        return true;
    }
    @Override
    public boolean isClosed() {
        return isClosed.get();
    }
    @Override
    public boolean isDestroyed() {
        return isDestroyed.get();
    }
    // subclasses deregister their cache listeners here
    abstract void closeListeners();
    // guard used by cache operations; throws if the proxy has been closed
    void ensureOpen() {
        if (isClosed()) {
            throw new IllegalStateException("Cache operations can not be performed. The cache closed");
        }
    }
    /**
     * Submits the given loadAll task to the execution service and tracks its
     * future until completion so close() can wait for it.
     */
    @SuppressWarnings("unchecked")
    void submitLoadAllTask(LoadAllTask loadAllTask) {
        ExecutionService executionService = nodeEngine.getExecutionService();
        final CompletableFutureTask<Object> future =
                (CompletableFutureTask<Object>) executionService.submit("loadAll-" + nameWithPrefix, loadAllTask);
        loadAllTasks.add(future);
        future.andThen(new ExecutionCallback() {
            @Override
            public void onResponse(Object response) {
                loadAllTasks.remove(future);
            }
            @Override
            public void onFailure(Throwable t) {
                loadAllTasks.remove(future);
                getNodeEngine().getLogger(getClass()).warning("Problem in loadAll task", t);
            }
        });
    }
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        AbstractCacheProxyBase that = (AbstractCacheProxyBase) o;
        // identity of a cache proxy is its full (prefixed) name
        if (nameWithPrefix != null ? !nameWithPrefix.equals(that.nameWithPrefix) : that.nameWithPrefix != null) {
            return false;
        }
        return true;
    }
    @Override
    public int hashCode() {
        return nameWithPrefix != null ? nameWithPrefix.hashCode() : 0;
    }
    @Override
    public String toString() {
        return getClass().getName() + '{' + "name=" + name + ", nameWithPrefix=" + nameWithPrefix + '}';
    }
    /**
     * Runnable that performs a distributed loadAll: for every cluster member
     * it invokes a load-all operation on the partitions owned by that member,
     * restricted to the requested keys, then notifies the completion listener.
     */
    final class LoadAllTask implements Runnable {
        private final CompletionListener completionListener;
        private final CacheOperationProvider operationProvider;
        private final Set<Data> keysData;
        private final boolean replaceExistingValues;
        LoadAllTask(CacheOperationProvider operationProvider, Set<Data> keysData,
                    boolean replaceExistingValues, CompletionListener completionListener) {
            this.operationProvider = operationProvider;
            this.keysData = keysData;
            this.replaceExistingValues = replaceExistingValues;
            this.completionListener = completionListener;
        }
        @Override
        public void run() {
            try {
                injectDependencies(completionListener);
                OperationService operationService = getNodeEngine().getOperationService();
                OperationFactory operationFactory;
                IPartitionService partitionService = getNodeEngine().getPartitionService();
                Map<Address, List<Integer>> memberPartitionsMap = partitionService.getMemberPartitionsMap();
                Map<Integer, Object> results = new HashMap<Integer, Object>();
                for (Map.Entry<Address, List<Integer>> memberPartitions : memberPartitionsMap.entrySet()) {
                    Set<Integer> partitions = new HashSet<Integer>(memberPartitions.getValue());
                    Set<Data> ownerKeys = filterOwnerKeys(partitionService, partitions);
                    operationFactory = operationProvider.createLoadAllOperationFactory(ownerKeys, replaceExistingValues);
                    Map<Integer, Object> memberResults;
                    memberResults = operationService.invokeOnPartitions(getServiceName(), operationFactory, partitions);
                    results.putAll(memberResults);
                }
                validateResults(results);
                if (completionListener != null) {
                    completionListener.onCompletion();
                }
            } catch (Exception e) {
                if (completionListener != null) {
                    completionListener.onException(e);
                }
            } catch (Throwable t) {
                // OutOfMemoryError must propagate; anything else is reported to the listener
                if (t instanceof OutOfMemoryError) {
                    throw rethrow(t);
                } else {
                    if (completionListener != null) {
                        completionListener.onException(new CacheException(t));
                    }
                }
            }
        }
        /**
         * @return the subset of the requested keys owned by the given partitions
         */
        private Set<Data> filterOwnerKeys(IPartitionService partitionService, Set<Integer> partitions) {
            Set<Data> ownerKeys = new HashSet<Data>();
            for (Data key : keysData) {
                int keyPartitionId = partitionService.getPartitionId(key);
                if (partitions.contains(keyPartitionId)) {
                    ownerKeys.add(key);
                }
            }
            return ownerKeys;
        }
    }
}
| |
/**
* Copyright (c) 2012 by Tyson Gern
* Licensed under the MIT License
*/
import java.util.*;
/**
 * An element of a Coxeter group of type D and rank "size", stored as a
 * signed permutation in the inherited array oneLine (one-line notation).
 * The methods in this class perform elementary operations on the element.
 * NOTE(review): the fields size, rank, oneLine and the helpers
 * switchPlaces, switchValues, invertPermutation, countInv, compatible and
 * rightMultiplyPerm are inherited from EvenElement, which is not visible
 * here — their exact semantics should be confirmed against that class.
 * @author Tyson Gern (tygern@gmail.com)
 */
class TypeD extends EvenElement{
    /**
     * This constructs an element from a signed permutation.
     * Validates that the input is a signed permutation of 1..n (every
     * entry nonzero, at most n in absolute value, each absolute value
     * used exactly once) and that it has an even number of negative
     * entries, as type D requires.
     * @param input The signed permutation
     * @throws IllegalArgumentException if input is not a signed
     *         permutation, or has an odd number of negative entries
     */
    public TypeD(int[] input) {
        size = input.length;
        rank = input.length;
        oneLine = new int[size];
        int sign = 1; // parity of the number of negative entries seen so far
        // number[k] is set once the value k+1 (up to sign) has appeared
        boolean[] number = new boolean[size];
        for (int i = 0; i < size; i++) number[i] = false;
        for(int i = 0; i < size; i++) {
            // reject out-of-range, zero, and repeated absolute values
            if (Math.abs(input[i]) > size || input[i] == 0 || number[Math.abs(input[i]) - 1]) {
                throw new IllegalArgumentException("Invalid permutation");
            }
            oneLine[i] = input[i];
            number[Math.abs(input[i]) - 1] = true;
            if (oneLine[i] < 0) {
                sign *= -1;
            }
        }
        // a type D element must carry an even number of sign changes
        if (sign == -1) {
            throw new IllegalArgumentException("Invalid type");
        }
    }
    /**
     * This constructs the identity element of a particular rank
     * (oneLine = 1, 2, ..., rank).
     * @param rank The rank of the element
     */
    protected TypeD(int rank) {
        this.rank = rank;
        this.size = rank;
        oneLine = new int[size];
        for (int i = 0; i < size; i++) {
            oneLine[i] = i + 1;
        }
    }
    /**
     * This method gets the inverse of the element as a new object;
     * this element is left unchanged.
     * @return The inverse of the element
     */
    public TypeD findInverse() {
        return new TypeD(this.invertPermutation());
    }
    /**
     * This method multiplies the element on the right by a generator s,
     * modifying this element in place.
     * The generator s = 1 is treated as the "fork" generator of the
     * type D diagram: it acts on the first two positions with a sign
     * flip instead of a plain transposition.
     * @param s The generator (1 <= s <= rank); out-of-range values are
     *          silently ignored
     * @return this element, after the multiplication
     */
    private TypeD rightMultiplyS(int s) {
        if (s <= rank && s >= 1) {
            int sign = 1;
            if (s == 1) {
                // fork generator: same positions as s = 2 but with negation
                sign = -1;
                s = 2;
            }
            switchPlaces(s-2, s-1, sign);
        }
        return this;
    }
    /**
     * This method multiplies the element on the left by a generator s,
     * modifying this element in place.
     * As in rightMultiplyS, s = 1 is the fork generator and acts with a
     * sign flip; left multiplication swaps values rather than positions.
     * @param s The generator (1 <= s <= rank); out-of-range values are
     *          silently ignored
     * @return this element, after the multiplication
     */
    private TypeD leftMultiplyS(int s) {
        if (s <= rank && s >= 1) {
            int sign = 1;
            if (s == 1) {
                sign = -1;
                s = 2;
            }
            switchValues(s - 1, s, sign);
        }
        return this;
    }
    /**
     * This method tells if an element has a reduced expression ending
     * in two noncommuting generators.
     * NOTE(review): the first test involves the fork generators on
     * positions 1 and 3; confirm against the type D combinatorics.
     * @return true if the element is right bad
     */
    private boolean isRightBad() {
        if (-1 * oneLine[0] > oneLine[2]) return false; // 13 or 31
        for(int j = 0; j <= size - 3; j++) {
            // one comparison subsumes the three patterns listed below
            if (oneLine[j] > oneLine[j+2]) return false;
            // if (oneLine[j] > oneLine[j + 1] && oneLine[j + 1] > oneLine[j + 2]) return false; //321
            // if (oneLine[j] > oneLine[j + 2] && oneLine[j + 2] > oneLine[j + 1]) return false; //312
            // if (oneLine[j + 1] > oneLine[j] && oneLine[j] > oneLine[j + 2]) return false; //231
        }
        return true;
    }
    /**
     * This method tells if an element has a reduced expression
     * beginning in two noncommuting generators.
     * Implemented by testing the inverse on the right.
     * @return true if the element is left bad
     */
    private boolean isLeftBad() {
        return findInverse().isRightBad();
    }
    /**
     * This method tells if an element has a reduced expression
     * beginning AND ending in two noncommuting generators; a product
     * of commuting generators is never considered bad.
     * @return true if the element is bad
     */
    public boolean isBad() {
        if (commutingGenerators()) {
            return false;
        }
        return (isRightBad() && isLeftBad());
    }
    /**
     * This method tells if an element is a product of commuting
     * generators, by scanning oneLine left to right: each position must
     * either be fixed (value j + 1) or be part of an adjacent swap
     * (value j + 2 followed by j + 1). The first one or two positions
     * additionally allow the fork-generator patterns (-1, -2) and (2, 1).
     * @return true if the element is a product of commuting
     * generators
     */
    private boolean commutingGenerators() {
        int j = 0;
        if (oneLine[0] == 1) {
            j = 1;
        }
        else if (oneLine[0] == -1) {
            // fork generator s1 acts as (-1, -2) on the first two slots
            if (oneLine[1] != -2) {
                return false;
            }
            j = 2;
        }
        else if (oneLine[0] == 2) {
            // generator s2 acts as the plain swap (2, 1)
            if (oneLine[1] != 1) {
                return false;
            }
            j = 2;
        }
        else if (oneLine[0] == -2) {
            // product of s1 and s2 (they commute) gives (-2, -1)
            if (oneLine[1] != -1) {
                return false;
            }
            j = 2;
        }
        else {
            return false;
        }
        while (j < size - 2) {
            if (oneLine[j] > j + 2) {
                return false;
            }
            else if (oneLine[j] == j + 1) {
                j += 1; // fixed point, advance one slot
            }
            else { // oneLine[j] = j + 2
                // must be completed to the adjacent swap (j + 2, j + 1)
                if (oneLine[j + 1] != j + 1) return false;
                j += 2;
            }
        }
        return true;
    }
    /**
     * This method gets the length of the element.
     * NOTE(review): countInv is inherited and not visible here —
     * presumably it counts (signed) inversions; confirm in EvenElement.
     * @return the length of the element.
     */
    public int length() {
        return countInv(1) + countInv(-1);
    }
    /**
     * This method returns true if s is in the right descent set of
     * the element, false otherwise. The fork generator s = 1 uses the
     * signed comparison; out-of-range s is never a descent.
     * @return true if s is a descent of the element
     */
    protected boolean isRightDescent(int s) {
        if (s == 1) {
            return (-1 * oneLine[1] > oneLine[0]);
        }
        if (s >= 2 && s <= rank) {
            return (oneLine[s - 2] > oneLine[s - 1]);
        }
        return false;
    }
    /**
     * This method creates a TypeDExpression reduced expression from a
     * signed permutation: a working copy is multiplied down to the
     * identity by right descents, recording each generator used; the
     * recorded word is then reversed to obtain a reduced expression
     * for this element.
     * @return a reduced expression
     */
    public TypeDExpression findRE() {
        ArrayList<Integer> generator = new ArrayList<Integer> ();
        TypeD permutation = new TypeD(oneLine);
        while (permutation.length() != 0) {
            for (int i = permutation.size; i >= 1; i--) {
                if (permutation.isRightDescent(i)) {
                    generator.add(i);
                    permutation.rightMultiplyS(i);
                }
            }
        }
        int length = generator.size();
        int[] genArray = new int[length];
        // generators were collected from the right end of the word, so reverse
        for (int i = 0; i < length; i++) {
            genArray[length - 1 - i] = generator.get(i).intValue();
        }
        TypeDExpression redExp = new TypeDExpression(genArray, permutation.rank);
        return redExp;
    }
    /**
     * This method multiplies a signed permutation on the right by
     * another signed permutation.
     * @param other the other element
     * @return the product of this and other as a new element, or null
     *         if the two elements are not compatible (e.g. rank mismatch
     *         — see EvenElement.compatible)
     */
    public TypeD rightMultiply(TypeD other) {
        if (!compatible(other)) return null;
        return new TypeD(rightMultiplyPerm(other));
    }
    /**
     * This method multiplies a signed permutation on the left by
     * another signed permutation (delegates to other.rightMultiply).
     * @param other the other element
     * @return the product of other and this, or null if incompatible
     */
    public TypeD leftMultiply(TypeD other) {
        return other.rightMultiply(this);
    }
}
| |
/* JAI-Ext - OpenSource Java Advanced Image Extensions Library
* http://www.geo-solutions.it/
* Copyright 2018 GeoSolutions
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Copyright (c) 2011, Michael Bedward. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package it.geosolutions.jaiext.jiffle.runtime;
import it.geosolutions.jaiext.jiffle.JiffleException;
import org.junit.Test;
/**
* Unit tests for Jiffle's loop statements.
*
* @author Michael Bedward
* @since 0.1
* @version $Id$
*/
public class LoopTest extends RuntimeTestBase {
@Test
public void whileLoopWithSimpleStatement() throws Exception {
System.out.println(" while loop with simple statement");
String script =
"n = 0; \n"
+ "while (n < x()) n++; \n"
+ "dest = n;" ;
Evaluator e = new Evaluator() {
public double eval(double val) {
int xx = x;
move();
return xx;
}
};
testScript(script, e);
}
@Test
public void whileLoopWithBlock() throws Exception {
System.out.println(" while loop with block");
String script =
"n = 0; \n"
+ "i = 0; \n"
+ "while (i < x()) { n += i; i++ ; } \n"
+ "dest = n;" ;
Evaluator e = new Evaluator() {
public double eval(double val) {
int n = 0;
for (int i = 0; i < x; i++) n += i;
move();
return n;
}
};
testScript(script, e);
}
@Test
public void untilLoopWithSimpleStatement() throws Exception {
System.out.println(" until loop with simple statement");
String script =
"n = 0; \n"
+ "until (n > x()) n++; \n"
+ "dest = n;" ;
Evaluator e = new Evaluator() {
public double eval(double val) {
int xx = x;
move();
return xx + 1;
}
};
testScript(script, e);
}
@Test
public void untilLoopWithBlock() throws Exception {
System.out.println(" until loop with block");
String script =
"n = 0; \n"
+ "i = 0; \n"
+ "until (i > x()) { n += i; i++ ; } \n"
+ "dest = n;" ;
Evaluator e = new Evaluator() {
public double eval(double val) {
int n = 0;
for (int i = 0; i <= x; i++) n += i;
move();
return n;
}
};
testScript(script, e);
}
@Test
public void foreachListLoopWithSimpleStatement() throws Exception {
System.out.println(" foreach (i in [x(), y(), 3]) simple statement");
String script =
"z = 0;"
+ "foreach (i in [x(), y(), 3]) z += i;"
+ "dest = z;" ;
Evaluator e = new Evaluator() {
public double eval(double val) {
double z = x + y + 3;
move();
return z;
}
};
testScript(script, e);
}
@Test
public void foreachListLoopWithBlock() throws Exception {
System.out.println(" foreach (i in [x(), y(), 3]) block");
String script =
"z = 0;"
+ "foreach (i in [x(), y(), 3]) \n"
+ "{ \n"
+ " temp = i * 2; \n"
+ " z += temp; \n"
+ "} \n"
+ "dest = z;" ;
Evaluator e = new Evaluator() {
public double eval(double val) {
double z = 2*(x + y + 3);
move();
return z;
}
};
testScript(script, e);
}
@Test
public void foreachSequenceLoopWithSimpleStatement() throws Exception {
System.out.println(" foreach (i in -1:5) simple statement");
String script =
"z = 0; \n"
+ "foreach (i in -1:5) z += i*src; \n"
+ "dest = z;" ;
Evaluator e = new Evaluator() {
public double eval(double val) {
double z = 0;
for (int i = -1; i <= 5; i++) z += val * i;
move();
return z;
}
};
testScript(script, e);
}
@Test
public void foreachSequenceLoopWithBlock() throws Exception {
System.out.println(" foreach (i in -1:5) block");
String script =
"z = 0; \n"
+ "foreach (i in -1:5) { \n"
+ " temp = i * src; \n"
+ " z += temp; \n"
+ "} \n"
+ "dest = z;" ;
Evaluator e = new Evaluator() {
public double eval(double val) {
double z = 0;
for (int i = -1; i <= 5; i++) z += val * i;
move();
return z;
}
};
testScript(script, e);
}
@Test
public void breakif() throws Exception {
System.out.println(" breakif");
String script =
"n = 0; \n"
+ "i = 0; \n"
+ "while (i < x()) { \n"
+ " n += i; \n"
+ " breakif(n >= 10); \n"
+ " i++ ; \n"
+ "} \n"
+ "dest = n;" ;
Evaluator e = new Evaluator() {
public double eval(double val) {
int n = 0;
for (int i = 0; i < x; i++) n += i;
move();
return (n < 10 ? n : 10);
}
};
testScript(script, e);
}
@Test
public void breakifNestedInIf() throws Exception {
System.out.println(" breakif nested in if-block");
String script =
"n = 0; \n"
+ "i = 0; \n"
+ "while (i < x()) { \n"
+ " n += i; \n"
+ " if (true) { \n"
+ " breakif(n >= 10); \n"
+ " } \n"
+ " i++ ; \n"
+ "} \n"
+ "dest = n;" ;
Evaluator e = new Evaluator() {
public double eval(double val) {
int n = 0;
for (int i = 0; i < x; i++) n += i;
move();
return (n < 10 ? n : 10);
}
};
testScript(script, e);
}
@Test
public void unconditionalBreak() throws Exception {
System.out.println(" unconditional break");
String script =
"i = 0;"
+ "while (i < src) { \n"
+ " if (++i >= 5) break;"
+ "} \n"
+ "dest = i;";
Evaluator e = new Evaluator() {
public double eval(double val) {
return Math.min(val, 5.0);
}
};
testScript(script, e);
}
@Test(expected=JiffleException.class)
public void breakifStatementOutsideOfLoop() throws Exception {
System.out.println(" breakif statement outside loop throws exception");
String script =
"i = 42;\n"
+ "breakif( i == 42 );\n"
+ "dest = i;" ;
Evaluator e = new Evaluator() {
public double eval(double val) {
throw new IllegalStateException("Should not be called");
}
};
testScript(script, e);
}
@Test(expected=JiffleException.class)
public void breakStatementOutsideOfLoop() throws Exception {
System.out.println(" break statement outside loop throws exception");
String script =
"i = 42;\n"
+ "break;\n"
+ "dest = i;" ;
Evaluator e = new Evaluator() {
public double eval(double val) {
throw new IllegalStateException("Should not be called");
}
};
testScript(script, e);
}
@Test
public void nestedForEachLoops() throws Exception {
System.out.println(" nested foreach loops");
String script =
"n = 0;"
+ "foreach (i in 1:5) { \n"
+ " foreach (j in i:(i+5)) { \n"
+ " n += i + j; \n"
+ " } \n"
+ "} \n"
+ "dest = src + n;" ;
Evaluator e = new Evaluator() {
public double eval(double val) {
double z = val;
for (int i = 1; i <= 5; i++) {
for (int j = i; j <= i+5; j++) {
z += i + j;
}
}
return z;
}
};
testScript(script, e);
}
@Test
public void foreachLoopWithListVar() throws Exception {
System.out.println(" using list var in foreach loop");
String script =
"options {outside = 0;} \n"
+ "foo = [-1, 0, 1]; \n"
+ "z = 0; \n"
+ "foreach (dx in foo) z += src[dx, 0]; \n"
+ "dest = z;";
Evaluator e = new Evaluator() {
public double eval(double val) {
double z = val;
if (x > 0) z += val - 1;
if (x < IMG_WIDTH-1) z += val + 1;
move();
return z;
}
};
testScript(script, e);
}
}
| |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.mapreduce;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.catalog.MetaReader;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.hadoopbackport.JarFinder;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier;
import org.apache.hadoop.hbase.security.token.AuthenticationTokenSelector;
import org.apache.hadoop.hbase.util.Base64;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.ZKClusterId;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.StringUtils;
import org.apache.zookeeper.KeeperException;
import org.cliffc.high_scale_lib.Counter;
import com.google.protobuf.InvalidProtocolBufferException;
/**
* Utility for {@link TableMapper} and {@link TableReducer}
*/
@SuppressWarnings({ "rawtypes", "unchecked" })
@InterfaceAudience.Public
@InterfaceStability.Stable
public class TableMapReduceUtil {
static Log LOG = LogFactory.getLog(TableMapReduceUtil.class);
/**
 * Use this before submitting a TableMap job. It will appropriately set up
 * the job. Convenience overload: delegates to the addDependencyJars
 * variant with addDependencyJars = true.
 *
 * @param table  The table name to read from.
 * @param scan  The scan instance with the columns, time range etc.
 * @param mapper  The mapper class to use.
 * @param outputKeyClass  The class of the output key.
 * @param outputValueClass  The class of the output value.
 * @param job  The current job to adjust.  Make sure the passed job is
 * carrying all necessary HBase configuration.
 * @throws IOException When setting up the details fails.
 */
public static void initTableMapperJob(String table, Scan scan,
    Class<? extends TableMapper> mapper,
    Class<?> outputKeyClass,
    Class<?> outputValueClass, Job job)
throws IOException {
    initTableMapperJob(table, scan, mapper, outputKeyClass, outputValueClass,
        job, true);
}
/**
 * Use this before submitting a TableMap job. It will appropriately set up
 * the job. Convenience overload: converts the binary table name to a
 * String and delegates with addDependencyJars = true.
 *
 * @param table Binary representation of the table name to read from.
 * @param scan  The scan instance with the columns, time range etc.
 * @param mapper  The mapper class to use.
 * @param outputKeyClass  The class of the output key.
 * @param outputValueClass  The class of the output value.
 * @param job  The current job to adjust.  Make sure the passed job is
 * carrying all necessary HBase configuration.
 * @throws IOException When setting up the details fails.
 */
public static void initTableMapperJob(byte[] table, Scan scan,
    Class<? extends TableMapper> mapper,
    Class<?> outputKeyClass,
    Class<?> outputValueClass, Job job)
throws IOException {
    initTableMapperJob(Bytes.toString(table), scan, mapper, outputKeyClass, outputValueClass,
        job, true);
}
/**
 * Use this before submitting a TableMap job. It will appropriately set up
 * the job. Delegates to the full variant with initCredentials = true.
 *
 * @param table  The table name to read from.
 * @param scan  The scan instance with the columns, time range etc.
 * @param mapper  The mapper class to use.
 * @param outputKeyClass  The class of the output key.
 * @param outputValueClass  The class of the output value.
 * @param job  The current job to adjust.  Make sure the passed job is
 * carrying all necessary HBase configuration.
 * @param addDependencyJars upload HBase jars and jars for any of the configured
 *           job classes via the distributed cache (tmpjars).
 * @param inputFormatClass the input format class to use for the job
 * @throws IOException When setting up the details fails.
 */
public static void initTableMapperJob(String table, Scan scan,
    Class<? extends TableMapper> mapper,
    Class<?> outputKeyClass,
    Class<?> outputValueClass, Job job,
    boolean addDependencyJars, Class<? extends InputFormat> inputFormatClass)
throws IOException {
    initTableMapperJob(table, scan, mapper, outputKeyClass, outputValueClass, job,
        addDependencyJars, true, inputFormatClass);
}
/**
 * Use this before submitting a TableMap job. It will appropriately set up
 * the job. This is the overload that does the actual work: it wires the
 * input format, mapper and map-output types into the job, stores the table
 * name and serialized scan in the configuration, registers the HBase
 * serializations, and optionally ships dependency jars and obtains auth
 * credentials.
 *
 * @param table  The table name to read from.
 * @param scan  The scan instance with the columns, time range etc.
 * @param mapper  The mapper class to use.
 * @param outputKeyClass  The class of the output key (may be null to leave unset).
 * @param outputValueClass  The class of the output value (may be null to leave unset).
 * @param job  The current job to adjust.  Make sure the passed job is
 * carrying all necessary HBase configuration.
 * @param addDependencyJars upload HBase jars and jars for any of the configured
 *           job classes via the distributed cache (tmpjars).
 * @param initCredentials whether to initialize hbase auth credentials for the job
 * @param inputFormatClass the input format
 * @throws IOException When setting up the details fails.
 */
public static void initTableMapperJob(String table, Scan scan,
    Class<? extends TableMapper> mapper,
    Class<?> outputKeyClass,
    Class<?> outputValueClass, Job job,
    boolean addDependencyJars, boolean initCredentials,
    Class<? extends InputFormat> inputFormatClass)
throws IOException {
    job.setInputFormatClass(inputFormatClass);
    job.setMapperClass(mapper);
    if (outputKeyClass != null) {
        job.setMapOutputKeyClass(outputKeyClass);
    }
    if (outputValueClass != null) {
        job.setMapOutputValueClass(outputValueClass);
    }
    // Put outputs can be pre-aggregated map-side.
    if (Put.class.equals(outputValueClass)) {
        job.setCombinerClass(PutCombiner.class);
    }
    Configuration jobConf = job.getConfiguration();
    // overlay HBase defaults/resources onto the job's configuration
    HBaseConfiguration.merge(jobConf, HBaseConfiguration.create(jobConf));
    jobConf.set(TableInputFormat.INPUT_TABLE, table);
    jobConf.set(TableInputFormat.SCAN, convertScanToString(scan));
    jobConf.setStrings("io.serializations", jobConf.get("io.serializations"),
        MutationSerialization.class.getName(), ResultSerialization.class.getName(),
        KeyValueSerialization.class.getName());
    if (addDependencyJars) {
        addDependencyJars(job);
    }
    if (initCredentials) {
        initCredentials(job);
    }
}
/**
 * Use this before submitting a TableMap job. It will appropriately set up
 * the job. Convenience overload: converts the binary table name to a
 * String and delegates.
 *
 * @param table Binary representation of the table name to read from.
 * @param scan  The scan instance with the columns, time range etc.
 * @param mapper  The mapper class to use.
 * @param outputKeyClass  The class of the output key.
 * @param outputValueClass  The class of the output value.
 * @param job  The current job to adjust.  Make sure the passed job is
 * carrying all necessary HBase configuration.
 * @param addDependencyJars upload HBase jars and jars for any of the configured
 *           job classes via the distributed cache (tmpjars).
 * @param inputFormatClass The class of the input format
 * @throws IOException When setting up the details fails.
 */
public static void initTableMapperJob(byte[] table, Scan scan,
    Class<? extends TableMapper> mapper,
    Class<?> outputKeyClass,
    Class<?> outputValueClass, Job job,
    boolean addDependencyJars, Class<? extends InputFormat> inputFormatClass)
throws IOException {
    initTableMapperJob(Bytes.toString(table), scan, mapper, outputKeyClass,
        outputValueClass, job, addDependencyJars, inputFormatClass);
}
/**
 * Use this before submitting a TableMap job. It will appropriately set up
 * the job. Convenience overload: uses TableInputFormat as the input format.
 *
 * @param table Binary representation of the table name to read from.
 * @param scan  The scan instance with the columns, time range etc.
 * @param mapper  The mapper class to use.
 * @param outputKeyClass  The class of the output key.
 * @param outputValueClass  The class of the output value.
 * @param job  The current job to adjust.  Make sure the passed job is
 * carrying all necessary HBase configuration.
 * @param addDependencyJars upload HBase jars and jars for any of the configured
 *           job classes via the distributed cache (tmpjars).
 * @throws IOException When setting up the details fails.
 */
public static void initTableMapperJob(byte[] table, Scan scan,
    Class<? extends TableMapper> mapper,
    Class<?> outputKeyClass,
    Class<?> outputValueClass, Job job,
    boolean addDependencyJars)
throws IOException {
    initTableMapperJob(Bytes.toString(table), scan, mapper, outputKeyClass,
        outputValueClass, job, addDependencyJars, TableInputFormat.class);
}
/**
 * Use this before submitting a TableMap job. It will appropriately set up
 * the job. Convenience overload: uses TableInputFormat as the input format.
 *
 * @param table The table name to read from.
 * @param scan  The scan instance with the columns, time range etc.
 * @param mapper  The mapper class to use.
 * @param outputKeyClass  The class of the output key.
 * @param outputValueClass  The class of the output value.
 * @param job  The current job to adjust.  Make sure the passed job is
 * carrying all necessary HBase configuration.
 * @param addDependencyJars upload HBase jars and jars for any of the configured
 *           job classes via the distributed cache (tmpjars).
 * @throws IOException When setting up the details fails.
 */
public static void initTableMapperJob(String table, Scan scan,
    Class<? extends TableMapper> mapper,
    Class<?> outputKeyClass,
    Class<?> outputValueClass, Job job,
    boolean addDependencyJars)
throws IOException {
    initTableMapperJob(table, scan, mapper, outputKeyClass,
        outputValueClass, job, addDependencyJars, TableInputFormat.class);
}
/**
 * Sets up the job for reading from a table snapshot. It bypasses hbase servers
 * and read directly from snapshot files.
 *
 * @param snapshotName The name of the snapshot (of a table) to read from.
 * @param scan  The scan instance with the columns, time range etc.
 * @param mapper  The mapper class to use.
 * @param outputKeyClass  The class of the output key.
 * @param outputValueClass  The class of the output value.
 * @param job  The current job to adjust.  Make sure the passed job is
 * carrying all necessary HBase configuration.
 * @param addDependencyJars upload HBase jars and jars for any of the configured
 *           job classes via the distributed cache (tmpjars).
 *
 * @param tmpRestoreDir a temporary directory to copy the snapshot files into. Current user should
 * have write permissions to this directory, and this should not be a subdirectory of rootdir.
 * After the job is finished, restore directory can be deleted.
 * @throws IOException When setting up the details fails.
 * @see TableSnapshotInputFormat
 */
public static void initTableSnapshotMapperJob(String snapshotName, Scan scan,
    Class<? extends TableMapper> mapper,
    Class<?> outputKeyClass,
    Class<?> outputValueClass, Job job,
    boolean addDependencyJars, Path tmpRestoreDir)
throws IOException {
    // register the snapshot + restore dir before the generic mapper setup
    TableSnapshotInputFormat.setInput(job, snapshotName, tmpRestoreDir);
    // initCredentials = false: snapshot reads bypass the hbase servers
    initTableMapperJob(snapshotName, scan, mapper, outputKeyClass,
        outputValueClass, job, addDependencyJars, false, TableSnapshotInputFormat.class);
    /*
     * Enable a basic on-heap cache for these jobs. Any BlockCache implementation based on
     * direct memory will likely cause the map tasks to OOM when opening the region. This
     * is done here instead of in TableSnapshotRegionRecordReader in case an advanced user
     * wants to override this behavior in their job.
     */
    job.getConfiguration().setFloat(
        HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, HConstants.HFILE_BLOCK_CACHE_SIZE_DEFAULT);
    job.getConfiguration().setFloat("hbase.offheapcache.percentage", 0f);
    job.getConfiguration().setFloat("hbase.bucketcache.size", 0f);
    // We would need even more libraries that hbase-server depends on
    // (Counter lives in high-scale-lib, used by the server-side code path).
    TableMapReduceUtil.addDependencyJars(job.getConfiguration(), Counter.class);
}
/**
 * Use this before submitting a Multi TableMap job. It will appropriately set
 * up the job. Convenience overload: delegates with addDependencyJars = true.
 *
 * @param scans The list of {@link Scan} objects to read from.
 * @param mapper The mapper class to use.
 * @param outputKeyClass The class of the output key.
 * @param outputValueClass The class of the output value.
 * @param job The current job to adjust. Make sure the passed job is carrying
 *          all necessary HBase configuration.
 * @throws IOException When setting up the details fails.
 */
public static void initTableMapperJob(List<Scan> scans,
    Class<? extends TableMapper> mapper,
    Class<? extends WritableComparable> outputKeyClass,
    Class<? extends Writable> outputValueClass, Job job) throws IOException {
    initTableMapperJob(scans, mapper, outputKeyClass, outputValueClass, job,
        true);
}
/**
 * Use this before submitting a Multi TableMap job. It will appropriately set
 * up the job. Convenience overload: delegates with initCredentials = true.
 *
 * @param scans The list of {@link Scan} objects to read from.
 * @param mapper The mapper class to use.
 * @param outputKeyClass The class of the output key.
 * @param outputValueClass The class of the output value.
 * @param job The current job to adjust. Make sure the passed job is carrying
 *          all necessary HBase configuration.
 * @param addDependencyJars upload HBase jars and jars for any of the
 *          configured job classes via the distributed cache (tmpjars).
 * @throws IOException When setting up the details fails.
 */
public static void initTableMapperJob(List<Scan> scans,
    Class<? extends TableMapper> mapper,
    Class<? extends WritableComparable> outputKeyClass,
    Class<? extends Writable> outputValueClass, Job job,
    boolean addDependencyJars) throws IOException {
    initTableMapperJob(scans, mapper, outputKeyClass, outputValueClass, job,
        addDependencyJars, true);
}
/**
 * Use this before submitting a Multi TableMap job. It will appropriately set
 * up the job. This is the multi-scan overload that does the actual work:
 * it wires MultiTableInputFormat, the mapper and map-output types into the
 * job, serializes every scan into the configuration, and optionally ships
 * dependency jars and obtains auth credentials.
 *
 * @param scans The list of {@link Scan} objects to read from.
 * @param mapper The mapper class to use.
 * @param outputKeyClass The class of the output key (may be null to leave unset).
 * @param outputValueClass The class of the output value (may be null to leave unset).
 * @param job The current job to adjust. Make sure the passed job is carrying
 *          all necessary HBase configuration.
 * @param addDependencyJars upload HBase jars and jars for any of the
 *          configured job classes via the distributed cache (tmpjars).
 * @param initCredentials whether to initialize hbase auth credentials for the job
 * @throws IOException When setting up the details fails.
 */
public static void initTableMapperJob(List<Scan> scans,
    Class<? extends TableMapper> mapper,
    Class<? extends WritableComparable> outputKeyClass,
    Class<? extends Writable> outputValueClass, Job job,
    boolean addDependencyJars,
    boolean initCredentials) throws IOException {
    job.setInputFormatClass(MultiTableInputFormat.class);
    if (outputValueClass != null) {
        job.setMapOutputValueClass(outputValueClass);
    }
    if (outputKeyClass != null) {
        job.setMapOutputKeyClass(outputKeyClass);
    }
    job.setMapperClass(mapper);
    Configuration conf = job.getConfiguration();
    // overlay HBase defaults/resources onto the job's configuration
    HBaseConfiguration.merge(conf, HBaseConfiguration.create(conf));
    List<String> scanStrings = new ArrayList<String>(scans.size());
    for (Scan scan : scans) {
        scanStrings.add(convertScanToString(scan));
    }
    // use the already-captured conf for consistency with the lines above
    // (job.getConfiguration() returns the same object)
    conf.setStrings(MultiTableInputFormat.SCANS,
        scanStrings.toArray(new String[scanStrings.size()]));
    if (addDependencyJars) {
        addDependencyJars(job);
    }
    if (initCredentials) {
        initCredentials(job);
    }
}
/**
 * Initializes authentication credentials for the given job.
 * Under Hadoop security, propagates the launcher's delegation-token file
 * (HADOOP_TOKEN_FILE_LOCATION) into the MR job configuration. Under HBase
 * security, obtains an HBase auth token for the current user for the local
 * cluster and, if TableOutputFormat.QUORUM_ADDRESS is configured, for the
 * remote output cluster as well.
 *
 * @param job the job whose credentials should be populated
 * @throws IOException when obtaining the authentication token fails
 */
public static void initCredentials(Job job) throws IOException {
    UserProvider userProvider = UserProvider.instantiate(job.getConfiguration());
    if (userProvider.isHadoopSecurityEnabled()) {
        // propagate delegation related props from launcher job to MR job
        if (System.getenv("HADOOP_TOKEN_FILE_LOCATION") != null) {
            job.getConfiguration().set("mapreduce.job.credentials.binary",
                System.getenv("HADOOP_TOKEN_FILE_LOCATION"));
        }
    }
    if (userProvider.isHBaseSecurityEnabled()) {
        try {
            // init credentials for remote cluster
            String quorumAddress = job.getConfiguration().get(TableOutputFormat.QUORUM_ADDRESS);
            User user = userProvider.getCurrent();
            if (quorumAddress != null) {
                // build a peer configuration keyed to the remote ZK quorum
                Configuration peerConf = HBaseConfiguration.create(job.getConfiguration());
                ZKUtil.applyClusterKeyToConf(peerConf, quorumAddress);
                obtainAuthTokenForJob(job, peerConf, user);
            }
            obtainAuthTokenForJob(job, job.getConfiguration(), user);
        } catch (InterruptedException ie) {
            LOG.info("Interrupted obtaining user authentication token");
            // restore the interrupt status for callers further up the stack
            Thread.currentThread().interrupt();
        }
    }
}
/**
 * Obtain an authentication token, for the specified cluster, on behalf of the current user
 * and add it to the credentials for the given map reduce job.
 *
 * The quorumAddress is the key to the ZK ensemble, which contains:
 * hbase.zookeeper.quorum, hbase.zookeeper.client.port and zookeeper.znode.parent
 *
 * @param job The job that requires the permission.
 * @param quorumAddress string that contains the 3 required configurations
 * @throws IOException When the authentication token cannot be obtained.
 */
public static void initCredentialsForCluster(Job job, String quorumAddress)
    throws IOException {
    UserProvider userProvider = UserProvider.instantiate(job.getConfiguration());
    if (userProvider.isHBaseSecurityEnabled()) {
        try {
            Configuration peerConf = HBaseConfiguration.create(job.getConfiguration());
            ZKUtil.applyClusterKeyToConf(peerConf, quorumAddress);
            obtainAuthTokenForJob(job, peerConf, userProvider.getCurrent());
        } catch (InterruptedException e) {
            LOG.info("Interrupted obtaining user authentication token");
            // Re-assert the interrupt status so callers can observe it.
            // (Thread.interrupted() would CLEAR the flag, swallowing the
            // interruption — see initCredentials above for the same pattern.)
            Thread.currentThread().interrupt();
        }
    }
}
/**
 * Adds an HBase authentication token for {@code user} to the job's
 * credentials: reuses an existing token for the target cluster when one
 * is already held, otherwise asks the user to obtain a fresh one.
 *
 * @param job  the job whose credentials are updated
 * @param conf configuration identifying the target cluster
 * @param user the user on whose behalf the token is obtained
 * @throws IOException          when the token cannot be obtained
 * @throws InterruptedException when interrupted while obtaining the token
 */
private static void obtainAuthTokenForJob(Job job, Configuration conf, User user)
    throws IOException, InterruptedException {
    Token<AuthenticationTokenIdentifier> existing = getAuthToken(conf, user);
    if (existing != null) {
        job.getCredentials().addToken(existing.getService(), existing);
        return;
    }
    user.obtainAuthTokenForJob(conf, job);
}
/**
 * Get the authentication token of the user for the cluster specified in the configuration.
 * Opens a short-lived ZooKeeper connection to read the cluster id, then
 * looks for a matching token among the user's UGI tokens.
 * @return null if the user does not have the token, otherwise the auth token for the cluster.
 * @throws IOException if the cluster id cannot be read from ZooKeeper
 * @throws InterruptedException declared for callers; not thrown directly here
 */
private static Token<AuthenticationTokenIdentifier> getAuthToken(Configuration conf, User user)
    throws IOException, InterruptedException {
    ZooKeeperWatcher zkw = new ZooKeeperWatcher(conf, "mr-init-credentials", null);
    try {
        String clusterId = ZKClusterId.readClusterIdZNode(zkw);
        return new AuthenticationTokenSelector().selectToken(new Text(clusterId), user.getUGI().getTokens());
    } catch (KeeperException e) {
        throw new IOException(e);
    } finally {
        // always release the ZK connection, even on failure
        zkw.close();
    }
}
/**
 * Writes the given scan into a Base64 encoded string.
 *
 * @param scan The scan to write out.
 * @return The scan saved in a Base64 encoded string.
 * @throws IOException When writing the scan fails.
 */
static String convertScanToString(Scan scan) throws IOException {
  // Serialize via the protobuf representation, then Base64-encode the raw bytes.
  byte[] serialized = ProtobufUtil.toScan(scan).toByteArray();
  return Base64.encodeBytes(serialized);
}
/**
 * Converts the given Base64 string back into a Scan instance.
 *
 * @param base64 The scan details.
 * @return The newly created Scan instance.
 * @throws IOException When reading the scan instance fails.
 */
static Scan convertStringToScan(String base64) throws IOException {
  try {
    // Decode and parse the protobuf form; only parseFrom throws the checked protobuf error.
    ClientProtos.Scan proto = ClientProtos.Scan.parseFrom(Base64.decode(base64));
    return ProtobufUtil.toScan(proto);
  } catch (InvalidProtocolBufferException ipbe) {
    // Surface malformed input as an IOException, preserving the cause.
    throw new IOException(ipbe);
  }
}
/**
 * Use this before submitting a TableReduce job. It will appropriately set up the JobConf.
 *
 * @param table The output table.
 * @param reducer The reducer class to use.
 * @param job The current job to adjust.
 * @throws IOException When determining the region count fails.
 */
public static void initTableReducerJob(String table,
    Class<? extends TableReducer> reducer, Job job) throws IOException {
  // Delegate with the default partitioner.
  initTableReducerJob(table, reducer, job, null);
}
/**
 * Use this before submitting a TableReduce job. It will appropriately set up the JobConf.
 *
 * @param table The output table.
 * @param reducer The reducer class to use.
 * @param job The current job to adjust.
 * @param partitioner Partitioner to use. Pass <code>null</code> to use the default partitioner.
 * @throws IOException When determining the region count fails.
 */
public static void initTableReducerJob(String table,
    Class<? extends TableReducer> reducer, Job job,
    Class partitioner) throws IOException {
  // Delegate with no remote quorum and no region server class/impl overrides.
  initTableReducerJob(table, reducer, job, partitioner, null, null, null);
}
/**
 * Use this before submitting a TableReduce job. It will appropriately set up the JobConf.
 *
 * @param table The output table.
 * @param reducer The reducer class to use.
 * @param job The current job to adjust. Make sure the passed job is
 * carrying all necessary HBase configuration.
 * @param partitioner Partitioner to use. Pass <code>null</code> to use the default partitioner.
 * @param quorumAddress Distant cluster to write to; default is null for
 * output to the cluster that is designated in <code>hbase-site.xml</code>.
 * Set this String to the zookeeper ensemble of an alternate remote cluster
 * when you would have the reduce write a cluster that is other than the
 * default; e.g. copying tables between clusters, the source would be
 * designated by <code>hbase-site.xml</code> and this param would have the
 * ensemble address of the remote cluster. The format to pass is particular.
 * Pass <code> &lt;hbase.zookeeper.quorum&gt;:&lt;hbase.zookeeper.client.port&gt;:&lt;zookeeper.znode.parent&gt;
 * </code> such as <code>server,server2,server3:2181:/hbase</code>.
 * @param serverClass redefined hbase.regionserver.class
 * @param serverImpl redefined hbase.regionserver.impl
 * @throws IOException When determining the region count fails.
 */
public static void initTableReducerJob(String table,
    Class<? extends TableReducer> reducer, Job job,
    Class partitioner, String quorumAddress, String serverClass,
    String serverImpl) throws IOException {
  // Delegate with addDependencyJars enabled, the historical default.
  initTableReducerJob(table, reducer, job, partitioner, quorumAddress,
      serverClass, serverImpl, true);
}
/**
 * Use this before submitting a TableReduce job. It will
 * appropriately set up the JobConf.
 *
 * @param table The output table.
 * @param reducer The reducer class to use.
 * @param job The current job to adjust. Make sure the passed job is
 * carrying all necessary HBase configuration.
 * @param partitioner Partitioner to use. Pass <code>null</code> to use
 * default partitioner.
 * @param quorumAddress Distant cluster to write to; default is null for
 * output to the cluster that is designated in <code>hbase-site.xml</code>.
 * Set this String to the zookeeper ensemble of an alternate remote cluster
 * when you would have the reduce write a cluster that is other than the
 * default; e.g. copying tables between clusters, the source would be
 * designated by <code>hbase-site.xml</code> and this param would have the
 * ensemble address of the remote cluster. The format to pass is particular.
 * Pass <code> <hbase.zookeeper.quorum>:<hbase.zookeeper.client.port>:<zookeeper.znode.parent>
 * </code> such as <code>server,server2,server3:2181:/hbase</code>.
 * @param serverClass redefined hbase.regionserver.class
 * @param serverImpl redefined hbase.regionserver.impl
 * @param addDependencyJars upload HBase jars and jars for any of the configured
 * job classes via the distributed cache (tmpjars).
 * @throws IOException When determining the region count fails.
 */
public static void initTableReducerJob(String table,
    Class<? extends TableReducer> reducer, Job job,
    Class partitioner, String quorumAddress, String serverClass,
    String serverImpl, boolean addDependencyJars) throws IOException {
  Configuration conf = job.getConfiguration();
  // Fold HBase defaults into the job's configuration without clobbering job-set values.
  HBaseConfiguration.merge(conf, HBaseConfiguration.create(conf));
  job.setOutputFormatClass(TableOutputFormat.class);
  // A null reducer is allowed: the job may be map-only into the table.
  if (reducer != null) job.setReducerClass(reducer);
  conf.set(TableOutputFormat.OUTPUT_TABLE, table);
  // Register HBase's Mutation/Result serializations in addition to whatever is configured.
  conf.setStrings("io.serializations", conf.get("io.serializations"),
      MutationSerialization.class.getName(), ResultSerialization.class.getName());
  // If passed a quorum/ensemble address, pass it on to TableOutputFormat.
  if (quorumAddress != null) {
    // Calling this will validate the format
    ZKUtil.transformClusterKey(quorumAddress);
    conf.set(TableOutputFormat.QUORUM_ADDRESS,quorumAddress);
  }
  // Both overrides must be supplied together; a lone value is ignored.
  if (serverClass != null && serverImpl != null) {
    conf.set(TableOutputFormat.REGION_SERVER_CLASS, serverClass);
    conf.set(TableOutputFormat.REGION_SERVER_IMPL, serverImpl);
  }
  job.setOutputKeyClass(ImmutableBytesWritable.class);
  job.setOutputValueClass(Writable.class);
  if (partitioner == HRegionPartitioner.class) {
    job.setPartitionerClass(HRegionPartitioner.class);
    // Cap the reducer count at the region count so each reducer writes at most one region.
    int regions = MetaReader.getRegionCount(conf, table);
    if (job.getNumReduceTasks() > regions) {
      job.setNumReduceTasks(regions);
    }
  } else if (partitioner != null) {
    job.setPartitionerClass(partitioner);
  }
  if (addDependencyJars) {
    addDependencyJars(job);
  }
  // Attach auth tokens for secure clusters (no-op when security is disabled).
  initCredentials(job);
}
/**
 * Ensures that the given number of reduce tasks for the given job
 * configuration does not exceed the number of regions for the given table.
 *
 * @param table The table to get the region count for.
 * @param job The current job to adjust.
 * @throws IOException When retrieving the table details fails.
 */
public static void limitNumReduceTasks(String table, Job job)
    throws IOException {
  int regionCount = MetaReader.getRegionCount(job.getConfiguration(), table);
  if (job.getNumReduceTasks() > regionCount) {
    job.setNumReduceTasks(regionCount);
  }
}
/**
 * Sets the number of reduce tasks for the given job configuration to the
 * number of regions the given table has.
 *
 * @param table The table to get the region count for.
 * @param job The current job to adjust.
 * @throws IOException When retrieving the table details fails.
 */
public static void setNumReduceTasks(String table, Job job)
    throws IOException {
  int regionCount = MetaReader.getRegionCount(job.getConfiguration(), table);
  job.setNumReduceTasks(regionCount);
}
/**
 * Sets the number of rows to return and cache with each scanner iteration.
 * Higher caching values will enable faster mapreduce jobs at the expense of
 * requiring more heap to contain the cached rows.
 *
 * @param job The current job to adjust.
 * @param batchSize The number of rows to return in batch with each scanner
 * iteration.
 */
public static void setScannerCaching(Job job, int batchSize) {
  Configuration conf = job.getConfiguration();
  conf.setInt("hbase.client.scanner.caching", batchSize);
}
/**
 * Add HBase and its dependencies (only) to the job configuration.
 * <p>
 * This is intended as a low-level API, facilitating code reuse between this
 * class and its mapred counterpart. It also of use to external tools that
 * need to build a MapReduce job that interacts with HBase but want
 * fine-grained control over the jars shipped to the cluster.
 * </p>
 * @param conf The Configuration object to extend with dependencies.
 * @see org.apache.hadoop.hbase.mapred.TableMapReduceUtil
 * @see <a href="https://issues.apache.org/jira/browse/PIG-3285">PIG-3285</a>
 */
public static void addHBaseDependencyJars(Configuration conf) throws IOException {
  // Each class below is chosen to pin exactly one jar; changing a class here changes
  // which artifact is shipped to the cluster via tmpjars.
  addDependencyJars(conf,
      // explicitly pull a class from each module
      org.apache.hadoop.hbase.HConstants.class, // hbase-common
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.class, // hbase-protocol
      org.apache.hadoop.hbase.client.Put.class, // hbase-client
      org.apache.hadoop.hbase.CompatibilityFactory.class, // hbase-hadoop-compat
      org.apache.hadoop.hbase.mapreduce.TableMapper.class, // hbase-server
      // pull necessary dependencies
      org.apache.zookeeper.ZooKeeper.class,
      org.jboss.netty.channel.ChannelFactory.class,
      com.google.protobuf.Message.class,
      com.google.common.collect.Lists.class,
      org.cloudera.htrace.Trace.class);
}
/**
 * Returns a classpath string built from the content of the "tmpjars" value in {@code conf}.
 * Also exposed to shell scripts via `bin/hbase mapredcp`.
 *
 * @param conf configuration whose "tmpjars" entries are joined into a classpath.
 * @return the tmpjars entries, scheme prefixes stripped, joined with the platform
 *     path separator.
 * @throws IllegalArgumentException if {@code conf} is null or contains no tmpjars.
 */
public static String buildDependencyClasspath(Configuration conf) {
  if (conf == null) {
    throw new IllegalArgumentException("Must provide a configuration object.");
  }
  // A Set de-duplicates repeated tmpjars entries.
  Set<String> paths = new HashSet<String>(conf.getStringCollection("tmpjars"));
  if (paths.isEmpty()) {
    throw new IllegalArgumentException("Configuration contains no tmpjars.");
  }
  StringBuilder sb = new StringBuilder();
  for (String s : paths) {
    // entries can take the form 'file:/path/to/file.jar'; strip the scheme prefix.
    int idx = s.indexOf(':');
    if (idx != -1) {
      s = s.substring(idx + 1);
    }
    if (sb.length() > 0) {
      sb.append(File.pathSeparator);
    }
    sb.append(s);
  }
  return sb.toString();
}
/**
 * Add the HBase dependency jars as well as jars for any of the configured
 * job classes to the job configuration, so that JobClient will ship them
 * to the cluster and add them to the DistributedCache.
 *
 * @param job the job whose configuration receives the tmpjars entries.
 * @throws IOException if jar resolution fails, or wrapping a
 *     {@link ClassNotFoundException} raised while reading the job's classes.
 */
public static void addDependencyJars(Job job) throws IOException {
  addHBaseDependencyJars(job.getConfiguration());
  try {
    // Ship the jar containing each class the job is configured with; nulls (unset
    // classes such as a missing combiner) are skipped by the varargs overload.
    addDependencyJars(job.getConfiguration(),
        // when making changes here, consider also mapred.TableMapReduceUtil
        // pull job classes
        job.getMapOutputKeyClass(),
        job.getMapOutputValueClass(),
        job.getInputFormatClass(),
        job.getOutputKeyClass(),
        job.getOutputValueClass(),
        job.getOutputFormatClass(),
        job.getPartitionerClass(),
        job.getCombinerClass());
  } catch (ClassNotFoundException e) {
    // Keep the method's checked-exception surface to IOException only.
    throw new IOException(e);
  }
}
/**
 * Add the jars containing the given classes to the job's configuration
 * such that JobClient will ship them to the cluster and add them to
 * the DistributedCache.
 *
 * @param conf configuration whose "tmpjars" value is extended.
 * @param classes classes whose containing jars should be shipped; null entries are skipped.
 */
public static void addDependencyJars(Configuration conf,
    Class<?>... classes) throws IOException {
  FileSystem fs = FileSystem.getLocal(conf);
  // Seed with any jars already present in the tmpjars variable.
  Set<String> jarPaths = new HashSet<String>(conf.getStringCollection("tmpjars"));
  // Map of jar contents to jar name, so classes that have already been packaged
  // do not trigger the creation of new jars.
  Map<String, String> packagedClasses = new HashMap<String, String>();
  for (Class<?> clazz : classes) {
    if (clazz == null) {
      continue;
    }
    Path path = findOrCreateJar(clazz, fs, packagedClasses);
    if (path == null) {
      LOG.warn("Could not find jar for class " + clazz +
          " in order to ship it to the cluster.");
      continue;
    }
    if (!fs.exists(path)) {
      LOG.warn("Could not validate jar file " + path + " for class "
          + clazz);
      continue;
    }
    jarPaths.add(path.toString());
  }
  if (jarPaths.isEmpty()) {
    return;
  }
  conf.set("tmpjars", StringUtils.arrayToString(jarPaths.toArray(new String[jarPaths.size()])));
}
/**
 * If org.apache.hadoop.util.JarFinder is available (0.23+ hadoop), finds
 * the Jar for a class or creates it if it doesn't exist. If the class is in
 * a directory in the classpath, it creates a Jar on the fly with the
 * contents of the directory and returns the path to that Jar. If a Jar is
 * created, it is created in the system temporary directory. Otherwise,
 * returns an existing jar that contains a class of the same name. Maintains
 * a mapping from jar contents to the tmp jar created.
 *
 * @param my_class the class to find.
 * @param fs the FileSystem with which to qualify the returned path.
 * @param packagedClasses a map of class name to path.
 * @return a jar file that contains the class, or null when none can be found or built.
 * @throws IOException
 */
private static Path findOrCreateJar(Class<?> my_class, FileSystem fs,
    Map<String, String> packagedClasses)
    throws IOException {
  // First try to locate an existing jar on the classpath or among previously packaged jars.
  String jar = findContainingJar(my_class, packagedClasses);
  if (jar == null || jar.isEmpty()) {
    // Fall back to JarFinder, which may build a jar on the fly; record its contents
    // so later lookups for classes inside it hit the packagedClasses cache.
    jar = getJar(my_class);
    updateMap(jar, packagedClasses);
  }
  if (jar == null || jar.isEmpty()) {
    return null;
  }
  LOG.debug(String.format("For class %s, using jar %s", my_class.getName(), jar));
  return new Path(jar).makeQualified(fs);
}
/**
 * Add entries to <code>packagedClasses</code> corresponding to class files
 * contained in <code>jar</code>.
 *
 * @param jar The jar whose content to list; a null or empty path is a no-op.
 * @param packagedClasses map[class -> jar]
 */
private static void updateMap(String jar, Map<String, String> packagedClasses) throws IOException {
  if (null == jar || jar.isEmpty()) {
    return;
  }
  ZipFile zip = null;
  try {
    zip = new ZipFile(jar);
    for (Enumeration<? extends ZipEntry> iter = zip.entries(); iter.hasMoreElements();) {
      ZipEntry entry = iter.nextElement();
      // NOTE(review): endsWith("class") also matches entry names that merely end in
      // "class" without the "." (e.g. "myclass") — confirm ".class" was not intended.
      if (entry.getName().endsWith("class")) {
        packagedClasses.put(entry.getName(), jar);
      }
    }
  } finally {
    // Close the zip even if iteration fails; guarded because construction may have thrown.
    if (null != zip) zip.close();
  }
}
/**
 * Find a jar that contains a class of the same name, if any. It will return
 * a jar file, even if that is not the first thing on the class path that
 * has a class with the same name. Looks first on the classpath and then in
 * the <code>packagedClasses</code> map.
 *
 * @param my_class the class to find.
 * @return a jar file that contains the class, or null.
 * @throws IOException
 */
private static String findContainingJar(Class<?> my_class, Map<String, String> packagedClasses)
    throws IOException {
  ClassLoader loader = my_class.getClassLoader();
  String class_file = my_class.getName().replaceAll("\\.", "/") + ".class";
  // first search the classpath
  for (Enumeration<URL> resources = loader.getResources(class_file); resources.hasMoreElements();) {
    URL url = resources.nextElement();
    if (!"jar".equals(url.getProtocol())) {
      continue;
    }
    String path = url.getPath();
    if (path.startsWith("file:")) {
      path = path.substring("file:".length());
    }
    // URLDecoder is a misnamed class, since it actually decodes
    // x-www-form-urlencoded MIME type rather than actual
    // URL encoding (which the file path has). Therefore it would
    // decode +s to ' 's which is incorrect (spaces are actually
    // either unencoded or encoded as "%20"). Replace +s first, so
    // that they are kept sacred during the decoding process.
    path = path.replaceAll("\\+", "%2B");
    path = URLDecoder.decode(path, "UTF-8");
    // Strip the "!/inner/path" suffix of the jar: URL, leaving just the jar file path.
    return path.replaceAll("!.*$", "");
  }
  // now look in any jars we've packaged using JarFinder. Returns null when
  // no jar is found.
  return packagedClasses.get(class_file);
}
/**
 * Invoke 'getJar' on a JarFinder implementation. Useful for some job
 * configuration contexts (HBASE-8140) and also for testing on MRv2. First
 * check if we have HADOOP-9426. Lacking that, fall back to the backport.
 *
 * @param my_class the class to find.
 * @return a jar file that contains the class, or null.
 */
private static String getJar(Class<?> my_class) {
  String ret = null;
  String hadoopJarFinder = "org.apache.hadoop.util.JarFinder";
  Class<?> jarFinder = null;
  try {
    // Prefer Hadoop's own JarFinder when it exists on the classpath (0.23+),
    // invoking it reflectively so this code still compiles against older Hadoop.
    LOG.debug("Looking for " + hadoopJarFinder + ".");
    jarFinder = Class.forName(hadoopJarFinder);
    LOG.debug(hadoopJarFinder + " found.");
    Method getJar = jarFinder.getMethod("getJar", Class.class);
    ret = (String) getJar.invoke(null, my_class);
  } catch (ClassNotFoundException e) {
    LOG.debug("Using backported JarFinder.");
    ret = JarFinder.getJar(my_class);
  } catch (InvocationTargetException e) {
    // function was properly called, but threw its own exception. Unwrap it
    // and pass it on.
    throw new RuntimeException(e.getCause());
  } catch (Exception e) {
    // toss all other exceptions, related to reflection failure
    throw new RuntimeException("getJar invocation failed.", e);
  }
  return ret;
}
}
| |
// Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skyframe.trimming;
import com.google.common.base.Preconditions;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
import java.util.function.UnaryOperator;
/**
* Cache which tracks canonical invocations and matches keys to equivalent keys (after trimming).
*
* <p>This cache can be built independently of the massive build dependency that is build-base
* (SkyFunctions and BuildConfiguration and so on), and so it is - thus, it uses type parameters to
* speak more abstractly about what it cares about.
*
* <p>Consider a {@code <KeyT>} as a pair of {@code <DescriptorT>} and {@code <ConfigurationT>}. The
* descriptor describes what the key builds, while the configuration describes how to build it.
*
* <p>For example, a ConfiguredTargetKey is made up of a Label, which is its descriptor, and a
* BuildConfiguration, which is its configuration. An AspectKey is made up of a Label and a set of
* AspectDescriptors describing the aspect and the aspects it depends on, all of which are part of
* the AspectKey's descriptor, and also has a BuildConfiguration, which is its configuration.
*
* <p>A key always uses all of its descriptor, but it may only use part of its configuration. A Java
* configured target may have no use for Python configuration, for example. Thus, it would produce
* the same result to evaluate that target with a configuration which doesn't include Python data.
* Reducing the configuration to the subset configuration which only includes the bits the target
* actually needs is called trimming the configuration.
*
* <p>If this trimmed configuration is a subset of another configuration, then building whatever the
* descriptor refers to with that other configuration will produce the same result as the trimmed
* configuration, which is the same result as the configuration that the trimmed configuration was
* trimmed from.
*
* <p>This cache provides methods for matching keys which would evaluate to the same result because
* they have the same descriptor and trim to the same configuration, allowing callers to avoid doing
* work that has already been done. It also permits invalidating, revalidating, and removing these
* keys, as might happen during their lifecycle (if something they depend on has changed, etc.).
*
* <p>Internally, this cache is essentially a very sparse table. Each row, headed by a descriptor,
* describes the possible configurations of that descriptor. Columns, headed by a trimmed
* configuration, represent minimal configurations that descriptors can be invoked with. And a cell
* contains the key which corresponds to the canonical invocation of that descriptor with that
* configuration.
*
* <p>This class expects to be used in ways which are consistent with trimming. That is to say:
*
* <ul>
* <li>If the same key is put in the cache twice with different trimmed configurations, it must be
* invalidated between the two puts. Afterward, the original trimmed configuration is no
* longer valid for the rest of this build.
* <li>No trimmed configuration must be put in the cache which has equal values for every fragment
* it shares with another trimmed configuration already in the cache, unless the key
* associated with the other configuration has been invalidated. Afterward, the configuration
* which had previously been invalidated is no longer valid for the rest of this build.
* <li>Methods which read and add to the cache - {@link #get(KeyT)}, {@link #revalidate(KeyT)},
* and {@link #putIfAbsent(KeyT, ConfigurationT)} - may be used together in one phase of the
* build. Methods which remove from the cache - {@link #invalidate(KeyT)}, {@link
* #remove(KeyT)}, and {@link #clear()} - may be used together in another phase of the build.
* Calls to these groups of methods must never be interleaved.
* </ul>
*
* <p>If used as described above, this class is thread-safe.
*/
public final class TrimmedConfigurationCache<KeyT, DescriptorT, ConfigurationT> {
// ======== Tuning parameters ==========
/** The initial capacity of the cache of descriptors. */
private static final int CACHE_INITIAL_SIZE = 100;
/** The table density for the cache of descriptors. */
private static final float CACHE_LOAD_FACTOR = 0.9f;
/** The number of threads expected to be writing to the descriptor cache at a time. */
private static final int CACHE_CONCURRENCY_LEVEL = 16;
/**
 * The number of configurations to expect in a single descriptor - that is, the initial capacity
 * of descriptors' maps.
 */
private static final int EXPECTED_CONFIGURATIONS_PER_DESCRIPTOR = 4;
/**
 * The table density for the {@link ConcurrentHashMap ConcurrentHashMaps} created for tracking
 * configurations of each descriptor.
 */
private static final float DESCRIPTOR_LOAD_FACTOR = 0.9f;
/** The number of threads expected to be writing to a single descriptor at a time. */
private static final int DESCRIPTOR_CONCURRENCY_LEVEL = 1;
/** Extracts a key's descriptor (the "what to build" part). */
private final Function<KeyT, DescriptorT> descriptorExtractor;
/** Extracts a key's configuration (the "how to build it" part). */
private final Function<KeyT, ConfigurationT> configurationExtractor;
/** Compares configurations; see {@code ConfigurationComparer.Result} for the relations used. */
private final ConfigurationComparer<ConfigurationT> configurationComparer;
// The sparse table described in the class javadoc: row = descriptor, column = trimmed
// configuration, cell = canonical key (+ validity state). Declared volatile so clear()
// can atomically swap in a fresh map that is visible to all threads.
private volatile ConcurrentHashMap<
        DescriptorT, ConcurrentHashMap<ConfigurationT, KeyAndState<KeyT>>>
    descriptors;
/**
 * Constructs a new TrimmedConfigurationCache with the given methods of extracting descriptors and
 * configurations from keys, and uses the given predicate to determine the relationship between
 * two configurations.
 *
 * <p>{@code configurationComparer} should be consistent with equals - that is,
 * {@code a.equals(b) == b.equals(a) == configurationComparer.compare(a, b).equals(Result.EQUAL)}
 *
 * @throws NullPointerException if any argument is null.
 */
public TrimmedConfigurationCache(
    Function<KeyT, DescriptorT> descriptorExtractor,
    Function<KeyT, ConfigurationT> configurationExtractor,
    ConfigurationComparer<ConfigurationT> configurationComparer) {
  // Fail fast at construction rather than with an NPE deep inside get()/putIfAbsent().
  this.descriptorExtractor = Preconditions.checkNotNull(descriptorExtractor);
  this.configurationExtractor = Preconditions.checkNotNull(configurationExtractor);
  this.configurationComparer = Preconditions.checkNotNull(configurationComparer);
  this.descriptors = newCacheMap();
}
/**
 * Looks for a key with the same descriptor as the input key, which has a configuration that
 * trimmed to a subset of the input key's.
 *
 * <p>Note that this is not referring to a <em>proper</em> subset; it's quite possible for a key
 * to "trim" to a configuration equal to its configuration. That is, without anything being
 * removed.
 *
 * <p>If such a key has been added to this cache, it is returned in a present {@link Optional}.
 * Invoking this key will produce the same result as invoking the input key.
 *
 * <p>If no such key has been added to this cache, or if a key has been added to the cache and
 * subsequently been the subject of an {@link #invalidate(KeyT)}, an absent Optional will be
 * returned instead. No currently-valid key has trimmed to an equivalent configuration, and so the
 * input key should be executed.
 */
public Optional<KeyT> get(KeyT input) {
  ConcurrentHashMap<ConfigurationT, KeyAndState<KeyT>> trimmings =
      descriptors.get(getDescriptorFor(input));
  if (trimmings == null) {
    // Nothing has ever been recorded for this descriptor.
    return Optional.empty();
  }
  ConfigurationT inputConfiguration = getConfigurationFor(input);
  // Scan the descriptor's row for any still-valid canonical key whose trimmed
  // configuration is a subset of (or equal to) the input's configuration.
  for (Entry<ConfigurationT, KeyAndState<KeyT>> entry : trimmings.entrySet()) {
    if (canSubstituteFor(inputConfiguration, entry.getKey(), entry.getValue())) {
      return Optional.of(entry.getValue().getKey());
    }
  }
  return Optional.empty();
}
/**
 * Returns whether the given trimmed configuration and key are a suitable substitute for the
 * candidate configuration: the canonical key must still be valid, and the trimmed
 * configuration must be a subset of (or equal to) the candidate's.
 */
private boolean canSubstituteFor(
    ConfigurationT candidateConfiguration,
    ConfigurationT trimmedConfiguration,
    KeyAndState<KeyT> canonicalKeyAndState) {
  // Check validity first; it is cheap and skips the configuration comparison entirely
  // for invalidated entries (same short-circuit order as a && chain).
  if (!canonicalKeyAndState.getState().isKnownValid()) {
    return false;
  }
  return compareConfigurations(trimmedConfiguration, candidateConfiguration).isSubsetOrEqual();
}
/**
 * Attempts to record the given key as the canonical invocation for its descriptor and the
 * passed-in trimmed configuration.
 *
 * <p>The trimmed configuration must be a subset of the input key's configuration. Otherwise,
 * {@link IllegalArgumentException} will be thrown.
 *
 * <p>If another key matching this configuration is found, that key will be returned. That key
 * represents the canonical invocation, which should produce the same result as the input key. It
 * may have been previously invalidated, but will be considered revalidated at this point.
 *
 * <p>Otherwise, if the input key is the first to trim to this configuration, the input key is
 * returned.
 */
public KeyT putIfAbsent(KeyT canonicalKey, ConfigurationT trimmedConfiguration) {
  ConfigurationT fullConfiguration = getConfigurationFor(canonicalKey);
  Preconditions.checkArgument(
      compareConfigurations(trimmedConfiguration, fullConfiguration).isSubsetOrEqual());
  ConcurrentHashMap<ConfigurationT, KeyAndState<KeyT>> trimmingsOfDescriptor =
      descriptors.computeIfAbsent(getDescriptorFor(canonicalKey), unused -> newDescriptorMap());
  // Atomically install this key as canonical for the trimmed configuration, or revalidate
  // whichever key already holds that slot.
  KeyAndState<KeyT> currentMapping =
      trimmingsOfDescriptor.compute(
          trimmedConfiguration,
          (configuration, currentValue) -> {
            if (currentValue == null) {
              return KeyAndState.create(canonicalKey);
            } else {
              return currentValue.asValidated();
            }
          });
  // True when our key won the slot (either newly created, or it was already the canonical key).
  boolean newlyAdded = currentMapping.getKey().equals(canonicalKey);
  int failedRemoves;
  do {
    failedRemoves = 0;
    for (Entry<ConfigurationT, KeyAndState<KeyT>> entry : trimmingsOfDescriptor.entrySet()) {
      if (entry.getValue().getState().equals(KeyAndState.State.POSSIBLY_INVALID)) {
        // Remove invalidated keys where:
        // * the same key evaluated to a different configuration than it does now
        // * (for trimmed configurations not yet seen) the new trimmed configuration has equal
        //   values for every fragment it shares with the old configuration (including subsets
        //   or supersets).
        // These are keys we know will not be revalidated as part of the current build.
        // Although it also ensures that we don't remove the entry we just added, the check for
        // invalidation is mainly to avoid wasting time checking entries that are still valid for
        // the current build and therefore will not match either of these properties.
        if (entry.getValue().getKey().equals(canonicalKey)
            || (newlyAdded
                && compareConfigurations(trimmedConfiguration, entry.getKey())
                    .hasEqualSharedFragments())) {
          // Conditional two-argument remove: only succeeds if the entry is still exactly the
          // one we examined; a concurrent change makes it fail and we retry the sweep.
          if (!trimmingsOfDescriptor.remove(entry.getKey(), entry.getValue())) {
            // It's possible that this entry was removed by another thread in the meantime.
            failedRemoves += 1;
          }
        }
      }
    }
  } while (failedRemoves > 0);
  return currentMapping.getKey();
}
/**
 * Marks the given key as invalidated.
 *
 * <p>An invalidated key will not be returned from {@link #get(KeyT)}, as it cannot be proven that
 * the key will still trim to the same configuration.
 *
 * <p>This invalidation is undone if the input key is passed to {@link #revalidate(KeyT)}, or if
 * the configuration it originally trimmed to is passed to a call of {@link #putIfAbsent(KeyT,
 * ConfigurationT)}. This is true regardless of whether the key passed to putIfAbsent is the same
 * as the input to this method.
 *
 * <p>If the key is not currently canonical for any descriptor/configuration pair, or if the key
 * had previously been invalidated and not revalidated, this method has no effect.
 */
public void invalidate(KeyT key) {
  updateEntryWithRetries(key, KeyAndState::asInvalidated);
}
/**
 * Unmarks the given key as invalidated.
 *
 * <p>This undoes the effects of {@link #invalidate(KeyT)}, allowing the key to be returned from
 * {@link #get(KeyT)} again.
 *
 * <p>If the key is not currently canonical for any descriptor/configuration pair, or if the key
 * had not previously been invalidated or had since been revalidated, this method has no effect.
 */
public void revalidate(KeyT key) {
  // asValidated returns the same instance when already valid, making this a no-op then.
  updateEntryWithRetries(key, KeyAndState::asValidated);
}
/**
 * Completely removes the given key from the cache.
 *
 * <p>After this call, {@link #get(KeyT)} and {@link #putIfAbsent(KeyT, ConfigurationT)} will no
 * longer return this key unless it is put back in the cache with putIfAbsent.
 *
 * <p>If the key is not currently canonical for any descriptor/configuration pair, this method has
 * no effect.
 */
public void remove(KeyT key) {
  // A transformer that always produces null removes the key's entry from the map.
  updateEntryWithRetries(key, unused -> null);
  // Opportunistically drop the per-descriptor map if it is now empty. The conditional
  // two-argument remove only succeeds while the descriptor still maps to this same
  // (empty) value, so a concurrently repopulated row is left untouched.
  DescriptorT descriptor = getDescriptorFor(key);
  ConcurrentHashMap<ConfigurationT, KeyAndState<KeyT>> trimmings = descriptors.get(descriptor);
  if (trimmings != null && trimmings.isEmpty()) {
    descriptors.remove(descriptor, trimmings);
  }
}
/**
 * Finds the entry in the cache where the given key is canonical and updates or removes it.
 *
 * <p>The transformation is applied transactionally; that is, if another change has happened since
 * the value was first looked up, the new value is retrieved and the transformation is applied
 * again. This repeats until there are no conflicts.
 *
 * <p>This method has no effect if this key is currently not canonical.
 *
 * @param transformation The transformation to apply to the given entry. The entry will be
 *     replaced with the value returned from invoking this on the original value. If it returns
 *     null, the entry will be removed instead. If it returns the same instance, nothing will be
 *     done to the entry.
 */
private void updateEntryWithRetries(KeyT key, UnaryOperator<KeyAndState<KeyT>> transformation) {
  boolean succeeded;
  do {
    // Retry whenever a concurrent modification caused the single attempt to fail.
    succeeded = updateEntryIfNoConflicts(key, transformation);
  } while (!succeeded);
}
/**
 * Finds the entry in the cache where the given key is canonical and updates or removes it.
 *
 * <p>Only one attempt is made, and if there's a collision with another change, false is returned
 * and the map is not changed.
 *
 * <p>This method succeeds (returns {@code true}) without doing anything if this key is currently
 * not canonical.
 *
 * @param transformation The transformation to apply to the given entry. The entry will be
 *     replaced with the value returned from invoking this on the original value. If it returns
 *     null, the entry will be removed instead. If it returns the same instance, nothing will be
 *     done to the entry.
 */
private boolean updateEntryIfNoConflicts(
    KeyT key, UnaryOperator<KeyAndState<KeyT>> transformation) {
  DescriptorT descriptor = getDescriptorFor(key);
  ConcurrentHashMap<ConfigurationT, KeyAndState<KeyT>> trimmingsOfDescriptor =
      descriptors.get(descriptor);
  if (trimmingsOfDescriptor == null) {
    // There are no entries at all for this descriptor.
    return true;
  }
  // Scan the descriptor's row for the (at most one) entry whose canonical key matches.
  for (Entry<ConfigurationT, KeyAndState<KeyT>> entry : trimmingsOfDescriptor.entrySet()) {
    KeyAndState<KeyT> currentValue = entry.getValue();
    if (currentValue.getKey().equals(key)) {
      KeyAndState<KeyT> newValue = transformation.apply(currentValue);
      if (newValue == null) {
        // Conditional remove: fails (returns false) if another thread changed the entry
        // between our read and this call, signaling the caller to retry.
        return trimmingsOfDescriptor.remove(entry.getKey(), currentValue);
      } else if (newValue != currentValue) {
        // Conditional replace with the same compare-and-set semantics as above.
        return trimmingsOfDescriptor.replace(entry.getKey(), currentValue, newValue);
      } else {
        // newValue == currentValue, there's nothing to do
        return true;
      }
    }
  }
  // The key requested wasn't in the map, so there's nothing to do
  return true;
}
/**
 * Removes all keys from this cache, resetting it to its empty state.
 *
 * <p>This is equivalent to calling {@link #remove(KeyT)} on every key which had ever been passed
 * to {@link #putIfAbsent(KeyT, ConfigurationT)}.
 */
public void clear() {
  // Swap in a brand-new map rather than clearing in place: dropping the old instance lets the
  // garbage collector reclaim the capacity it accumulated from previous expansions.
  descriptors = newCacheMap();
}
/** Looks up the descriptor for {@code key} via the configured descriptor extractor. */
private DescriptorT getDescriptorFor(KeyT key) {
  DescriptorT descriptor = descriptorExtractor.apply(key);
  return descriptor;
}
/** Looks up the configuration for {@code key} via the configured configuration extractor. */
private ConfigurationT getConfigurationFor(KeyT key) {
  ConfigurationT configuration = configurationExtractor.apply(key);
  return configuration;
}
/**
 * Delegates to the configured configuration comparer, which decides whether the first
 * configuration is equal to or a subset of the second.
 */
private ConfigurationComparer.Result compareConfigurations(
    ConfigurationT left, ConfigurationT right) {
  ConfigurationComparer.Result comparison = configurationComparer.apply(left, right);
  return comparison;
}
/** Builds an empty top-level cache map sized with the cache-wide tuning constants. */
private ConcurrentHashMap<DescriptorT, ConcurrentHashMap<ConfigurationT, KeyAndState<KeyT>>>
    newCacheMap() {
  return new ConcurrentHashMap<>(
      CACHE_INITIAL_SIZE, CACHE_LOAD_FACTOR, CACHE_CONCURRENCY_LEVEL);
}
/** Builds an empty per-descriptor configuration map sized with the per-descriptor constants. */
private ConcurrentHashMap<ConfigurationT, KeyAndState<KeyT>> newDescriptorMap() {
  return new ConcurrentHashMap<>(
      EXPECTED_CONFIGURATIONS_PER_DESCRIPTOR, DESCRIPTOR_LOAD_FACTOR, DESCRIPTOR_CONCURRENCY_LEVEL);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.registration;
import org.apache.flink.runtime.registration.RetryingRegistrationTest.TestRegistrationSuccess;
import org.apache.flink.runtime.rpc.RpcService;
import org.apache.flink.runtime.rpc.TestingRpcService;
import org.apache.flink.util.TestLogger;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.LoggerFactory;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Tests for {@link RegisteredRpcConnection}, validating the successful, failure and close
 * behavior.
 */
public class RegisteredRpcConnectionTest extends TestLogger {

	private TestingRpcService rpcService;

	@Before
	public void setup() {
		rpcService = new TestingRpcService();
	}

	@After
	public void tearDown() throws ExecutionException, InterruptedException {
		if (rpcService != null) {
			rpcService.stopService().get();
		}
	}

	/** Verifies that a successful registration exposes the gateway, leader id and address. */
	@Test
	public void testSuccessfulRpcConnection() throws Exception {
		final String testRpcConnectionEndpointAddress = "<TestRpcConnectionEndpointAddress>";
		final UUID leaderId = UUID.randomUUID();
		final String connectionID = "Test RPC Connection ID";

		// an endpoint that immediately returns success
		TestRegistrationGateway testGateway = new TestRegistrationGateway(new RetryingRegistrationTest.TestRegistrationSuccess(connectionID));

		try {
			rpcService.registerGateway(testRpcConnectionEndpointAddress, testGateway);

			TestRpcConnection connection = new TestRpcConnection(testRpcConnectionEndpointAddress, leaderId, rpcService.getExecutor(), rpcService);
			connection.start();

			// wait for the connection to be established
			final String actualConnectionId = connection.getConnectionFuture().get();

			// validate correct invocation and result
			assertTrue(connection.isConnected());
			assertEquals(testRpcConnectionEndpointAddress, connection.getTargetAddress());
			assertEquals(leaderId, connection.getTargetLeaderId());
			assertEquals(testGateway, connection.getTargetGateway());
			assertEquals(connectionID, actualConnectionId);
		}
		finally {
			testGateway.stop();
		}
	}

	/** Verifies that a failing registration surfaces the failure and leaves no gateway set. */
	@Test
	public void testRpcConnectionFailures() throws Exception {
		final String connectionFailureMessage = "Test RPC Connection failure";
		final String testRpcConnectionEndpointAddress = "<TestRpcConnectionEndpointAddress>";
		final UUID leaderId = UUID.randomUUID();

		// a gateway that throws an exception upon each registration call
		TestRegistrationGateway testGateway = mock(TestRegistrationGateway.class);
		final RuntimeException registrationException = new RuntimeException(connectionFailureMessage);
		when(testGateway.registrationCall(any(UUID.class), anyLong())).thenThrow(
			registrationException);

		rpcService.registerGateway(testRpcConnectionEndpointAddress, testGateway);

		TestRpcConnection connection = new TestRpcConnection(testRpcConnectionEndpointAddress, leaderId, rpcService.getExecutor(), rpcService);
		connection.start();

		// wait for the connection failure
		try {
			connection.getConnectionFuture().get();
			fail("expected failure.");
		} catch (ExecutionException ee) {
			assertEquals(registrationException, ee.getCause());
		}

		// validate correct invocation and result
		assertFalse(connection.isConnected());
		assertEquals(testRpcConnectionEndpointAddress, connection.getTargetAddress());
		assertEquals(leaderId, connection.getTargetLeaderId());
		assertNull(connection.getTargetGateway());
	}

	/** Verifies that closing the connection marks it closed while retaining its target info. */
	@Test
	public void testRpcConnectionClose() throws Exception {
		final String testRpcConnectionEndpointAddress = "<TestRpcConnectionEndpointAddress>";
		final UUID leaderId = UUID.randomUUID();
		final String connectionID = "Test RPC Connection ID";

		TestRegistrationGateway testGateway = new TestRegistrationGateway(new RetryingRegistrationTest.TestRegistrationSuccess(connectionID));

		try {
			rpcService.registerGateway(testRpcConnectionEndpointAddress, testGateway);

			TestRpcConnection connection = new TestRpcConnection(testRpcConnectionEndpointAddress, leaderId, rpcService.getExecutor(), rpcService);
			connection.start();

			// close the connection
			connection.close();

			// validate connection is closed
			assertEquals(testRpcConnectionEndpointAddress, connection.getTargetAddress());
			assertEquals(leaderId, connection.getTargetLeaderId());
			assertTrue(connection.isClosed());
		}
		finally {
			testGateway.stop();
		}
	}

	/** Verifies that a reconnect performs a fresh registration yielding the second connection id. */
	@Test
	public void testReconnect() throws Exception {
		final String connectionId1 = "Test RPC Connection ID 1";
		final String connectionId2 = "Test RPC Connection ID 2";
		final String testRpcConnectionEndpointAddress = "<TestRpcConnectionEndpointAddress>";
		final UUID leaderId = UUID.randomUUID();

		// a gateway that answers the first registration with connectionId1 and the second with
		// connectionId2
		final TestRegistrationGateway testGateway = new TestRegistrationGateway(
			new RetryingRegistrationTest.TestRegistrationSuccess(connectionId1),
			new RetryingRegistrationTest.TestRegistrationSuccess(connectionId2));

		// stop the gateway in all cases, consistent with the other tests (was previously leaked)
		try {
			rpcService.registerGateway(testRpcConnectionEndpointAddress, testGateway);

			TestRpcConnection connection = new TestRpcConnection(testRpcConnectionEndpointAddress, leaderId, rpcService.getExecutor(), rpcService);
			connection.start();

			// assertEquals takes (expected, actual); keeping that order makes failures readable
			final String actualConnectionId1 = connection.getConnectionFuture().get();
			assertEquals(connectionId1, actualConnectionId1);

			assertTrue(connection.tryReconnect());

			final String actualConnectionId2 = connection.getConnectionFuture().get();
			assertEquals(connectionId2, actualConnectionId2);
		}
		finally {
			testGateway.stop();
		}
	}

	// ------------------------------------------------------------------------
	//  test RegisteredRpcConnection
	// ------------------------------------------------------------------------

	/**
	 * A {@link RegisteredRpcConnection} that exposes the registration outcome through a
	 * {@link CompletableFuture}, completed on success and completed exceptionally on failure.
	 */
	private static class TestRpcConnection extends RegisteredRpcConnection<UUID, TestRegistrationGateway, TestRegistrationSuccess> {

		// guards connectionFuture, which is swapped out on reconnect
		private final Object lock = new Object();

		private final RpcService rpcService;

		private CompletableFuture<String> connectionFuture;

		public TestRpcConnection(String targetAddress, UUID targetLeaderId, Executor executor, RpcService rpcService) {
			super(LoggerFactory.getLogger(RegisteredRpcConnectionTest.class), targetAddress, targetLeaderId, executor);
			this.rpcService = rpcService;
			this.connectionFuture = new CompletableFuture<>();
		}

		@Override
		protected RetryingRegistration<UUID, TestRegistrationGateway, RetryingRegistrationTest.TestRegistrationSuccess> generateRegistration() {
			return new RetryingRegistrationTest.TestRetryingRegistration(rpcService, getTargetAddress(), getTargetLeaderId());
		}

		@Override
		protected void onRegistrationSuccess(RetryingRegistrationTest.TestRegistrationSuccess success) {
			synchronized (lock) {
				connectionFuture.complete(success.getCorrelationId());
			}
		}

		@Override
		protected void onRegistrationFailure(Throwable failure) {
			synchronized (lock) {
				connectionFuture.completeExceptionally(failure);
			}
		}

		@Override
		public boolean tryReconnect() {
			// cancel any pending wait and hand out a fresh future for the new registration attempt
			synchronized (lock) {
				connectionFuture.cancel(false);
				connectionFuture = new CompletableFuture<>();
			}
			return super.tryReconnect();
		}

		public CompletableFuture<String> getConnectionFuture() {
			synchronized (lock) {
				return connectionFuture;
			}
		}
	}
}
| |
/*
* This file is part of SpongeAPI, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.api.util;
import com.flowpowered.math.vector.Vector3d;
/**
 * Represent the 16 main and secondary cardinal directions plus up and down.
 * With the following assumptions:
 * <ul>
 * <li>{@link #NORTH} targeting towards -Z</li>
 * <li>{@link #EAST} targeting towards +X</li>
 * <li>{@link #SOUTH} targeting towards +Z</li>
 * <li>{@link #WEST} targeting towards -X</li>
 * <li>{@link #UP} targeting towards +Y</li>
 * <li>{@link #DOWN} targeting towards -Y</li>
 * </ul>
 */
public enum Direction {
    // NOTE: the declaration order of the 16 horizontal constants is significant —
    // getClosestHorizonal() converts an angle into an ordinal index, so they must remain in
    // clockwise order starting at NORTH.
    NORTH(new Vector3d(0, 0, -1), Flag.CARDINAL),
    NORTH_NORTHEAST(new Vector3d(C.S8, 0, -C.C8), Flag.SECONDARY_ORDINAL),
    NORTHEAST(new Vector3d(1, 0, -1), Flag.ORDINAL),
    EAST_NORTHEAST(new Vector3d(C.C8, 0, -C.S8), Flag.SECONDARY_ORDINAL),
    EAST(new Vector3d(1, 0, 0), Flag.CARDINAL),
    EAST_SOUTHEAST(new Vector3d(C.C8, 0, C.S8), Flag.SECONDARY_ORDINAL),
    SOUTHEAST(new Vector3d(1, 0, 1), Flag.ORDINAL),
    SOUTH_SOUTHEAST(new Vector3d(C.S8, 0, C.C8), Flag.SECONDARY_ORDINAL),
    SOUTH(new Vector3d(0, 0, 1), Flag.CARDINAL),
    SOUTH_SOUTHWEST(new Vector3d(-C.S8, 0, C.C8), Flag.SECONDARY_ORDINAL),
    SOUTHWEST(new Vector3d(-1, 0, 1), Flag.ORDINAL),
    WEST_SOUTHWEST(new Vector3d(-C.C8, 0, C.S8), Flag.SECONDARY_ORDINAL),
    WEST(new Vector3d(-1, 0, 0), Flag.CARDINAL),
    WEST_NORTHWEST(new Vector3d(-C.C8, 0, -C.S8), Flag.SECONDARY_ORDINAL),
    NORTHWEST(new Vector3d(-1, 0, -1), Flag.ORDINAL),
    NORTH_NORTHWEST(new Vector3d(-C.S8, 0, -C.C8), Flag.SECONDARY_ORDINAL),
    UP(new Vector3d(0, 1, 0), Flag.UPRIGHT),
    DOWN(new Vector3d(0, -1, 0), Flag.UPRIGHT),
    NONE(new Vector3d(0, 0, 0), 0);
    // Unit vector pointing in this direction (the zero vector for NONE).
    private final Vector3d direction;
    // Bitmask of Flag constants classifying this direction.
    private final int flags;
    // 180-degree counterpart; wired up in the static initializer below because enum
    // constants cannot forward-reference each other in their constructors.
    private Direction opposite;
    Direction(Vector3d vector3d, int flags) {
        if (vector3d.lengthSquared() == 0) {
            // Prevent normalization of the zero direction
            this.direction = vector3d;
        } else {
            this.direction = vector3d.normalize();
        }
        this.flags = flags;
    }
    // Pair each direction with its opposite; NONE is its own opposite.
    static {
        NORTH.opposite = SOUTH;
        EAST.opposite = WEST;
        SOUTH.opposite = NORTH;
        WEST.opposite = EAST;
        UP.opposite = DOWN;
        DOWN.opposite = UP;
        NONE.opposite = NONE;
        NORTHEAST.opposite = SOUTHWEST;
        NORTHWEST.opposite = SOUTHEAST;
        SOUTHEAST.opposite = NORTHWEST;
        SOUTHWEST.opposite = NORTHEAST;
        WEST_NORTHWEST.opposite = EAST_SOUTHEAST;
        WEST_SOUTHWEST.opposite = EAST_NORTHEAST;
        NORTH_NORTHWEST.opposite = SOUTH_SOUTHEAST;
        NORTH_NORTHEAST.opposite = SOUTH_SOUTHWEST;
        EAST_SOUTHEAST.opposite = WEST_NORTHWEST;
        EAST_NORTHEAST.opposite = WEST_SOUTHWEST;
        SOUTH_SOUTHEAST.opposite = NORTH_NORTHWEST;
        SOUTH_SOUTHWEST.opposite = NORTH_NORTHEAST;
    }
    /**
     * Gets the closest direction from the given vector. If the vector is the
     * 0-Vector, this method returns {@link #NONE}. If the vector has the same
     * horizontal and vertical length, a horizontal direction will be returned.
     * If the vector has the same angle to two directions the clockwise next
     * will be selected.
     *
     * @param vector The vector to convert to a direction
     * @return The closest horizontal direction.
     */
    public static Direction getClosest(Vector3d vector) {
        // Horizontal wins ties (<=): only a strictly dominant Y component yields UP/DOWN.
        if (Math.pow(vector.getY(), 2) <= Math.pow(vector.getX(), 2) + Math.pow(vector.getZ(), 2)) {
            return getClosestHorizonal(vector);
        } else if (vector.getY() > 0) {
            return UP;
        } else {
            return DOWN;
        }
    }
    /**
     * Gets the closest horizontal direction from the given vector. If the
     * vector is the 0-Vector, this method returns {@link #NONE}. If the vector
     * has the same angle to two directions the clockwise next will be selected.
     *
     * @param vector The vector to convert to a direction
     * @return The closest horizontal direction.
     */
    public static Direction getClosestHorizonal(Vector3d vector) {
        if (vector.getX() == 0) {
            if (vector.getZ() == 0) {
                return NONE;
            } else if (vector.getZ() < 0) {
                return NORTH;
            } else {
                return SOUTH;
            }
        } else {
            // Angle measured clockwise from NORTH (-Z); 8/PI slices the circle into 16
            // sectors and +16.5 both shifts sector boundaries to the midpoints (rounding
            // ties clockwise) and keeps the operand positive before the % 16 wrap.
            final double angle = Math.atan2(vector.getX(), -vector.getZ());
            final int ordinal = (int) (angle * 8 / Math.PI + 16.5) % 16;
            // Relies on the first 16 enum constants being the horizontal ones in clockwise order.
            return values()[ordinal];
        }
    }
    /**
     * Gets the direction associated with the given axis.
     *
     * <p>NOTE(review): X maps to SOUTH (+Z) and Z maps to EAST (+X) here, which looks
     * transposed relative to the class javadoc (EAST is +X, SOUTH is +Z). Confirm whether
     * this mapping is intentional before changing it — callers may depend on it.</p>
     *
     * @param axis The axis
     * @return The direction
     */
    public static Direction getFromAxis(final Axis axis) {
        switch (axis) {
            case X:
                return SOUTH;
            case Y:
                return UP;
            case Z:
                return EAST;
            default:
                throw new IllegalStateException("Not capable of handling the " + axis.name() + " axis!");
        }
    }
    /**
     * Gets the direction of the axis along the given {@link AxisDirection}.
     *
     * @param axis The axis
     * @param direction The direction along the axis
     * @return The direction
     */
    public static Direction getFromAxis(final Axis axis, final AxisDirection direction) {
        switch (direction) {
            case PLUS:
                return getFromAxis(axis);
            case ZERO:
                return NONE;
            case MINUS:
                return getFromAxis(axis).getOpposite();
            default:
                throw new IllegalStateException("Not capable of handling the " + direction.name() + " direction!");
        }
    }
    /**
     * Gets the opposite direction i.e. 180 degrees from this direction.
     *
     * @return The opposite direction
     */
    public Direction getOpposite() {
        return this.opposite;
    }
    /**
     * Returns whether the given direction is opposite this.
     *
     * @param d Direction to test
     * @return True if it is opposite
     */
    public boolean isOpposite(Direction d) {
        return this.opposite.equals(d);
    }
    /**
     * Return true if the direction is of a cardinal direction (north, west
     * east, and south).
     *
     * <p>This evaluates as false for directions that have a non-zero
     * Y-component.</p>
     *
     * @return True if cardinal
     */
    public boolean isCardinal() {
        return (this.flags & Flag.CARDINAL) > 0;
    }
    /**
     * Return true if the direction is of an ordinal direction (northwest,
     * southwest, southeast, northeast).
     *
     * @return True if ordinal
     */
    public boolean isOrdinal() {
        return (this.flags & Flag.ORDINAL) > 0;
    }
    /**
     * Return true if the direction is of a secondary ordinal direction
     * (north-northwest, north-northeast, south-southwest, etc.).
     *
     * @return True if secondary ordinal
     */
    public boolean isSecondaryOrdinal() {
        return (this.flags & Flag.SECONDARY_ORDINAL) > 0;
    }
    /**
     * Return whether Y component is non-zero.
     *
     * @return True if the Y component is non-zero
     */
    public boolean isUpright() {
        return (this.flags & Flag.UPRIGHT) > 0;
    }
    /**
     * Get the Vector3d.
     *
     * @return The Vector3d
     */
    public Vector3d toVector3d() {
        return this.direction;
    }
    // Private constant holder: cos/sin of 22.5 degrees, the components of the
    // secondary-ordinal unit vectors. An interface is used so the fields are implicitly
    // static final without boilerplate.
    private interface C {
        double C8 = Math.cos(Math.PI / 8);
        double S8 = Math.sin(Math.PI / 8);
    }
    /** Bit flags classifying directions; combined in each constant's {@code flags} mask. */
    public static final class Flag {
        public static final int CARDINAL = 0x1;
        public static final int ORDINAL = 0x2;
        public static final int SECONDARY_ORDINAL = 0x4;
        public static final int UPRIGHT = 0x8;
        public static final int ALL = CARDINAL | ORDINAL | SECONDARY_ORDINAL | UPRIGHT;
        // Not instantiable: pure constant holder.
        private Flag() {
        }
    }
}
| |
/*
* This file is part of ViaVersion - https://github.com/ViaVersion/ViaVersion
* Copyright (C) 2016-2021 ViaVersion and contributors
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.viaversion.viaversion;
import com.google.gson.JsonObject;
import com.viaversion.viaversion.api.Via;
import com.viaversion.viaversion.api.ViaAPI;
import com.viaversion.viaversion.api.command.ViaCommandSender;
import com.viaversion.viaversion.api.configuration.ConfigurationProvider;
import com.viaversion.viaversion.api.data.MappingDataLoader;
import com.viaversion.viaversion.api.platform.PlatformTask;
import com.viaversion.viaversion.api.platform.UnsupportedSoftware;
import com.viaversion.viaversion.api.platform.ViaPlatform;
import com.viaversion.viaversion.bukkit.classgenerator.ClassGenerator;
import com.viaversion.viaversion.bukkit.commands.BukkitCommandHandler;
import com.viaversion.viaversion.bukkit.commands.BukkitCommandSender;
import com.viaversion.viaversion.bukkit.listeners.ProtocolLibEnableListener;
import com.viaversion.viaversion.bukkit.platform.BukkitViaAPI;
import com.viaversion.viaversion.bukkit.platform.BukkitViaConfig;
import com.viaversion.viaversion.bukkit.platform.BukkitViaInjector;
import com.viaversion.viaversion.bukkit.platform.BukkitViaLoader;
import com.viaversion.viaversion.bukkit.platform.BukkitViaTask;
import com.viaversion.viaversion.bukkit.util.NMSUtil;
import com.viaversion.viaversion.dump.PluginInfo;
import com.viaversion.viaversion.unsupported.UnsupportedSoftwareImpl;
import com.viaversion.viaversion.util.GsonUtil;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.entity.Player;
import org.bukkit.plugin.Plugin;
import org.bukkit.plugin.java.JavaPlugin;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
/** Bukkit entry point for ViaVersion; bridges the platform-neutral Via core to the Bukkit API. */
public class ViaVersionPlugin extends JavaPlugin implements ViaPlatform<Player> {
    // Singleton reference, assigned in the constructor so it is usable from onLoad() onwards.
    private static ViaVersionPlugin instance;
    private final BukkitCommandHandler commandHandler;
    private final BukkitViaConfig conf;
    private final ViaAPI<Player> api = new BukkitViaAPI(this);
    // Tasks requested before the plugin was enabled; flushed to the scheduler in onEnable().
    private final List<Runnable> queuedTasks = new ArrayList<>();
    private final List<Runnable> asyncQueuedTasks = new ArrayList<>();
    // Whether the ProtocolSupport plugin is present (checked once in the constructor).
    private final boolean protocolSupport;
    // Whether this is a Spigot build patched with a protocol mod (detected in onLoad()).
    private boolean compatSpigotBuild;
    private boolean spigot = true;
    // True when injection could not happen during onLoad() and is deferred to onEnable().
    private boolean lateBind;
    public ViaVersionPlugin() {
        instance = this;
        // Command handler
        commandHandler = new BukkitCommandHandler();
        // Init platform
        BukkitViaInjector injector = new BukkitViaInjector();
        Via.init(ViaManagerImpl.builder()
                .platform(this)
                .commandHandler(commandHandler)
                .injector(injector)
                .loader(new BukkitViaLoader(this))
                .build());
        // Config magic
        conf = new BukkitViaConfig();
        // Check if we're using protocol support too
        protocolSupport = Bukkit.getPluginManager().getPlugin("ProtocolSupport") != null;
    }
    @Override
    public void onLoad() {
        // Via should load before PL, so we can't check for it in the constructor
        boolean hasProtocolLib = Bukkit.getPluginManager().getPlugin("ProtocolLib") != null;
        ((BukkitViaInjector) Via.getManager().getInjector()).setProtocolLib(hasProtocolLib);
        // Spigot detector
        try {
            Class.forName("org.spigotmc.SpigotConfig");
        } catch (ClassNotFoundException e) {
            spigot = false;
        }
        // Check if it's a spigot build with a protocol mod
        try {
            NMSUtil.nms(
                    "PacketEncoder",
                    "net.minecraft.network.PacketEncoder"
            ).getDeclaredField("version");
            compatSpigotBuild = true;
        } catch (Exception e) {
            // Field absent => plain build; any reflection failure means "not a compat build".
            compatSpigotBuild = false;
        }
        if (getServer().getPluginManager().getPlugin("ViaBackwards") != null) {
            MappingDataLoader.enableMappingsCache();
        }
        // Generate classes needed (only works if it's compat or ps)
        ClassGenerator.generate();
        lateBind = !((BukkitViaInjector) Via.getManager().getInjector()).isBinded();
        getLogger().info("ViaVersion " + getDescription().getVersion() + (compatSpigotBuild ? "compat" : "") + " is now loaded" + (lateBind ? ", waiting for boot. (late-bind)" : ", injecting!"));
        if (!lateBind) {
            ((ViaManagerImpl) Via.getManager()).init();
        }
    }
    @Override
    public void onEnable() {
        // Injection was deferred in onLoad(); perform it now that the server has booted.
        if (lateBind) {
            ((ViaManagerImpl) Via.getManager()).init();
        }
        getCommand("viaversion").setExecutor(commandHandler);
        getCommand("viaversion").setTabCompleter(commandHandler);
        getServer().getPluginManager().registerEvents(new ProtocolLibEnableListener(), this);
        // Warn them if they have anti-xray on and they aren't using spigot
        if (conf.isAntiXRay() && !spigot) {
            getLogger().info("You have anti-xray on in your config, since you're not using spigot it won't fix xray!");
        }
        // Run queued tasks
        for (Runnable r : queuedTasks) {
            Bukkit.getScheduler().runTask(this, r);
        }
        queuedTasks.clear();
        // Run async queued tasks
        for (Runnable r : asyncQueuedTasks) {
            Bukkit.getScheduler().runTaskAsynchronously(this, r);
        }
        asyncQueuedTasks.clear();
    }
    @Override
    public void onDisable() {
        ((ViaManagerImpl) Via.getManager()).destroy();
    }
    @Override
    public String getPlatformName() {
        return Bukkit.getServer().getName();
    }
    @Override
    public String getPlatformVersion() {
        return Bukkit.getServer().getVersion();
    }
    @Override
    public String getPluginVersion() {
        return getDescription().getVersion();
    }
    // Schedules an async task; before the plugin is enabled the task is queued for onEnable().
    @Override
    public PlatformTask runAsync(Runnable runnable) {
        if (isPluginEnabled()) {
            return new BukkitViaTask(getServer().getScheduler().runTaskAsynchronously(this, runnable));
        } else {
            asyncQueuedTasks.add(runnable);
            // No Bukkit task handle exists yet for a queued runnable.
            return new BukkitViaTask(null);
        }
    }
    // Schedules a main-thread task; before the plugin is enabled the task is queued for onEnable().
    @Override
    public PlatformTask runSync(Runnable runnable) {
        if (isPluginEnabled()) {
            return new BukkitViaTask(getServer().getScheduler().runTask(this, runnable));
        } else {
            queuedTasks.add(runnable);
            // No Bukkit task handle exists yet for a queued runnable.
            return new BukkitViaTask(null);
        }
    }
    @Override
    public PlatformTask runSync(Runnable runnable, long ticks) {
        return new BukkitViaTask(getServer().getScheduler().runTaskLater(this, runnable, ticks));
    }
    @Override
    public PlatformTask runRepeatingSync(Runnable runnable, long ticks) {
        return new BukkitViaTask(getServer().getScheduler().runTaskTimer(this, runnable, 0, ticks));
    }
    // Wraps every online player in a ViaCommandSender adapter.
    @Override
    public ViaCommandSender[] getOnlinePlayers() {
        ViaCommandSender[] array = new ViaCommandSender[Bukkit.getOnlinePlayers().size()];
        int i = 0;
        for (Player player : Bukkit.getOnlinePlayers()) {
            array[i++] = new BukkitCommandSender(player);
        }
        return array;
    }
    // Silently does nothing if the player is offline.
    @Override
    public void sendMessage(UUID uuid, String message) {
        Player player = Bukkit.getPlayer(uuid);
        if (player != null) {
            player.sendMessage(message);
        }
    }
    // Returns false (without kicking) if the player is offline.
    @Override
    public boolean kickPlayer(UUID uuid, String message) {
        Player player = Bukkit.getPlayer(uuid);
        if (player != null) {
            player.kickPlayer(message);
            return true;
        } else {
            return false;
        }
    }
    @Override
    public boolean isPluginEnabled() {
        return Bukkit.getPluginManager().getPlugin("ViaVersion").isEnabled();
    }
    @Override
    public ConfigurationProvider getConfigurationProvider() {
        return conf;
    }
    // Called when the server is reloaded; with ProtocolLib present, players must be kicked to
    // avoid crashing on re-injection.
    @Override
    public void onReload() {
        if (Bukkit.getPluginManager().getPlugin("ProtocolLib") != null) {
            getLogger().severe("ViaVersion is already loaded, we're going to kick all the players... because otherwise we'll crash because of ProtocolLib.");
            for (Player player : Bukkit.getOnlinePlayers()) {
                player.kickPlayer(ChatColor.translateAlternateColorCodes('&', conf.getReloadDisconnectMsg()));
            }
        } else {
            getLogger().severe("ViaVersion is already loaded, this should work fine. If you get any console errors, try rebooting.");
        }
    }
    // Collects the installed-plugin list for the /viaversion dump command.
    @Override
    public JsonObject getDump() {
        JsonObject platformSpecific = new JsonObject();
        List<PluginInfo> plugins = new ArrayList<>();
        for (Plugin p : Bukkit.getPluginManager().getPlugins())
            plugins.add(new PluginInfo(p.isEnabled(), p.getDescription().getName(), p.getDescription().getVersion(), p.getDescription().getMain(), p.getDescription().getAuthors()));
        platformSpecific.add("plugins", GsonUtil.getGson().toJsonTree(plugins));
        return platformSpecific;
    }
    @Override
    public boolean isOldClientsAllowed() {
        return !protocolSupport; // Use protocolsupport for older clients
    }
    @Override
    public BukkitViaConfig getConf() {
        return conf;
    }
    @Override
    public ViaAPI<Player> getApi() {
        return api;
    }
    // Adds Bukkit-specific unsupported server software on top of the platform defaults.
    @Override
    public final Collection<UnsupportedSoftware> getUnsupportedSoftwareClasses() {
        List<UnsupportedSoftware> list = new ArrayList<>(ViaPlatform.super.getUnsupportedSoftwareClasses());
        list.add(new UnsupportedSoftwareImpl.Builder().name("Yatopia").reason(UnsupportedSoftwareImpl.Reason.DANGEROUS_SERVER_SOFTWARE)
                .addClassName("org.yatopiamc.yatopia.server.YatopiaConfig")
                .addClassName("net.yatopia.api.event.PlayerAttackEntityEvent")
                .addClassName("yatopiamc.org.yatopia.server.YatopiaConfig") // Only the best kind of software relocates its own classes to hide itself :tinfoilhat:
                .addMethod("org.bukkit.Server", "getLastTickTime").build());
        return Collections.unmodifiableList(list);
    }
    public boolean isLateBind() {
        return lateBind;
    }
    public boolean isCompatSpigotBuild() {
        return compatSpigotBuild;
    }
    public boolean isSpigot() {
        return this.spigot;
    }
    public boolean isProtocolSupport() {
        return protocolSupport;
    }
    public static ViaVersionPlugin getInstance() {
        return instance;
    }
}
| |
/*
* Copyright 2015, TopicQuests
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
package org.topicquests.ks.api;
import java.util.List;
import net.minidev.json.JSONObject;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.topicquests.support.api.IResult;
import org.topicquests.ks.SystemEnvironment;
import org.topicquests.ks.tm.api.IMergeImplementation;
import org.topicquests.ks.tm.api.IMergeResultsListener;
import org.topicquests.ks.tm.api.ISubjectProxy;
import org.topicquests.ks.tm.api.ISubjectProxyModel;
import org.topicquests.ks.tm.api.ITuple;
import org.topicquests.ks.tm.api.ITupleQuery;
import org.topicquests.ks.tm.merge.VirtualizerHandler;
/**
* @author park
*
*/
public interface ITQDataProvider {
/**
* This allows us to create plug-in dataproviders
* @param env
* @param cachesize
* @return
*/
IResult init(SystemEnvironment env, int cachesize);
/**
* Will return <code>true</code> if <code>errorMessage</code>
* indicates an OptimisticLock as defined in {@link IErrorMessages}
* @param errorMessage
* @return
*/
boolean isOptimisticLockError(String errorMessage);
/**
 * Return the {@link ISubjectProxyModel} installed in this system
* @return
*/
ISubjectProxyModel getSubjectProxyModel();
/**
 * Return the {@link ITupleQuery} installed in this system
* @return
*/
ITupleQuery getTupleQuery();
/**
* Remove an {@link INode} from the internal cache
* @param nodeLocator
*/
void removeFromCache(String nodeLocator);
/**
* Returns a UUID String
* @return
*/
String getUUID();
/**
* Returns a UUID String with a <code>prefix</code>
* @param prefix
* @return
*/
String getUUID_Pre(String prefix);
/**
* Return a UUID String with a <code>suffix</code>
* @param suffix
* @return
*/
String getUUID_Post(String suffix);
/**
* Load a {@link ITreeNode} starting from <code>rootNodeLocator</code>
* with all its child nodes (<em>subs</em> and <em>instances</em>)
* to a depth defined by <code>maxDepth</code>
* @param rootNodeLocator
* @param maxDepth -1 means no limit
* @param start
* @param count
* @param sortBy TODO
* @param sortDir TODO
* @param credentials
* @return
*/
IResult loadTree(String rootNodeLocator, int maxDepth, int start, int count, String sortBy, String sortDir, ITicket credentials);
/**
* Given a list of <code>locators</code>, return a list of
* {@link JSONObject} nodes
* @param locators
* @param credentials
* @return list of JSONObjects
*/
IResult multiGetNodes(List<String> locators, ITicket credentials);
////////////////////////////////////
// PROXIES
////////////////////////////////////
/**
* <p>Fetch a node. <code>credentials</code> are required in case
* the node is private and a credential must be tested</p>
* <p>Error message will be returned if the node is private and insufficient
* credentials are presented</p>
* <p>Returns <code>null</code> as the result object if there is no node or
* if credentials are insufficient</p>
* @param locator
* @param credentials
* @return
*/
IResult getNode(String locator, ITicket credentials);
/**
* A <code>URL</code> represented in an internal URL
* property is considered a <em>subject identity</em>
* value, meaning, one and only one proxy can have that
* value.
* @param url
* @param credentials
* @return
*/
IResult getNodeByURL(String url, ITicket credentials);
/**
*
* @param tuple
* @param checkVersion
* @return
*/
IResult putTuple(ITuple tuple, boolean checkVersion);
/**
* Return an <code>ITuple</code> inside an {@link IResult} object or <code>null</code> if not found
* @param tupleLocator
* @param credentials
* @return -- an IResult object that contains either an ITuple or an error message
*/
IResult getTuple(String tupleLocator, ITicket credentials);
/**
* Behaves as if to <em>replace</em> <code>node</code>
* @param node
* @param checkVersion
* @return
*/
IResult updateNode(ISubjectProxy node, boolean checkVersion);
/**
* Returns a raw {@link INode} as a {@link JSONObject}
* @param locator
* @param credentials
* @return
*/
IResult getNodeJSON(String locator, ITicket credentials);
/**
* <p>If <code>node</code> is a <em>merged node</em>, then
* return the <em>virtual node</em> which represents it. Otherwise,
* return <code>null</code> inside {@link IResult}
* @param node
* @param credentials
* @return
*/
IResult getVirtualNodeIfExists(ISubjectProxy node, ITicket credentials);
/**
* Returns a Boolean <code>true</code> if there exists an {@link ITuple} of
* <code>relationLocator</code> and
* either a <em>subject</em> or </em>object</em> identified by <code>theLocator</code>
* @param theLocator
* @param relationLocator
* @return
*/
IResult existsTupleBySubjectOrObjectAndRelation(String theLocator, String relationLocator);
/**
* Returns a Boolean <code>true</code> if an {@link INode} exists for the given
* <code>locator<?code>
* @param locator
* @return
*/
IResult existsNode(String locator);
/**
* <p>Tests whether <code>nodeLocator</code> is of type or a subclass of
* <code>targetTypeLocator</code></p>
* @param nodeLocator
* @param targetTypeLocator
* @param credentials
* @return
*/
IResult nodeIsA(String nodeLocator, String targetTypeLocator, ITicket credentials);
/**
* Assemble a node view based on the node and its various related nodes
* @param locator
* @param credentials
* @return
*/
IResult getNodeView(String locator, ITicket credentials);
/**
* <p>Remove a node from the database</p>
* <p>This is used for all nodes and tuples</p>
* @param locator
* @param credentials
* @return
*/
IResult removeNode(String locator, ITicket credentials);
/**
* Remove <code>node</code>
* @param node
* @param credentials
* @return
*/
IResult removeNode(ISubjectProxy node, ITicket credentials);
/**
* <p>Put <code>node</code> in the database. Subject it to merge and harvest</p>
* <p>Can return an <em>OptimisticLockException</em> error message if version numbers
* are not appropriate.</p>
* @param node
* @return
*/
IResult putNode(ISubjectProxy node);
/**
* Put <code>node</code> in the database. Subject to harvest; no merge performed
* @param node
* @return
*/
IResult putNodeNoMerge(ISubjectProxy node);
/**
* <p>List nodes associated with <code>psi</code></p>
* <p>Note: a <code>psi</code> is theoretically a <em>unique</em> identifier
* or a node; there shoule be just one node returned, if any.</p>
* @param psi
* @param start
* @param count
* @param sortBy can be <code>null</code>
* @param sortDir can be <code>null</code>
* @param credentials
* @return
*/
IResult listNodesByPSI(String psi, int start, int count, String sortBy, String sortDir, ITicket credentials);
/**
* <p>List nodes by the combination of a <code>label</code> and <code>typeLocator</code></p>
* @param label
* @param typeLocator
* @param language
* @param start
* @param count
* @param sortBy can be <code>null</code>
* @param sortDir can be <code>null</code>
* @param credentials
* @return
*/
IResult listNodesByLabelAndType(String label, String typeLocator,String language, int start, int count, String sortBy, String sortDir, ITicket credentials);
/**
* <p>List nodes by <code>label</code></p>
* @param language
* @param start
* @param count
* @param sortBy can be <code>null</code>
* @param sortDir can be <code>null</code>
* @param credentials
* @param lagel
* @return
*/
IResult listNodesByLabel(String label,String language, int start, int count, String sortBy, String sortDir, ITicket credentials);
/**
* <p>Return nodes with labels that are <em>like</em> <code>labelFragment</code></p>
* <p>A <em>wildcard</em> is added before and after <code>labelFragment</code></p>
* <p>Example: given the string "My favorite topic"; would be matched with My, favorite, or topic</p>
* <p>Results are case sensitive</p>
* @param labelFragment
* @param language
* @param start
* @param count
* @param sortBy can be <code>null</code>
* @param sortDir can be <code>null</code>
* @param credentials
* @return
*/
IResult listNodesByLabelLike(String labelFragment, String language, int start, int count, String sortBy, String sortDir, ITicket credentials);
/**
* <p>Return nodes with details that are <em>like</em> <code>detailsFragment</code></p>
* @param detailsFragment
* @param language
* @param start
* @param count
* @param sortBy can be <code>null</code>
* @param sortDir can be <code>null</code>
* @param credentials
* @return
*/
IResult listNodesByDetailsLike(String detailsFragment, String language, int start, int count, String sortBy, String sortDir, ITicket credentials);
/**
* Answer a particular Solr query string
* @param queryString
* @param start
* @param count
* @param sortBy can be <code>null</code>
* @param sortDir can be <code>null</code>
* @param credentials
* @return
*/
IResult listNodesByQuery(String queryString,int start, int count, String sortBy, String sortDir, ITicket credentials);
/**
* Return nodes created by <code>creatorId</code>
* @param creatorId
* @param start
* @param count
* @param sortBy can be <code>null</code>
* @param sortDir can be <code>null</code>
* @param credentials
* @return
*/
IResult listNodesByCreatorId(String creatorId, int start, int count, String sortBy, String sortDir, ITicket credentials);
/**
* Return nodes of type <code>typeLocator</code>
* @param typeLocator
* @param start
* @param count
* @param sortBy can be <code>null</code>
* @param sortDir can be <code>null</code>
* @param credentials
* @return
*/
IResult listNodesByType(String typeLocator,int start, int count, String sortBy, String sortDir, ITicket credentials);
/**
* Really, this is the same as <code>listNodesByType</code>
* @param typeLocator
* @param start
* @param count
* @param sortBy can be <code>null</code>
* @param sortdir can be <code>null</code>
* @param credentials
* @return a list of [@link INode} objects or <code>null</code>
*/
IResult listInstanceNodes(String typeLocator, int start, int count, String sortBy, String sortdir, ITicket credentials);
/**
* <p>List nodes by type, except if any nodes are merged, do not list them. All virtual nodes
* will be listed</p>
* @param typeLocator
* @param start
* @param count
* @param sortBy can be <code>null</code>
* @param sortDir can be <code>null</code>
* @param credentials
* @return
*/
IResult listTrimmedInstanceNodes(String typeLocator, int start, int count, String sortBy, String sortDir, ITicket credentials);
/**
* List nodes which are subclasses of <code>superclassLocator</code>
* @param superclassLocator
* @param start
* @param count
* @param sortBy can be <code>null</code>
* @param sortDir can be <code>null</code>
* @param credentials
* @return
*/
IResult listSubclassNodes(String superclassLocator, int start, int count, String sortBy, String sortDir, ITicket credentials);
/**
* <p>This accepts an {@link ISubjectProxy} expressed as a {@link JSONObject}
* and imports it</p>
* <p>The node might be a new node created in an editor,
* or an edited existing node.</p>
* <p><code>nodeJSON</code> <em>must</em> be a complete proxy representation.</p>
* @param nodeJSON
* @param checkVersion
* @return can return an OptimisticLock exception
*/
IResult updateProxyFromJSON(JSONObject nodeJSON, boolean checkVersion);
IResult updateProxyFromJSON(String jsonString, boolean checkVersion);
/**
* A <code>url</code> is like a PSI: it's an identity property
* @param url
* @param start
* @param count
* @param sortBy can be <code>null</code>
* @param sortDir can be <code>null</code>
* @param credentials
* @return
*/ //killed because URL is not going to be like PSI
//IResult getNodeByURL(String url, ITicket credentials);
IResult listNodesByTypeAndURL(String type, String url, int start, int count, String sortBy, String sortDir, ITicket credentials);
/**
* Update <code>node<code> which had its label or subject changed. This entails
* patching every node that references <code>proxy</code> using its label or subject
* @param node
* @param oldLabel
* @param newLabel
* @param checkVersion
* @param credentials
* @return
*/
IResult updateNodeLabel(ISubjectProxy node, String oldLabel, String newLabel, boolean checkVersion, ITicket credentials);
////////////////////////////////////
// MERGE
////////////////////////////////////
/**
* Support various ways of asserting a merge between two nodes
* @param leftNode
* @param rightNode
* @param reason
* @param userLocator
* @param mergeListener
*/
void mergeTwoProxies(ISubjectProxy leftNode, ISubjectProxy rightNode, String reason, String userLocator, IMergeResultsListener mergeListener);
/**
*
* @param h
*/
void setVirtualizerHandler(VirtualizerHandler h);
/**
* <p>Install an {@link IMergeImplementation} in this system</p>
* <p>The implementation is declared in the <code>config.xml</code> file</p>
* @param merger
*/
void setMergeBean(IMergeImplementation merger);
//////////////////////////////////////////////////
// General query support
//////////////////////////////////////////////////
/**
* <p>Note: <code>queryString</code> is composed of various elements
* which take the form <code>field:stuff</code> where stuff could be
* in the form of text to find, e.g. "over the rainbow". In the case
* of text to find, that text must be escaped by <code>QueryUtil.escapeQueryCulprits(...)</code></p>
* @param queryString
* @param start
* @param count
* @param sortBy can be <code>null</code>
* @param sortDir can be <code>null</code>
* @param credentials
* @return
*/
IResult runQuery(String queryString, int start, int count, String sortBy, String sortDir, ITicket credentials);
/**
* General purpose query handler
* @param qb
* @param credentials
* @return
*/
IResult executeQueryBuilder(SearchSourceBuilder qb, ITicket credentials);
void shutDown();
}
| |
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.configure.ui;
import org.eclipse.emf.common.command.CompoundCommand;
import org.eclipse.emf.edit.command.AddCommand;
import org.eclipse.emf.edit.command.RemoveCommand;
import org.eclipse.emf.edit.command.SetCommand;
import org.eclipse.emf.transaction.TransactionalEditingDomain;
import org.eclipse.emf.transaction.util.TransactionUtil;
import org.eclipse.jface.dialogs.Dialog;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.TableEditor;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.MouseAdapter;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Combo;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableColumn;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbFactory;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage;
import org.wso2.developerstudio.eclipse.gmf.esb.ValidateFeature;
import org.wso2.developerstudio.eclipse.gmf.esb.ValidateMediator;
/**
 * Dialog for configuring the validation features of a {@link ValidateMediator}.
 * Presents a two-column (name/value) table; changes are translated into EMF
 * commands and executed as a single undoable compound command when OK is pressed.
 */
public class ValidateFeatureDialog extends Dialog {
    /** Two-column (Name/Value) table listing the mediator's validation features. */
    private Table featureTable;
    /** Adds a new, not-yet-attached feature row. */
    private Button addBtn;
    /** Removes the selected feature row. */
    private Button removeBtn;
    // FIX: renamed from the misspelled "isEnbaledEdior".
    /** Cell editor hosting the true/false combo over the "Value" column. */
    private TableEditor isEnabledEditor;
    // FIX: renamed from "isEnabledcombo" for conventional camelCase.
    /** Read-only combo offering "true"/"false" for the enabled value. */
    private Combo isEnabledCombo;
    // FIX: renamed from the misspelled "validateMediatoer".
    /** Mediator model object whose features are being configured. */
    private ValidateMediator validateMediator;
    /** EMF editing domain through which all model changes are executed. */
    private TransactionalEditingDomain editingDomain;
    /** Accumulates add/remove/set commands; executed as one unit on OK. */
    private CompoundCommand resultCommand;

    public ValidateFeatureDialog(Shell parentShell,
            ValidateMediator validateMediator) {
        super(parentShell);
        this.validateMediator = validateMediator;
        this.editingDomain = TransactionUtil.getEditingDomain(validateMediator);
    }

    protected Control createDialogArea(Composite parent) {
        Composite container = (Composite) super.createDialogArea(parent);
        container.setSize(SWT.DEFAULT, 1000);
        // Set layout for the main container.
        FormLayout mainLayout = new FormLayout();
        mainLayout.marginHeight = 5;
        mainLayout.marginWidth = 5;
        container.setLayout(mainLayout);
        // Setting up the feature table.
        featureTable = new Table(container, SWT.BORDER | SWT.FULL_SELECTION
                | SWT.HIDE_SELECTION);
        TableColumn keyTypeColumn = new TableColumn(featureTable, SWT.LEFT);
        TableColumn staticKey = new TableColumn(featureTable, SWT.LEFT);
        keyTypeColumn.setText("Name");
        keyTypeColumn.setWidth(150);
        staticKey.setText("Value");
        staticKey.setWidth(150);
        featureTable.setHeaderVisible(true);
        featureTable.setLinesVisible(true);
        addBtn = new Button(container, SWT.NONE);
        addBtn.setText("Add...");
        addBtn.addListener(SWT.Selection, new Listener() {
            public void handleEvent(Event event) {
                // Bind a brand-new (not yet attached) feature and select its row.
                TableItem item = bindFeature(EsbFactory.eINSTANCE
                        .createValidateFeature());
                featureTable.select(featureTable.indexOf(item));
            }
        });
        removeBtn = new Button(container, SWT.NONE);
        removeBtn.setText("Remove");
        removeBtn.addListener(SWT.Selection, new Listener() {
            public void handleEvent(Event event) {
                int selectedIndex = featureTable.getSelectionIndex();
                if (-1 != selectedIndex) {
                    unbindFeature(selectedIndex);
                    // Select the next available candidate for deletion.
                    if (selectedIndex < featureTable.getItemCount()) {
                        featureTable.select(selectedIndex);
                    } else {
                        featureTable.select(selectedIndex - 1);
                    }
                }
            }
        });
        // Open the in-place value editor whenever a row is selected.
        // FIX: renamed from the misspelled "policyEntryTableListner".
        Listener policyEntryTableListener = new Listener() {
            public void handleEvent(Event evt) {
                if (null != evt.item) {
                    if (evt.item instanceof TableItem) {
                        TableItem item = (TableItem) evt.item;
                        editItem(item);
                    }
                }
            }
        };
        featureTable.addListener(SWT.Selection, policyEntryTableListener);
        // Populate the table from the mediator's current features.
        for (ValidateFeature feature : validateMediator.getFeatures()) {
            bindFeature(feature);
        }
        setupTableEditor(featureTable);
        // Layout related configurations.
        FormData schemaTableLayoutData = new FormData(SWT.DEFAULT, 150);
        schemaTableLayoutData.top = new FormAttachment(0, 0);
        schemaTableLayoutData.left = new FormAttachment(0, 0);
        featureTable.setLayoutData(schemaTableLayoutData);
        FormData frmData = new FormData();
        frmData.left = new FormAttachment(featureTable, 5);
        frmData.right = new FormAttachment(100, 0);
        addBtn.setLayoutData(frmData);
        frmData = new FormData();
        frmData.top = new FormAttachment(addBtn, 5);
        frmData.left = new FormAttachment(featureTable, 5);
        removeBtn.setLayoutData(frmData);
        return parent;
    }

    /**
     * Adds a table row for {@code feature} and links the row back to the
     * model object via {@link TableItem#setData(Object)}.
     *
     * @param feature the feature to display
     * @return the newly created table row
     */
    private TableItem bindFeature(ValidateFeature feature) {
        TableItem item = new TableItem(featureTable, SWT.NONE);
        item.setText(new String[] { feature.getFeatureName(),
                Boolean.toString(feature.isFeatureEnabled()) });
        item.setData(feature);
        return item;
    }

    /**
     * Removes the row at {@code selectedIndex}. If the backing feature is
     * already attached to the mediator, a RemoveCommand is queued so the
     * model is updated when OK is pressed.
     */
    private void unbindFeature(int selectedIndex) {
        TableItem item = featureTable.getItem(selectedIndex);
        ValidateFeature feature = (ValidateFeature) item.getData();
        if (null != feature.eContainer()) {
            // FIX: renamed from the misspelled "reoveCmd".
            RemoveCommand removeCmd = new RemoveCommand(editingDomain,
                    validateMediator,
                    EsbPackage.Literals.VALIDATE_MEDIATOR__FEATURES, feature);
            getResultCommand().append(removeCmd);
        }
        featureTable.remove(featureTable.indexOf(item));
    }

    /**
     * Places a read-only true/false combo over the "Value" cell of
     * {@code item}; selections are written straight back into the row text.
     */
    private void editItem(final TableItem item) {
        ValidateFeature feature = (ValidateFeature) item.getData();
        isEnabledEditor = initTableEditor(isEnabledEditor, item.getParent());
        isEnabledCombo = new Combo(item.getParent(), SWT.READ_ONLY);
        isEnabledCombo.setItems(new String[] { "true", "false" });
        isEnabledCombo.setText(Boolean.toString(feature.isFeatureEnabled()));
        isEnabledCombo.addListener(SWT.Selection, new Listener() {
            public void handleEvent(Event event) {
                item.setText(1, isEnabledCombo.getText());
            }
        });
        isEnabledEditor.setEditor(isEnabledCombo, item, 1);
    }

    /**
     * Disposes the control hosted by the previous editor (if any) and returns
     * a fresh left-aligned, horizontally-grabbing editor for {@code table}.
     */
    private TableEditor initTableEditor(TableEditor editor, Table table) {
        if (null != editor) {
            Control lastCtrl = editor.getEditor();
            if (null != lastCtrl) {
                lastCtrl.dispose();
            }
        }
        editor = new TableEditor(table);
        editor.horizontalAlignment = SWT.LEFT;
        editor.grabHorizontal = true;
        return editor;
    }

    /** Lazily creates the compound command that accumulates all model edits. */
    private CompoundCommand getResultCommand() {
        if (null == resultCommand) {
            resultCommand = new CompoundCommand();
        }
        return resultCommand;
    }

    /**
     * Installs an in-place text editor that opens when a cell is
     * double-clicked and writes the edited text back into that cell.
     */
    private void setupTableEditor(final Table table) {
        final TableEditor cellEditor = new TableEditor(table);
        cellEditor.grabHorizontal = true;
        cellEditor.minimumWidth = 50;
        table.addMouseListener(new MouseAdapter() {
            /**
             * Setup a new cell editor control at double click event.
             */
            public void mouseDoubleClick(MouseEvent e) {
                // Dispose the old editor control (if one is setup).
                Control oldEditorControl = cellEditor.getEditor();
                if (null != oldEditorControl)
                    oldEditorControl.dispose();
                // Mouse location.
                Point mouseLocation = new Point(e.x, e.y);
                // Grab the selected row (FIX: redundant cast removed —
                // Table.getItem(Point) already returns a TableItem).
                TableItem item = table.getItem(mouseLocation);
                if (null == item)
                    return;
                // Determine which column was selected.
                int selectedColumn = -1;
                for (int i = 0, n = table.getColumnCount(); i < n; i++) {
                    if (item.getBounds(i).contains(mouseLocation)) {
                        selectedColumn = i;
                        break;
                    }
                }
                // Setup a new editor control.
                if (-1 != selectedColumn) {
                    Text editorControl = new Text(table, SWT.NONE);
                    final int editorControlColumn = selectedColumn;
                    editorControl.setText(item.getText(selectedColumn));
                    editorControl.addModifyListener(new ModifyListener() {
                        public void modifyText(ModifyEvent e) {
                            Text text = (Text) cellEditor.getEditor();
                            cellEditor.getItem().setText(editorControlColumn,
                                    text.getText());
                        }
                    });
                    editorControl.selectAll();
                    editorControl.setFocus();
                    cellEditor.setEditor(editorControl, item, selectedColumn);
                }
            }

            /**
             * Dispose cell editor control at mouse down (otherwise the control
             * keep showing).
             */
            public void mouseDown(MouseEvent e) {
                Control oldEditorControl = cellEditor.getEditor();
                if (null != oldEditorControl)
                    oldEditorControl.dispose();
            }
        });
    }

    /**
     * Translates the table contents into EMF commands — AddCommand for rows
     * whose feature is not yet attached, SetCommand for changed name/enabled
     * values of existing features — then executes them as a single compound
     * command on the editing domain.
     */
    public void okPressed() {
        for (TableItem item : featureTable.getItems()) {
            ValidateFeature feature = (ValidateFeature) item.getData();
            if (feature.eContainer() == null) {
                // New feature: copy the edited values in, then queue an AddCommand.
                feature.setFeatureName(item.getText(0));
                // FIX: idiomatic boolean parse replaces the if/else on "true".
                // The combo only ever supplies "true"/"false", so behavior is unchanged.
                feature.setFeatureEnabled(Boolean.parseBoolean(item.getText(1)));
                AddCommand addCmd = new AddCommand(editingDomain,
                        validateMediator,
                        EsbPackage.Literals.VALIDATE_MEDIATOR__FEATURES,
                        feature);
                getResultCommand().append(addCmd);
            } else {
                // Existing feature: queue SetCommands only for values that changed.
                if (!item.getText(0).equals(feature.getFeatureName())) {
                    SetCommand setTypeCmd = new SetCommand(
                            editingDomain,
                            feature,
                            EsbPackage.Literals.ABSTRACT_BOOLEAN_FEATURE__FEATURE_NAME,
                            item.getText(0));
                    getResultCommand().append(setTypeCmd);
                }
                if (!item.getText(1).equals(
                        Boolean.toString(feature.isFeatureEnabled()))) {
                    SetCommand setTypeCmd = new SetCommand(
                            editingDomain,
                            feature,
                            EsbPackage.Literals.ABSTRACT_BOOLEAN_FEATURE__FEATURE_ENABLED,
                            item.getText(1));
                    getResultCommand().append(setTypeCmd);
                }
            }
        }
        // Apply changes.
        if (getResultCommand().canExecute()) {
            editingDomain.getCommandStack().execute(getResultCommand());
        }
        super.okPressed();
    }

    protected void configureShell(Shell shell) {
        super.configureShell(shell);
        shell.setText("Configure Features.");
    }
}
| |
package com.linkedin.thirdeye.datasource.pinot;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import com.linkedin.thirdeye.anomaly.utils.ThirdeyeMetricsUtil;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.MapUtils;
import org.apache.helix.manager.zk.ZNRecordSerializer;
import org.apache.helix.manager.zk.ZkClient;
import org.apache.http.HttpHost;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.linkedin.pinot.client.ResultSet;
import com.linkedin.pinot.client.ResultSetGroup;
import com.linkedin.thirdeye.api.TimeGranularity;
import com.linkedin.thirdeye.api.TimeSpec;
import com.linkedin.thirdeye.dashboard.Utils;
import com.linkedin.thirdeye.datalayer.dto.DatasetConfigDTO;
import com.linkedin.thirdeye.datasource.MetricFunction;
import com.linkedin.thirdeye.datasource.ThirdEyeCacheRegistry;
import com.linkedin.thirdeye.datasource.ThirdEyeDataSource;
import com.linkedin.thirdeye.datasource.ThirdEyeRequest;
import com.linkedin.thirdeye.datasource.TimeRangeUtils;
import com.linkedin.thirdeye.util.ThirdEyeUtils;
public class PinotThirdEyeDataSource implements ThirdEyeDataSource {
private static final Logger LOG = LoggerFactory.getLogger(PinotThirdEyeDataSource.class);
// Process-wide cache registry; supplies the result-set and datasets caches used in execute()/getDatasets().
private static final ThirdEyeCacheRegistry CACHE_REGISTRY_INSTANCE = ThirdEyeCacheRegistry.getInstance();
// Pinot controller endpoint and HTTP client; initialized in the constructors and never reassigned.
private final HttpHost controllerHost;
private final CloseableHttpClient controllerClient;
// Identifier reported by getName(); derived from the class's simple name.
public static final String DATA_SOURCE_NAME = PinotThirdEyeDataSource.class.getSimpleName();
// Helpers backing getMaxDataTime() and getDimensionFilters().
private PinotDataSourceMaxTime pinotDataSourceMaxTime;
private PinotDataSourceDimensionFilters pinotDataSourceDimensionFilters;
/**
 * Creates a data source from a property map (controller host/port and
 * Zookeeper URL keys from {@code PinotThirdeyeDataSourceProperties}).
 *
 * @param properties configuration map; must pass {@code isValidProperties}
 * @throws IllegalStateException if required properties are missing/blank
 */
public PinotThirdEyeDataSource(Map<String, String> properties) {
  if (!isValidProperties(properties)) {
    throw new IllegalStateException("Invalid properties for data source " + DATA_SOURCE_NAME + " " + properties);
  }
  String host = properties.get(PinotThirdeyeDataSourceProperties.CONTROLLER_HOST.getValue());
  int port = Integer.valueOf(properties.get(PinotThirdeyeDataSourceProperties.CONTROLLER_PORT.getValue()));
  String zookeeperUrl = properties.get(PinotThirdeyeDataSourceProperties.ZOOKEEPER_URL.getValue());
  // The ZkClient is used only as an up-front connectivity probe so a bad
  // Zookeeper URL fails fast at construction time.
  ZkClient zkClient = new ZkClient(zookeeperUrl);
  zkClient.setZkSerializer(new ZNRecordSerializer());
  zkClient.waitUntilConnected();
  // FIX: close the probe client; it was previously leaked (the ZK connection
  // and its event threads stayed alive for the life of the process).
  zkClient.close();
  this.controllerHost = new HttpHost(host, port);
  this.controllerClient = HttpClients.createDefault();
  pinotDataSourceMaxTime = new PinotDataSourceMaxTime();
  pinotDataSourceDimensionFilters = new PinotDataSourceDimensionFilters();
  LOG.info("Created PinotThirdEyeDataSource with controller {}", controllerHost);
}
/**
 * Creates a data source pointed directly at the given Pinot controller.
 * No Zookeeper connectivity check is performed (see {@code fromZookeeper}).
 *
 * @param host controller host name
 * @param port controller port
 */
protected PinotThirdEyeDataSource(String host, int port) {
  this.controllerHost = new HttpHost(host, port);
  this.controllerClient = HttpClients.createDefault();
  this.pinotDataSourceMaxTime = new PinotDataSourceMaxTime();
  this.pinotDataSourceDimensionFilters = new PinotDataSourceDimensionFilters();
  LOG.info("Created PinotThirdEyeDataSource with controller {}", controllerHost);
}
/**
 * Factory that verifies Zookeeper connectivity at {@code zkUrl} before
 * constructing a data source for the given controller.
 *
 * @param controllerHost Pinot controller host
 * @param controllerPort Pinot controller port
 * @param zkUrl Zookeeper URL used only as a fail-fast connectivity probe
 * @return a new data source bound to the controller
 */
public static PinotThirdEyeDataSource fromZookeeper(String controllerHost, int controllerPort, String zkUrl) {
  ZkClient zkClient = new ZkClient(zkUrl);
  zkClient.setZkSerializer(new ZNRecordSerializer());
  zkClient.waitUntilConnected();
  // FIX: close the probe client; it was previously leaked (connection and
  // ZK event threads were held for the life of the process).
  zkClient.close();
  PinotThirdEyeDataSource pinotThirdEyeDataSource = new PinotThirdEyeDataSource(controllerHost, controllerPort);
  LOG.info("Created PinotThirdEyeDataSource to zookeeper: {} controller: {}:{}", zkUrl, controllerHost, controllerPort);
  return pinotThirdEyeDataSource;
}
/**
 * Convenience factory: unpacks a {@code PinotThirdEyeDataSourceConfig} and
 * delegates to {@link #fromZookeeper(String, int, String)}.
 */
public static ThirdEyeDataSource fromDataSourceConfig(PinotThirdEyeDataSourceConfig pinotDataSourceConfig) {
  String controllerHost = pinotDataSourceConfig.getControllerHost();
  int controllerPort = pinotDataSourceConfig.getControllerPort();
  String zookeeperUrl = pinotDataSourceConfig.getZookeeperUrl();
  return fromZookeeper(controllerHost, controllerPort, zookeeperUrl);
}
@Override
public String getName() {
  // Stable identifier for this data source (the class's simple name).
  return DATA_SOURCE_NAME;
}
@Override
public PinotThirdEyeResponse execute(ThirdEyeRequest request) throws Exception {
  // Wall-clock start used only for the metrics counters in the finally block.
  long tStart = System.nanoTime();
  try {
    // One PQL query (and result-set list) per metric function in the request.
    LinkedHashMap<MetricFunction, List<ResultSet>> metricFunctionToResultSetList = new LinkedHashMap<>();
    // Time spec of the FIRST dataset encountered is used for the whole response.
    TimeSpec timeSpec = null;
    for (MetricFunction metricFunction : request.getMetricFunctions()) {
      String dataset = metricFunction.getDataset();
      DatasetConfigDTO datasetConfig = ThirdEyeUtils.getDatasetConfigFromName(dataset);
      TimeSpec dataTimeSpec = ThirdEyeUtils.getTimestampTimeSpecFromDatasetConfig(datasetConfig);
      if (timeSpec == null) {
        timeSpec = dataTimeSpec;
      }
      Multimap<String, String> decoratedFilterSet = request.getFilterSet();
      // Decorate filter set for pre-computed (non-additive) dataset
      // NOTE: We do not decorate the filter if the metric name is '*', which is used by count(*) query, because
      // the results are usually meta-data and should be shown regardless the filter setting.
      if (!datasetConfig.isAdditive() && !"*".equals(metricFunction.getMetricName())) {
        decoratedFilterSet =
            generateFilterSetWithPreAggregatedDimensionValue(request.getFilterSet(), request.getGroupBy(),
                datasetConfig.getDimensions(), datasetConfig.getDimensionsHaveNoPreAggregation(),
                datasetConfig.getPreAggregatedKeyword());
      }
      // By default, query only offline, unless dataset has been marked as realtime
      String tableName = ThirdEyeUtils.computeTableName(dataset);
      String pql = null;
      if (datasetConfig.isMetricAsDimension()) {
        pql = PqlUtils.getMetricAsDimensionPql(request, metricFunction, decoratedFilterSet, dataTimeSpec, datasetConfig);
      } else {
        pql = PqlUtils.getPql(request, metricFunction, decoratedFilterSet, dataTimeSpec);
      }
      // Results are fetched through the shared cache keyed by (pql, table).
      ResultSetGroup resultSetGroup = CACHE_REGISTRY_INSTANCE.getResultSetGroupCache().get(new PinotQuery(pql, tableName));
      metricFunctionToResultSetList.put(metricFunction, getResultSetList(resultSetGroup));
    }
    // Flatten all per-metric result sets into the row format of the response.
    List<String[]> resultRows = parseResultSets(request, metricFunctionToResultSetList);
    PinotThirdEyeResponse resp = new PinotThirdEyeResponse(request, resultRows, timeSpec);
    return resp;
  } finally {
    // Always recorded, even when an exception propagates to the caller.
    ThirdeyeMetricsUtil.pinotCallCounter.inc();
    ThirdeyeMetricsUtil.pinotDurationCounter.inc(System.nanoTime() - tStart);
  }
}
/**
 * Definition of Pre-Aggregated Data: the data that has been pre-aggregated or pre-calculated and should not be
 * applied with any aggregation function during grouping by. Usually, this kind of data exists in non-additive
 * dataset. For such data, we assume that there exists a dimension value named "all", which could be overridden
 * in dataset configuration, that stores the pre-aggregated value.
 *
 * By default, when a query does not specify any value on pre-aggregated dimension, Pinot aggregates all values
 * at that dimension, which is an undesirable behavior for non-additive data. Therefore, this method modifies the
 * request's dimension filters such that the filter could pick out the "all" value for that dimension. Example:
 * Suppose that we have a dataset with 3 pre-aggregated dimensions: country, pageName, and osName, and the pre-
 * aggregated keyword is 'all'. Further assume that the original request's filter = {'country'='US, IN'} and
 * GroupBy dimension = pageName, then the decorated request has the new filter =
 * {'country'='US, IN', 'osName' = 'all'}. Note that 'pageName' = 'all' is not in the filter set because it is
 * a GroupBy dimension, which will not be aggregated.
 *
 * @param filterSet the original filterSet, which will NOT be modified.
 *
 * @return a decorated filter set for the queries to the pre-aggregated dataset.
 */
public static Multimap<String, String> generateFilterSetWithPreAggregatedDimensionValue(
    Multimap<String, String> filterSet, List<String> groupByDimensions, List<String> allDimensions,
    List<String> dimensionsHaveNoPreAggregation, String preAggregatedKeyword) {
  // Start from every dimension, then whittle down to those that still need
  // the pre-aggregated ("all") value pinned in the filter.
  Set<String> dimensionsToDecorate = new HashSet<>(allDimensions);
  // Drop dimensions that have no pre-aggregated value at all.
  if (CollectionUtils.isNotEmpty(dimensionsHaveNoPreAggregation)) {
    dimensionsToDecorate.removeAll(dimensionsHaveNoPreAggregation);
  }
  // Drop dimensions the caller already filters on — never override an
  // explicit user filter.
  if (filterSet != null) {
    dimensionsToDecorate.removeAll(filterSet.asMap().keySet());
  }
  // Drop GroupBy dimensions; they are never aggregated anyway.
  if (CollectionUtils.isNotEmpty(groupByDimensions)) {
    dimensionsToDecorate.removeAll(groupByDimensions);
  }
  // Copy the original filters (never mutate the input), then pin the
  // pre-aggregated keyword on every remaining dimension.
  Multimap<String, String> decoratedFilterSet =
      (filterSet == null) ? HashMultimap.<String, String>create() : HashMultimap.create(filterSet);
  for (String dimensionName : dimensionsToDecorate) {
    decoratedFilterSet.put(dimensionName, preAggregatedKeyword);
  }
  return decoratedFilterSet;
}
/**
 * Copies every {@link ResultSet} out of {@code resultSetGroup}, preserving order.
 */
private static List<ResultSet> getResultSetList(ResultSetGroup resultSetGroup) {
  int resultSetCount = resultSetGroup.getResultSetCount();
  List<ResultSet> resultSets = new ArrayList<>(resultSetCount);
  for (int index = 0; index < resultSetCount; index++) {
    resultSets.add(resultSetGroup.getResultSet(index));
  }
  return resultSets;
}
/**
 * Flattens the per-metric Pinot result sets into response rows.
 *
 * Each output row is laid out as [groupKeys..., metricValues...]; rows with
 * the same composite group key are merged by summing metric values. When the
 * request groups by time, the first group key is rewritten from a raw
 * timestamp into a bucket index relative to the request start.
 *
 * @param request the originating request (group-by and time settings)
 * @param metricFunctionToResultSetList result sets keyed by metric function,
 *        in request order (iteration order determines the metric column order)
 * @return merged rows, one String[] per distinct composite group key
 */
private List<String[]> parseResultSets(ThirdEyeRequest request,
    Map<MetricFunction, List<ResultSet>> metricFunctionToResultSetList) throws ExecutionException {
  // Number of leading group-key columns: one for time (if grouped by time)
  // plus one per group-by dimension.
  int numGroupByKeys = 0;
  boolean hasGroupBy = false;
  if (request.getGroupByTimeGranularity() != null) {
    numGroupByKeys += 1;
  }
  if (request.getGroupBy() != null) {
    numGroupByKeys += request.getGroupBy().size();
  }
  if (numGroupByKeys > 0) {
    hasGroupBy = true;
  }
  int numMetrics = request.getMetricFunctions().size();
  int numCols = numGroupByKeys + numMetrics;
  boolean hasGroupByTime = false;
  if (request.getGroupByTimeGranularity() != null) {
    hasGroupByTime = true;
  }
  // Column offset of the current metric function within the metric columns.
  int position = 0;
  // composite group key -> row; LinkedHashMap keeps first-seen row order.
  LinkedHashMap<String, String[]> dataMap = new LinkedHashMap<>();
  for (Entry<MetricFunction, List<ResultSet>> entry : metricFunctionToResultSetList.entrySet()) {
    MetricFunction metricFunction = entry.getKey();
    String dataset = metricFunction.getDataset();
    DatasetConfigDTO datasetConfig = ThirdEyeUtils.getDatasetConfigFromName(dataset);
    TimeSpec dataTimeSpec = ThirdEyeUtils.getTimestampTimeSpecFromDatasetConfig(datasetConfig);
    TimeGranularity dataGranularity = null;
    long startTime = request.getStartTimeInclusive().getMillis();
    DateTimeZone dateTimeZone = Utils.getDataTimeZone(dataset);
    DateTime startDateTime = new DateTime(startTime, dateTimeZone);
    dataGranularity = dataTimeSpec.getDataGranularity();
    // Non-epoch time formats are parsed with a formatter built from the spec.
    boolean isISOFormat = false;
    DateTimeFormatter inputDataDateTimeFormatter = null;
    String timeFormat = dataTimeSpec.getFormat();
    if (timeFormat != null && !timeFormat.equals(TimeSpec.SINCE_EPOCH_FORMAT)) {
      isISOFormat = true;
      inputDataDateTimeFormatter = DateTimeFormat.forPattern(timeFormat).withZone(dateTimeZone);
    }
    List<ResultSet> resultSets = entry.getValue();
    for (int i = 0; i < resultSets.size(); i++) {
      ResultSet resultSet = resultSets.get(i);
      int numRows = resultSet.getRowCount();
      for (int r = 0; r < numRows; r++) {
        boolean skipRowDueToError = false;
        String[] groupKeys;
        if (hasGroupBy) {
          groupKeys = new String[resultSet.getGroupKeyLength()];
          for (int grpKeyIdx = 0; grpKeyIdx < resultSet.getGroupKeyLength(); grpKeyIdx++) {
            String groupKeyVal = "";
            try {
              groupKeyVal = resultSet.getGroupKeyString(r, grpKeyIdx);
            } catch (Exception e) {
              // IGNORE FOR NOW, workaround for Pinot Bug
            }
            // When grouping by time, key 0 holds the timestamp: convert it to
            // a bucket index relative to the request start.
            if (hasGroupByTime && grpKeyIdx == 0) {
              int timeBucket;
              long millis;
              if (!isISOFormat) {
                millis = dataGranularity.toMillis(Double.valueOf(groupKeyVal).longValue());
              } else {
                millis = DateTime.parse(groupKeyVal, inputDataDateTimeFormatter).getMillis();
              }
              if (millis < startTime) {
                // Data points before the requested window are dropped entirely.
                LOG.error("Data point earlier than requested start time {}: {}", new Date(startTime), new Date(millis));
                skipRowDueToError = true;
                break;
              }
              timeBucket = TimeRangeUtils
                  .computeBucketIndex(request.getGroupByTimeGranularity(), startDateTime,
                      new DateTime(millis, dateTimeZone));
              groupKeyVal = String.valueOf(timeBucket);
            }
            groupKeys[grpKeyIdx] = groupKeyVal;
          }
          if (skipRowDueToError) {
            continue;
          }
        } else {
          groupKeys = new String[] {};
        }
        // Composite key = group keys joined with '|' (e.g. "3|US|").
        StringBuilder groupKeyBuilder = new StringBuilder("");
        for (String grpKey : groupKeys) {
          groupKeyBuilder.append(grpKey).append("|");
        }
        String compositeGroupKey = groupKeyBuilder.toString();
        String[] rowValues = dataMap.get(compositeGroupKey);
        if (rowValues == null) {
          // First time this group key is seen: metric columns default to "0".
          rowValues = new String[numCols];
          Arrays.fill(rowValues, "0");
          System.arraycopy(groupKeys, 0, rowValues, 0, groupKeys.length);
          dataMap.put(compositeGroupKey, rowValues);
        }
        // Accumulate (sum) this result set's value into its metric column.
        // NOTE(review): column index assumes one metric column per result set
        // (position + i) — confirm against how PqlUtils builds the queries.
        rowValues[groupKeys.length + position + i] =
            String.valueOf(Double.parseDouble(rowValues[groupKeys.length + position + i])
                + Double.parseDouble(resultSet.getString(r, 0)));
      }
    }
    position ++;
  }
  List<String[]> rows = new ArrayList<>();
  rows.addAll(dataMap.values());
  return rows;
}
@Override
public List<String> getDatasets() throws Exception {
    // Served from the shared dataset cache rather than querying Pinot directly.
    return CACHE_REGISTRY_INSTANCE.getDatasetsCache().getDatasets();
}
@Override
public long getMaxDataTime(String dataset) throws Exception {
    // Delegates the max-timestamp lookup for the dataset to the dedicated helper.
    return pinotDataSourceMaxTime.getMaxDateTime(dataset);
}
@Override
public Map<String, List<String>> getDimensionFilters(String dataset) throws Exception {
    // Delegates dimension-name -> candidate-values retrieval to the dedicated helper.
    return pinotDataSourceDimensionFilters.getDimensionFilters(dataset);
}
@Override
public void clear() throws Exception {
    // Intentionally a no-op: nothing in this data source is cleared here.
}
@Override
public void close() throws Exception {
    // Releases the Pinot controller HTTP client owned by this data source.
    controllerClient.close();
}
/**
 * Validates that the data source configuration contains every property required
 * to reach the Pinot controller and Zookeeper.
 *
 * <p>All missing properties are logged (not just the first) so a misconfiguration
 * can be corrected in a single pass.
 *
 * @param properties data source configuration map; may be null or empty
 * @return true only when the map is non-empty and contains all required keys
 */
private boolean isValidProperties(Map<String, String> properties) {
    if (MapUtils.isEmpty(properties)) {
        LOG.error("PinotThirdEyeDataSource is missing properties {}", properties);
        return false;
    }
    // Check all required connection properties in one data-driven loop instead of
    // four copy-pasted if-blocks; the overall result is identical.
    PinotThirdeyeDataSourceProperties[] requiredProperties = {
        PinotThirdeyeDataSourceProperties.CONTROLLER_HOST,
        PinotThirdeyeDataSourceProperties.CONTROLLER_PORT,
        PinotThirdeyeDataSourceProperties.ZOOKEEPER_URL,
        PinotThirdeyeDataSourceProperties.CLUSTER_NAME
    };
    boolean valid = true;
    for (PinotThirdeyeDataSourceProperties requiredProperty : requiredProperties) {
        if (!properties.containsKey(requiredProperty.getValue())) {
            valid = false;
            LOG.error("PinotThirdEyeDataSource is missing required property {}", requiredProperty.getValue());
        }
    }
    return valid;
}
/** TESTING ONLY - WE SHOULD NOT BE USING THIS. */
@Deprecated
public static PinotThirdEyeDataSource getDefaultTestDataSource() {
    // TODO REPLACE WITH CONFIGS
    final String testControllerHost = "localhost";
    final int testControllerPort = 11984;
    final String testZookeeperUrl = "localhost:12913/pinot-cluster";
    return fromZookeeper(testControllerHost, testControllerPort, testZookeeperUrl);
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.kms.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
 * Result object for the KMS {@code ListKeys} operation: the page of KMS keys plus
 * pagination state ({@code NextMarker}/{@code Truncated}).
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/kms-2014-11-01/ListKeys" target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListKeysResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /**
     * <p>
     * A list of KMS keys.
     * </p>
     */
    private com.amazonaws.internal.SdkInternalList<KeyListEntry> keys;
    /**
     * <p>
     * When <code>Truncated</code> is true, this element is present and contains the value to use for the
     * <code>Marker</code> parameter in a subsequent request.
     * </p>
     */
    private String nextMarker;
    /**
     * <p>
     * A flag that indicates whether there are more items in the list. When this value is true, the list in this
     * response is truncated. To get more items, pass the value of the <code>NextMarker</code> element in this response
     * to the <code>Marker</code> parameter in a subsequent request.
     * </p>
     */
    private Boolean truncated;

    /**
     * <p>
     * A list of KMS keys.
     * </p>
     *
     * @return A list of KMS keys.
     */
    public java.util.List<KeyListEntry> getKeys() {
        // Lazily initializes so callers never see null.
        if (keys == null) {
            keys = new com.amazonaws.internal.SdkInternalList<KeyListEntry>();
        }
        return keys;
    }

    /**
     * <p>
     * A list of KMS keys.
     * </p>
     *
     * @param keys
     *        A list of KMS keys.
     */
    public void setKeys(java.util.Collection<KeyListEntry> keys) {
        if (keys == null) {
            this.keys = null;
            return;
        }
        this.keys = new com.amazonaws.internal.SdkInternalList<KeyListEntry>(keys);
    }

    /**
     * <p>
     * A list of KMS keys.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setKeys(java.util.Collection)} or {@link #withKeys(java.util.Collection)} if you want to override the
     * existing values.
     * </p>
     *
     * @param keys
     *        A list of KMS keys.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListKeysResult withKeys(KeyListEntry... keys) {
        if (this.keys == null) {
            setKeys(new com.amazonaws.internal.SdkInternalList<KeyListEntry>(keys.length));
        }
        for (KeyListEntry ele : keys) {
            this.keys.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * A list of KMS keys.
     * </p>
     *
     * @param keys
     *        A list of KMS keys.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListKeysResult withKeys(java.util.Collection<KeyListEntry> keys) {
        setKeys(keys);
        return this;
    }

    /**
     * <p>
     * When <code>Truncated</code> is true, this element is present and contains the value to use for the
     * <code>Marker</code> parameter in a subsequent request.
     * </p>
     *
     * @param nextMarker
     *        When <code>Truncated</code> is true, this element is present and contains the value to use for the
     *        <code>Marker</code> parameter in a subsequent request.
     */
    public void setNextMarker(String nextMarker) {
        this.nextMarker = nextMarker;
    }

    /**
     * <p>
     * When <code>Truncated</code> is true, this element is present and contains the value to use for the
     * <code>Marker</code> parameter in a subsequent request.
     * </p>
     *
     * @return When <code>Truncated</code> is true, this element is present and contains the value to use for the
     *         <code>Marker</code> parameter in a subsequent request.
     */
    public String getNextMarker() {
        return this.nextMarker;
    }

    /**
     * <p>
     * When <code>Truncated</code> is true, this element is present and contains the value to use for the
     * <code>Marker</code> parameter in a subsequent request.
     * </p>
     *
     * @param nextMarker
     *        When <code>Truncated</code> is true, this element is present and contains the value to use for the
     *        <code>Marker</code> parameter in a subsequent request.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListKeysResult withNextMarker(String nextMarker) {
        setNextMarker(nextMarker);
        return this;
    }

    /**
     * <p>
     * A flag that indicates whether there are more items in the list. When this value is true, the list in this
     * response is truncated. To get more items, pass the value of the <code>NextMarker</code> element in this response
     * to the <code>Marker</code> parameter in a subsequent request.
     * </p>
     *
     * @param truncated
     *        A flag that indicates whether there are more items in the list. When this value is true, the list in this
     *        response is truncated. To get more items, pass the value of the <code>NextMarker</code> element in
     *        this response to the <code>Marker</code> parameter in a subsequent request.
     */
    public void setTruncated(Boolean truncated) {
        this.truncated = truncated;
    }

    /**
     * <p>
     * A flag that indicates whether there are more items in the list. When this value is true, the list in this
     * response is truncated. To get more items, pass the value of the <code>NextMarker</code> element in this response
     * to the <code>Marker</code> parameter in a subsequent request.
     * </p>
     *
     * @return A flag that indicates whether there are more items in the list. When this value is true, the list in this
     *         response is truncated. To get more items, pass the value of the <code>NextMarker</code> element in
     *         this response to the <code>Marker</code> parameter in a subsequent request.
     */
    public Boolean getTruncated() {
        return this.truncated;
    }

    /**
     * <p>
     * A flag that indicates whether there are more items in the list. When this value is true, the list in this
     * response is truncated. To get more items, pass the value of the <code>NextMarker</code> element in this response
     * to the <code>Marker</code> parameter in a subsequent request.
     * </p>
     *
     * @param truncated
     *        A flag that indicates whether there are more items in the list. When this value is true, the list in this
     *        response is truncated. To get more items, pass the value of the <code>NextMarker</code> element in
     *        this response to the <code>Marker</code> parameter in a subsequent request.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListKeysResult withTruncated(Boolean truncated) {
        setTruncated(truncated);
        return this;
    }

    /**
     * <p>
     * A flag that indicates whether there are more items in the list. When this value is true, the list in this
     * response is truncated. To get more items, pass the value of the <code>NextMarker</code> element in this response
     * to the <code>Marker</code> parameter in a subsequent request.
     * </p>
     *
     * @return A flag that indicates whether there are more items in the list. When this value is true, the list in this
     *         response is truncated. To get more items, pass the value of the <code>NextMarker</code> element in
     *         this response to the <code>Marker</code> parameter in a subsequent request.
     */
    public Boolean isTruncated() {
        return this.truncated;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getKeys() != null)
            sb.append("Keys: ").append(getKeys()).append(",");
        if (getNextMarker() != null)
            sb.append("NextMarker: ").append(getNextMarker()).append(",");
        if (getTruncated() != null)
            sb.append("Truncated: ").append(getTruncated());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (obj instanceof ListKeysResult == false)
            return false;
        ListKeysResult other = (ListKeysResult) obj;
        if (other.getKeys() == null ^ this.getKeys() == null)
            return false;
        if (other.getKeys() != null && other.getKeys().equals(this.getKeys()) == false)
            return false;
        if (other.getNextMarker() == null ^ this.getNextMarker() == null)
            return false;
        if (other.getNextMarker() != null && other.getNextMarker().equals(this.getNextMarker()) == false)
            return false;
        if (other.getTruncated() == null ^ this.getTruncated() == null)
            return false;
        if (other.getTruncated() != null && other.getTruncated().equals(this.getTruncated()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getKeys() == null) ? 0 : getKeys().hashCode());
        hashCode = prime * hashCode + ((getNextMarker() == null) ? 0 : getNextMarker().hashCode());
        hashCode = prime * hashCode + ((getTruncated() == null) ? 0 : getTruncated().hashCode());
        return hashCode;
    }

    @Override
    public ListKeysResult clone() {
        try {
            return (ListKeysResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.multimap.impl;
import com.hazelcast.config.EntryListenerConfig;
import com.hazelcast.config.MultiMapConfig;
import com.hazelcast.core.EntryListener;
import com.hazelcast.core.HazelcastInstanceAware;
import com.hazelcast.internal.cluster.Versions;
import com.hazelcast.internal.nio.ClassLoaderUtil;
import com.hazelcast.internal.serialization.Data;
import com.hazelcast.internal.util.CollectionUtil;
import com.hazelcast.internal.util.ExceptionUtil;
import com.hazelcast.map.impl.DataCollection;
import com.hazelcast.multimap.LocalMultiMapStats;
import com.hazelcast.multimap.MultiMap;
import com.hazelcast.multimap.impl.operations.EntrySetResponse;
import com.hazelcast.multimap.impl.operations.MultiMapResponse;
import com.hazelcast.spi.impl.InitializingObject;
import com.hazelcast.spi.impl.InternalCompletableFuture;
import com.hazelcast.spi.impl.NodeEngine;
import com.hazelcast.splitbrainprotection.SplitBrainProtectionOn;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.text.MessageFormat;
import java.util.Collection;
import java.util.EventListener;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.CompletionStage;
import java.util.concurrent.TimeUnit;
import static com.hazelcast.internal.util.Preconditions.checkInstanceOf;
import static com.hazelcast.internal.util.Preconditions.checkNotNull;
import static com.hazelcast.internal.util.Preconditions.checkPositive;
import static com.hazelcast.internal.util.Preconditions.checkTrueUnsupportedOperation;
import static com.hazelcast.internal.util.SetUtil.createHashSet;
/**
 * Member-side proxy implementation of {@link MultiMap}: converts user keys and
 * values to {@link Data}, validates arguments, and delegates the actual work to
 * the internal operations in {@link MultiMapProxySupport}.
 */
@SuppressWarnings("checkstyle:methodcount")
public class MultiMapProxyImpl<K, V>
        extends MultiMapProxySupport
        implements MultiMap<K, V>, InitializingObject {

    protected static final String NULL_KEY_IS_NOT_ALLOWED = "Null key is not allowed!";
    protected static final String NULL_VALUE_IS_NOT_ALLOWED = "Null value is not allowed!";
    protected static final String NULL_LISTENER_IS_NOT_ALLOWED = "Null listener is not allowed!";
    protected static final String MINIMUM_VERSION_ERROR_FORMAT = "{0} is only available with cluster version {1} or greater";
    protected static final String MINIMUM_VERSION_ERROR_4_1 = MessageFormat.format(MINIMUM_VERSION_ERROR_FORMAT,
            "MultiMap#putAllAsync", "4.1");

    public MultiMapProxyImpl(MultiMapConfig config, MultiMapService service, NodeEngine nodeEngine, String name) {
        super(config, service, nodeEngine, name);
    }

    /**
     * Registers the entry listeners declared in the MultiMap configuration,
     * instantiating class-name listeners via the config class loader and
     * injecting the HazelcastInstance where requested.
     */
    @Override
    public void initialize() {
        NodeEngine nodeEngine = getNodeEngine();
        List<EntryListenerConfig> listenerConfigs = config.getEntryListenerConfigs();
        for (EntryListenerConfig listenerConfig : listenerConfigs) {
            EntryListener listener = null;
            if (listenerConfig.getImplementation() != null) {
                // A pre-built listener instance takes precedence over a class name.
                EventListener impl = listenerConfig.getImplementation();
                listener = checkInstanceOf(EntryListener.class, impl,
                        impl + " should be an instance of EntryListener");
            } else if (listenerConfig.getClassName() != null) {
                try {
                    listener = ClassLoaderUtil.newInstance(nodeEngine.getConfigClassLoader(), listenerConfig.getClassName());
                } catch (Exception e) {
                    throw ExceptionUtil.rethrow(e);
                }
            }
            if (listener != null) {
                if (listener instanceof HazelcastInstanceAware) {
                    ((HazelcastInstanceAware) listener).setHazelcastInstance(nodeEngine.getHazelcastInstance());
                }
                if (listenerConfig.isLocal()) {
                    addLocalEntryListener(listener);
                } else {
                    addEntryListener(listener, listenerConfig.isIncludeValue());
                }
            }
        }
    }

    /**
     * Asynchronously puts all entries of the given map. Requires cluster
     * version 4.1+; each value collection is serialized as a DataCollection.
     */
    @Override
    public CompletionStage<Void> putAllAsync(@Nonnull Map<? extends K, Collection<? extends V>> m) {
        checkTrueUnsupportedOperation(isClusterVersionGreaterOrEqual(Versions.V4_1), MINIMUM_VERSION_ERROR_4_1);
        InternalCompletableFuture<Void> future = new InternalCompletableFuture<>();
        Map<Data, Data> dataMap = new HashMap<>();
        for (Map.Entry e : m.entrySet()) {
            Collection<Data> dataCollection = CollectionUtil
                    .objectToDataCollection(((Collection<? extends V>) e.getValue()),
                            getNodeEngine().getSerializationService());
            dataMap.put(toData(e.getKey()), toData(new DataCollection(dataCollection)));
        }
        putAllInternal(dataMap, future);
        return future;
    }

    /**
     * Asynchronously puts all given values under a single key. Requires
     * cluster version 4.1+.
     */
    @Override
    public CompletionStage<Void> putAllAsync(@Nonnull K key, @Nonnull Collection<? extends V> value) {
        checkTrueUnsupportedOperation(isClusterVersionGreaterOrEqual(Versions.V4_1), MINIMUM_VERSION_ERROR_4_1);
        InternalCompletableFuture<Void> future = new InternalCompletableFuture<>();
        Map<Data, Data> dataMap = new HashMap<>();
        Collection<Data> dataCollection = CollectionUtil
                .objectToDataCollection(value, getNodeEngine().getSerializationService());
        dataMap.put(toData(key), toData(new DataCollection(dataCollection)));
        putAllInternal(dataMap, future);
        return future;
    }

    @Override
    public boolean put(@Nonnull K key, @Nonnull V value) {
        checkNotNull(key, NULL_KEY_IS_NOT_ALLOWED);
        checkNotNull(value, NULL_VALUE_IS_NOT_ALLOWED);
        NodeEngine nodeEngine = getNodeEngine();
        Data dataKey = nodeEngine.toData(key);
        Data dataValue = nodeEngine.toData(value);
        // -1 is the internal "no index" marker used by putInternal.
        return putInternal(dataKey, dataValue, -1);
    }

    @Nonnull
    @Override
    public Collection<V> get(@Nonnull K key) {
        checkNotNull(key, NULL_KEY_IS_NOT_ALLOWED);
        NodeEngine nodeEngine = getNodeEngine();
        Data dataKey = nodeEngine.toData(key);
        MultiMapResponse result = getAllInternal(dataKey);
        return result.getObjectCollection(nodeEngine);
    }

    @Override
    public boolean remove(@Nonnull Object key, @Nonnull Object value) {
        checkNotNull(key, NULL_KEY_IS_NOT_ALLOWED);
        checkNotNull(value, NULL_VALUE_IS_NOT_ALLOWED);
        NodeEngine nodeEngine = getNodeEngine();
        Data dataKey = nodeEngine.toData(key);
        Data dataValue = nodeEngine.toData(value);
        return removeInternal(dataKey, dataValue);
    }

    @Nonnull
    @Override
    public Collection<V> remove(@Nonnull Object key) {
        checkNotNull(key, NULL_KEY_IS_NOT_ALLOWED);
        NodeEngine nodeEngine = getNodeEngine();
        Data dataKey = nodeEngine.toData(key);
        MultiMapResponse result = removeInternal(dataKey);
        return result.getObjectCollection(nodeEngine);
    }

    /** Removes all values for the key without returning them (cheaper than remove(key)). */
    public void delete(@Nonnull Object key) {
        checkNotNull(key, NULL_KEY_IS_NOT_ALLOWED);
        NodeEngine nodeEngine = getNodeEngine();
        Data dataKey = nodeEngine.toData(key);
        deleteInternal(dataKey);
    }

    @Nonnull
    @Override
    public Set<K> localKeySet() {
        // Read operation: fail fast if the required split-brain protection is not satisfied.
        ensureNoSplitBrain(SplitBrainProtectionOn.READ);
        Set<Data> dataKeySet = localKeySetInternal();
        return toObjectSet(dataKeySet);
    }

    @Nonnull
    @Override
    public Set<K> keySet() {
        Set<Data> dataKeySet = keySetInternal();
        return toObjectSet(dataKeySet);
    }

    @Nonnull
    @Override
    public Collection<V> values() {
        NodeEngine nodeEngine = getNodeEngine();
        Map map = valuesInternal();
        Collection values = new LinkedList();
        for (Object obj : map.values()) {
            // Partitions without data contribute null responses; skip them.
            if (obj == null) {
                continue;
            }
            MultiMapResponse response = nodeEngine.toObject(obj);
            values.addAll(response.getObjectCollection(nodeEngine));
        }
        return values;
    }

    @Nonnull
    @Override
    public Set<Map.Entry<K, V>> entrySet() {
        NodeEngine nodeEngine = getNodeEngine();
        Map map = entrySetInternal();
        Set<Map.Entry<K, V>> entrySet = new HashSet<>();
        for (Object obj : map.values()) {
            // Partitions without data contribute null responses; skip them.
            if (obj == null) {
                continue;
            }
            EntrySetResponse response = nodeEngine.toObject(obj);
            Set<Map.Entry<K, V>> entries = response.getObjectEntrySet(nodeEngine);
            entrySet.addAll(entries);
        }
        return entrySet;
    }

    @Override
    public boolean containsKey(@Nonnull K key) {
        checkNotNull(key, NULL_KEY_IS_NOT_ALLOWED);
        NodeEngine nodeEngine = getNodeEngine();
        Data dataKey = nodeEngine.toData(key);
        return containsInternal(dataKey, null);
    }

    @Override
    public boolean containsValue(@Nonnull Object value) {
        checkNotNull(value, NULL_VALUE_IS_NOT_ALLOWED);
        NodeEngine nodeEngine = getNodeEngine();
        Data valueKey = nodeEngine.toData(value);
        return containsInternal(null, valueKey);
    }

    @Override
    public boolean containsEntry(@Nonnull K key, @Nonnull V value) {
        checkNotNull(key, NULL_KEY_IS_NOT_ALLOWED);
        checkNotNull(value, NULL_VALUE_IS_NOT_ALLOWED);
        NodeEngine nodeEngine = getNodeEngine();
        Data dataKey = nodeEngine.toData(key);
        Data valueKey = nodeEngine.toData(value);
        return containsInternal(dataKey, valueKey);
    }

    @Override
    public int valueCount(@Nonnull K key) {
        checkNotNull(key, NULL_KEY_IS_NOT_ALLOWED);
        NodeEngine nodeEngine = getNodeEngine();
        Data dataKey = nodeEngine.toData(key);
        return countInternal(dataKey);
    }

    @Nonnull
    @Override
    public UUID addLocalEntryListener(@Nonnull EntryListener<K, V> listener) {
        checkNotNull(listener, NULL_LISTENER_IS_NOT_ALLOWED);
        return getService().addLocalListener(name, listener, null, false);
    }

    @Nonnull
    @Override
    public UUID addEntryListener(@Nonnull EntryListener<K, V> listener, boolean includeValue) {
        checkNotNull(listener, NULL_LISTENER_IS_NOT_ALLOWED);
        return getService().addListener(name, listener, null, includeValue);
    }

    @Override
    public boolean removeEntryListener(@Nonnull UUID registrationId) {
        checkNotNull(registrationId, "Registration ID should not be null!");
        return getService().removeListener(name, registrationId);
    }

    @Nonnull
    @Override
    public UUID addEntryListener(@Nonnull EntryListener<K, V> listener, @Nonnull K key, boolean includeValue) {
        checkNotNull(listener, NULL_LISTENER_IS_NOT_ALLOWED);
        checkNotNull(key, NULL_KEY_IS_NOT_ALLOWED);
        NodeEngine nodeEngine = getNodeEngine();
        Data dataKey = nodeEngine.toData(key);
        return getService().addListener(name, listener, dataKey, includeValue);
    }

    @Override
    public void lock(@Nonnull K key) {
        checkNotNull(key, NULL_KEY_IS_NOT_ALLOWED);
        NodeEngine nodeEngine = getNodeEngine();
        Data dataKey = nodeEngine.toData(key);
        lockSupport.lock(nodeEngine, dataKey);
    }

    @Override
    public void lock(@Nonnull K key, long leaseTime, @Nonnull TimeUnit timeUnit) {
        checkNotNull(key, NULL_KEY_IS_NOT_ALLOWED);
        checkNotNull(timeUnit, "Null timeUnit is not allowed!");
        checkPositive("leaseTime", leaseTime);
        NodeEngine nodeEngine = getNodeEngine();
        Data dataKey = nodeEngine.toData(key);
        lockSupport.lock(nodeEngine, dataKey, timeUnit.toMillis(leaseTime));
    }

    @Override
    public boolean isLocked(@Nonnull K key) {
        checkNotNull(key, NULL_KEY_IS_NOT_ALLOWED);
        NodeEngine nodeEngine = getNodeEngine();
        Data dataKey = nodeEngine.toData(key);
        return lockSupport.isLocked(nodeEngine, dataKey);
    }

    @Override
    public boolean tryLock(@Nonnull K key) {
        checkNotNull(key, NULL_KEY_IS_NOT_ALLOWED);
        NodeEngine nodeEngine = getNodeEngine();
        Data dataKey = nodeEngine.toData(key);
        return lockSupport.tryLock(nodeEngine, dataKey);
    }

    @Override
    public boolean tryLock(@Nonnull K key, long time, TimeUnit timeunit)
            throws InterruptedException {
        checkNotNull(key, NULL_KEY_IS_NOT_ALLOWED);
        NodeEngine nodeEngine = getNodeEngine();
        Data dataKey = nodeEngine.toData(key);
        return lockSupport.tryLock(nodeEngine, dataKey, time, timeunit);
    }

    @Override
    public boolean tryLock(@Nonnull K key,
                           long time, @Nullable TimeUnit timeunit,
                           long leaseTime, @Nullable TimeUnit leaseUnit) throws InterruptedException {
        checkNotNull(key, NULL_KEY_IS_NOT_ALLOWED);
        NodeEngine nodeEngine = getNodeEngine();
        Data dataKey = nodeEngine.toData(key);
        return lockSupport.tryLock(nodeEngine, dataKey, time, timeunit, leaseTime, leaseUnit);
    }

    @Override
    public void unlock(@Nonnull K key) {
        checkNotNull(key, NULL_KEY_IS_NOT_ALLOWED);
        NodeEngine nodeEngine = getNodeEngine();
        Data dataKey = nodeEngine.toData(key);
        lockSupport.unlock(nodeEngine, dataKey);
    }

    @Override
    public void forceUnlock(@Nonnull K key) {
        checkNotNull(key, NULL_KEY_IS_NOT_ALLOWED);
        NodeEngine nodeEngine = getNodeEngine();
        Data dataKey = nodeEngine.toData(key);
        lockSupport.forceUnlock(nodeEngine, dataKey);
    }

    @Nonnull
    @Override
    public LocalMultiMapStats getLocalMultiMapStats() {
        return getService().createStats(name);
    }

    // Deserializes a set of Data keys back into user objects.
    private Set<K> toObjectSet(Set<Data> dataSet) {
        NodeEngine nodeEngine = getNodeEngine();
        Set<K> keySet = createHashSet(dataSet.size());
        for (Data dataKey : dataSet) {
            keySet.add((K) nodeEngine.toObject(dataKey));
        }
        return keySet;
    }

    private void ensureNoSplitBrain(SplitBrainProtectionOn requiredSplitBrainProtectionPermissionType) {
        getService().ensureNoSplitBrain(name, requiredSplitBrainProtectionPermissionType);
    }
}
| |
package com.fsck.k9.mail.store.pop3;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.Socket;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateException;
import com.fsck.k9.mail.AuthType;
import com.fsck.k9.mail.AuthenticationFailedException;
import com.fsck.k9.mail.CertificateValidationException;
import com.fsck.k9.mail.CertificateValidationException.Reason;
import com.fsck.k9.mail.ConnectionSecurity;
import com.fsck.k9.mail.MessagingException;
import com.fsck.k9.mail.filter.Base64;
import com.fsck.k9.mail.helpers.TestTrustedSocketFactory;
import com.fsck.k9.mail.ssl.TrustedSocketFactory;
import javax.net.ssl.SSLException;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyZeroInteractions;
import static org.mockito.Mockito.when;
public class Pop3ConnectionTest {
// Connection endpoint used by the mocked socket factory.
private static final String host = "server";
private static final int port = 12345;
private static String username = "user";
private static String password = "password";
// Canned POP3 protocol exchanges (server responses and expected client commands).
private static final String INITIAL_RESPONSE = "+OK POP3 server greeting\r\n";
private static final String AUTH = "AUTH\r\n";
private static final String AUTH_HANDLE_RESPONSE =
        "+OK Listing of supported mechanisms follows\r\n" +
        "PLAIN\r\n" +
        "CRAM-MD5\r\n" +
        "EXTERNAL\r\n" +
        ".\r\n";
private static final String CAPA =
        "CAPA\r\n";
private static final String CAPA_RESPONSE =
        "+OK Listing of supported mechanisms follows\r\n" +
        "PLAIN\r\n" +
        "CRAM-MD5\r\n" +
        "EXTERNAL\r\n" +
        ".\r\n";
// AUTH PLAIN command with base64("\0user\0password") as per RFC 4616.
private static final String AUTH_PLAIN_WITH_LOGIN = "AUTH PLAIN\r\n" +
        new String(Base64.encodeBase64(("\000"+username+"\000"+password).getBytes())) + "\r\n";
private static final String AUTH_PLAIN_AUTHENTICATED_RESPONSE = "+OK\r\n" + "+OK\r\n";
private static final String SUCCESSFUL_PLAIN_AUTH = AUTH + CAPA + AUTH_PLAIN_WITH_LOGIN;
private static final String SUCCESSFUL_PLAIN_AUTH_RESPONSE =
        INITIAL_RESPONSE +
        AUTH_HANDLE_RESPONSE +
        CAPA_RESPONSE +
        AUTH_PLAIN_AUTHENTICATED_RESPONSE;
/**
private static final String AUTH_PLAIN_FAILED_RESPONSE = "+OK\r\n" + "Plain authentication failure";
private static final String STAT = "STAT\r\n";
private static final String STAT_RESPONSE = "+OK 20 0\r\n";
private static final String UIDL_UNSUPPORTED_RESPONSE = "-ERR UIDL unsupported\r\n";
private static final String UIDL_SUPPORTED_RESPONSE = "+OK UIDL supported\r\n";
**/
// Per-test fixtures, re-created in before().
private TrustedSocketFactory mockTrustedSocketFactory;
private Socket mockSocket;
private ByteArrayOutputStream outputStreamForMockSocket;
private SimplePop3Settings settings;
private TrustedSocketFactory socketFactory;
@Before
public void before() throws Exception {
    // Fresh settings, fresh mocks and a real test socket factory for every test.
    createCommonSettings();
    createMocks();
    socketFactory = TestTrustedSocketFactory.newInstance();
}
// Builds the base POP3 settings shared by all tests (credentials only;
// host/port/security are added per-test).
private void createCommonSettings() {
    settings = new SimplePop3Settings();
    settings.setUsername(username);
    settings.setPassword(password);
}
// Creates a mocked socket factory that hands out a connected mock socket whose
// output is captured in outputStreamForMockSocket for later assertions.
private void createMocks()
        throws MessagingException, IOException, NoSuchAlgorithmException, KeyManagementException {
    mockTrustedSocketFactory = mock(TrustedSocketFactory.class);
    mockSocket = mock(Socket.class);
    outputStreamForMockSocket = new ByteArrayOutputStream();
    when(mockTrustedSocketFactory.createSocket(null, host, port, null))
            .thenReturn(mockSocket);
    when(mockSocket.getOutputStream()).thenReturn(outputStreamForMockSocket);
    when(mockSocket.isConnected()).thenReturn(true);
}
// Points the settings at the endpoint stubbed in createMocks(), over SSL/TLS.
private void addSettingsForValidMockSocket() {
    settings.setHost(host);
    settings.setPort(port);
    settings.setConnectionSecurity(ConnectionSecurity.SSL_TLS_REQUIRED);
}
@Test
public void constructor_doesntCreateSocket() throws Exception {
    // Connecting must be lazy: constructing a Pop3Connection alone should not
    // touch the socket factory.
    addSettingsForValidMockSocket();
    settings.setAuthType(AuthType.PLAIN);
    new Pop3Connection(settings, mockTrustedSocketFactory);
    verifyZeroInteractions(mockTrustedSocketFactory);
}
//Using MockSocketFactory
@Test(expected = CertificateValidationException.class)
public void open_whenTrustedSocketFactoryThrowsSSLCertificateException_throwCertificateValidationException()
        throws Exception {
    // An SSLException caused by a CertificateException must surface as a
    // CertificateValidationException (distinct from generic MessagingException).
    when(mockTrustedSocketFactory.createSocket(null, host, port, null)).thenThrow(
            new SSLException(new CertificateException()));
    addSettingsForValidMockSocket();
    settings.setAuthType(AuthType.PLAIN);
    Pop3Connection connection = new Pop3Connection(settings, mockTrustedSocketFactory);
    connection.open();
}
@Test(expected = MessagingException.class)
public void open_whenTrustedSocketFactoryThrowsCertificateException_throwMessagingException() throws Exception {
    // An SSLException without a certificate cause is reported as a plain MessagingException.
    when(mockTrustedSocketFactory.createSocket(null, host, port, null)).thenThrow(
            new SSLException(""));
    addSettingsForValidMockSocket();
    settings.setAuthType(AuthType.PLAIN);
    Pop3Connection connection = new Pop3Connection(settings, mockTrustedSocketFactory);
    connection.open();
}
@Test(expected = MessagingException.class)
public void open_whenTrustedSocketFactoryThrowsGeneralSecurityException_throwMessagingException() throws Exception {
    // Security-setup failures (e.g. missing algorithm) are wrapped as MessagingException.
    when(mockTrustedSocketFactory.createSocket(null, host, port, null)).thenThrow(
            new NoSuchAlgorithmException(""));
    addSettingsForValidMockSocket();
    settings.setAuthType(AuthType.PLAIN);
    Pop3Connection connection = new Pop3Connection(settings, mockTrustedSocketFactory);
    connection.open();
}
@Test(expected = MessagingException.class)
public void open_whenTrustedSocketFactoryThrowsIOException_throwMessagingException() throws Exception {
    // I/O failures while creating the socket are wrapped as MessagingException.
    when(mockTrustedSocketFactory.createSocket(null, host, port, null)).thenThrow(
            new IOException(""));
    addSettingsForValidMockSocket();
    settings.setAuthType(AuthType.PLAIN);
    Pop3Connection connection = new Pop3Connection(settings, mockTrustedSocketFactory);
    connection.open();
}
@Test(expected = MessagingException.class)
public void open_whenSocketNotConnected_throwsMessagingException() throws Exception {
    // A factory-produced socket that reports isConnected() == false must fail open().
    when(mockSocket.isConnected()).thenReturn(false);
    addSettingsForValidMockSocket();
    settings.setAuthType(AuthType.PLAIN);
    Pop3Connection connection = new Pop3Connection(settings, mockTrustedSocketFactory);
    connection.open();
}
@Test
public void open_withTLS_authenticatesOverSocket() throws Exception {
    // Happy path: feed the canned server responses in, then verify the client
    // wrote exactly AUTH, CAPA and AUTH PLAIN with the encoded credentials.
    when(mockSocket.getInputStream()).thenReturn(new ByteArrayInputStream(SUCCESSFUL_PLAIN_AUTH_RESPONSE.getBytes()));
    addSettingsForValidMockSocket();
    settings.setAuthType(AuthType.PLAIN);
    Pop3Connection connection = new Pop3Connection(settings, mockTrustedSocketFactory);
    connection.open();
    assertEquals(SUCCESSFUL_PLAIN_AUTH, new String(outputStreamForMockSocket.toByteArray()));
}
//Using both
// STARTTLS is required but the mock server does not advertise STLS, so
// open() must fail with CertificateValidationException.
@Test(expected = CertificateValidationException.class)
public void open_withSTLSunavailable_throwsCertificateValidationException() throws Exception {
    // The helper starts the mock server and points |settings| at it; the
    // returned server handle is not needed, so the unused local was removed.
    setupUnavailableStartTLSConnection();
    settings.setAuthType(AuthType.PLAIN);
    settings.setConnectionSecurity(ConnectionSecurity.STARTTLS_REQUIRED);
    createAndOpenPop3Connection(settings, mockTrustedSocketFactory);
}
// When STLS is unavailable, no TLS socket must ever be requested from the
// trusted socket factory, regardless of how open() fails.
@Test
public void open_withSTLSunavailable_doesntCreateSocket() throws Exception {
    // Starts the mock server and configures |settings|; the returned server
    // handle is not needed, so the unused local was removed.
    setupUnavailableStartTLSConnection();
    settings.setAuthType(AuthType.PLAIN);
    settings.setConnectionSecurity(ConnectionSecurity.STARTTLS_REQUIRED);
    try {
        Pop3Connection connection = new Pop3Connection(settings, mockTrustedSocketFactory);
        connection.open();
    } catch (Exception ignored) {
        // open() is expected to fail; this test only verifies that no TLS
        // socket was created.
    }
    verify(mockTrustedSocketFactory, never()).createSocket(any(Socket.class), anyString(),
            anyInt(), anyString());
}
// The server answers the STLS command with -ERR; open() must surface that
// as a Pop3ErrorResponse.
@Test(expected = Pop3ErrorResponse.class)
public void open_withStartTLS_withSTLSerr_throwsException() throws Exception {
    MockPop3Server server = setupFailedStartTLSConnection();
    when(mockTrustedSocketFactory.createSocket(
            any(Socket.class), eq(server.getHost()), eq(server.getPort()), eq((String) null)))
            .thenReturn(mockSocket);
    when(mockSocket.getInputStream()).thenReturn(new ByteArrayInputStream(SUCCESSFUL_PLAIN_AUTH_RESPONSE.getBytes()));
    createAndOpenPop3Connection(settings, mockTrustedSocketFactory);
}
// When the STLS command is rejected, the TLS socket upgrade must never be
// attempted through the trusted socket factory.
@Test
public void open_withStartTLS_withSTLSerr_doesntCreateSocket() throws Exception {
    // Fixed: removed the stray empty statement (double semicolon) after this call.
    MockPop3Server server = setupFailedStartTLSConnection();
    when(mockTrustedSocketFactory.createSocket(
            any(Socket.class), eq(server.getHost()), eq(server.getPort()), eq((String) null)))
            .thenReturn(mockSocket);
    when(mockSocket.getInputStream()).thenReturn(new ByteArrayInputStream(SUCCESSFUL_PLAIN_AUTH_RESPONSE.getBytes()));
    try {
        createAndOpenPop3Connection(settings, mockTrustedSocketFactory);
    } catch (Exception ignored) {
        // open() is expected to fail after the -ERR response to STLS.
    }
    verify(mockTrustedSocketFactory, never()).createSocket(any(Socket.class), anyString(),
            anyInt(), anyString());
}
// After a successful STLS negotiation, open() must ask the trusted socket
// factory to wrap the plain socket in a TLS socket.
@Test
public void open_withStartTLS_usesSocketFactoryToCreateTLSSocket() throws Exception {
    MockPop3Server server = setupStartTLSConnection();
    settings.setAuthType(AuthType.PLAIN);
    when(mockTrustedSocketFactory.createSocket(
            any(Socket.class), eq(server.getHost()), eq(server.getPort()), eq((String) null)))
            .thenReturn(mockSocket);
    when(mockSocket.getInputStream()).thenReturn(new ByteArrayInputStream(SUCCESSFUL_PLAIN_AUTH_RESPONSE.getBytes()));
    createAndOpenPop3Connection(settings, mockTrustedSocketFactory);
    verify(mockTrustedSocketFactory).createSocket(any(Socket.class), eq(server.getHost()),
            eq(server.getPort()), eq((String) null));
}
// When STLS negotiation succeeds but the socket factory fails to create the
// TLS socket, open() must translate the IOException into a MessagingException.
@Test(expected = MessagingException.class)
public void open_withStartTLS_whenSocketFactoryThrowsException_ThrowsException() throws Exception {
    MockPop3Server server = setupStartTLSConnection();
    settings.setAuthType(AuthType.PLAIN);
    when(mockTrustedSocketFactory.createSocket(
            any(Socket.class), eq(server.getHost()), eq(server.getPort()), eq((String) null)))
            .thenThrow(new IOException());
    when(mockSocket.getInputStream()).thenReturn(new ByteArrayInputStream(SUCCESSFUL_PLAIN_AUTH_RESPONSE.getBytes()));
    createAndOpenPop3Connection(settings, mockTrustedSocketFactory);
    // Fixed: removed the trailing verify(...) call -- with @Test(expected=...)
    // the MessagingException propagates out of open(), so code after that
    // call was unreachable and never executed.
}
// After the STLS upgrade, the PLAIN authentication dialog must be written
// to the TLS socket (the mock socket returned by the factory).
@Test
public void open_withStartTLS_authenticatesOverSecureSocket() throws Exception {
    MockPop3Server server = setupStartTLSConnection();
    settings.setAuthType(AuthType.PLAIN);
    when(mockTrustedSocketFactory.createSocket(
            any(Socket.class), eq(server.getHost()), eq(server.getPort()), eq((String) null)))
            .thenReturn(mockSocket);
    when(mockSocket.getInputStream()).thenReturn(new ByteArrayInputStream(SUCCESSFUL_PLAIN_AUTH_RESPONSE.getBytes()));
    createAndOpenPop3Connection(settings, mockTrustedSocketFactory);
    String written = new String(outputStreamForMockSocket.toByteArray());
    assertEquals(SUCCESSFUL_PLAIN_AUTH, written);
}
/**
 * Starts a mock POP3 server that advertises STLS and accepts the STLS
 * command, and points the shared settings at it with STARTTLS required.
 * Fixed: removed a stray discarded {@code new MockPop3Server();} expression
 * that sat on the signature line and created a server that was never used.
 */
private MockPop3Server setupStartTLSConnection() throws IOException {
    MockPop3Server server = new MockPop3Server();
    setupServerWithStartTLSAvailable(server);
    server.expect("STLS");
    server.output("+OK Begin TLS negotiation");
    server.start();
    settings.setHost(server.getHost());
    settings.setPort(server.getPort());
    settings.setConnectionSecurity(ConnectionSecurity.STARTTLS_REQUIRED);
    return server;
}
/**
 * Starts a mock POP3 server that advertises STLS but rejects the STLS
 * command with -ERR, and points the shared settings at it.
 * Fixed: removed a stray discarded {@code new MockPop3Server();} expression
 * on the signature line that allocated a server which was never used.
 */
private MockPop3Server setupFailedStartTLSConnection() throws IOException {
    MockPop3Server server = new MockPop3Server();
    setupServerWithStartTLSAvailable(server);
    server.expect("STLS");
    server.output("-ERR Unavailable");
    server.start();
    settings.setHost(server.getHost());
    settings.setPort(server.getPort());
    settings.setConnectionSecurity(ConnectionSecurity.STARTTLS_REQUIRED);
    return server;
}
/**
 * Starts a mock POP3 server whose CAPA response does NOT list STLS, and
 * points the shared settings at it with STARTTLS required.
 * Fixed: removed a stray discarded {@code new MockPop3Server();} expression
 * on the signature line that allocated a server which was never used.
 */
private MockPop3Server setupUnavailableStartTLSConnection() throws IOException {
    MockPop3Server server = new MockPop3Server();
    server.output("+OK POP3 server greeting");
    server.expect("AUTH");
    server.output("+OK Listing of supported mechanisms follows");
    server.output("PLAIN");
    server.output(".");
    server.expect("CAPA");
    server.output("+OK Listing of supported mechanisms follows");
    server.output(".");
    server.start();
    settings.setHost(server.getHost());
    settings.setPort(server.getPort());
    settings.setConnectionSecurity(ConnectionSecurity.STARTTLS_REQUIRED);
    return server;
}
/**
* Scripts the greeting/AUTH/CAPA exchange for a server whose CAPA response
* advertises STLS. The caller scripts the STLS command itself and then
* calls server.start(). The expect/output order is the exact wire dialog.
*/
private void setupServerWithStartTLSAvailable(MockPop3Server server) {
server.output("+OK POP3 server greeting");
server.expect("AUTH");
server.output("+OK Listing of supported mechanisms follows");
server.output("PLAIN");
server.output(".");
server.expect("CAPA");
server.output("+OK Listing of supported mechanisms follows");
server.output("STLS");
server.output(".");
}
//Using RealSocketFactory with MockPop3Server
// Happy path: the server advertises PLAIN in both AUTH and CAPA listings,
// so the client authenticates via AUTH PLAIN with a base64-encoded
// NUL user NUL password blob.
@Test
public void open_withAuthTypePlainAndPlainAuthCapability_performsPlainAuth() throws Exception {
settings.setAuthType(AuthType.PLAIN);
MockPop3Server server = new MockPop3Server();
server.output("+OK POP3 server greeting");
server.expect("AUTH");
server.output("+OK Listing of supported mechanisms follows");
server.output("PLAIN");
server.output("CRAM-MD5");
server.output("EXTERNAL");
server.output(".");
server.expect("CAPA");
server.output("+OK Listing of supported mechanisms follows");
server.output("PLAIN");
server.output("CRAM-MD5");
server.output("EXTERNAL");
server.output(".");
server.expect("AUTH PLAIN");
server.output("+OK");
// PLAIN SASL blob: \0 + username + \0 + password, base64-encoded.
server.expect(new String(Base64.encodeBase64(("\000"+username+"\000"+password).getBytes())));
server.output("+OK");
startServerAndCreateOpenConnection(server);
server.verifyConnectionStillOpen();
server.verifyInteractionCompleted();
}
// Same exchange, but the server rejects the credentials; open() must throw
// AuthenticationFailedException and the scripted dialog must still complete.
@Test
public void open_withAuthTypePlainAndPlainAuthCapabilityAndInvalidPasswordResponse_throwsException() throws Exception {
settings.setAuthType(AuthType.PLAIN);
MockPop3Server server = new MockPop3Server();
server.output("+OK POP3 server greeting");
server.expect("AUTH");
server.output("+OK Listing of supported mechanisms follows");
server.output("PLAIN");
server.output("CRAM-MD5");
server.output("EXTERNAL");
server.output(".");
server.expect("CAPA");
server.output("+OK Listing of supported mechanisms follows");
server.output("PLAIN");
server.output("CRAM-MD5");
server.output("EXTERNAL");
server.output(".");
server.expect("AUTH PLAIN");
server.output("+OK");
server.expect(new String(Base64.encodeBase64(("\000"+username+"\000"+password).getBytes())));
// -ERR here is the credential rejection that should trigger the exception.
server.output("-ERR");
try {
startServerAndCreateOpenConnection(server);
fail("Expected auth failure");
} catch (AuthenticationFailedException ignored) {}
server.verifyInteractionCompleted();
}
// The server does not advertise PLAIN, so the client falls back to
// USER/PASS login; a successful PASS response lets open() complete.
// Fixed: this method's body was swapped with its sibling
// (...AndLoginFailure_throwsException) -- it previously scripted a -ERR
// failure, contradicting its "performsLogin" name.
@Test
public void open_withAuthTypePlainAndNoPlainAuthCapability_performsLogin() throws Exception {
    settings.setAuthType(AuthType.PLAIN);
    MockPop3Server server = new MockPop3Server();
    server.output("+OK POP3 server greeting");
    server.expect("AUTH");
    server.output("+OK Listing of supported mechanisms follows");
    server.output("CRAM-MD5");
    server.output("EXTERNAL");
    server.output(".");
    server.expect("CAPA");
    server.output("+OK Listing of supported mechanisms follows");
    server.output("CRAM-MD5");
    server.output("EXTERNAL");
    server.output(".");
    server.expect("USER user");
    server.output("+OK");
    server.expect("PASS password");
    server.output("+OK");
    startServerAndCreateOpenConnection(server);
    server.verifyInteractionCompleted();
}
// The USER/PASS fallback is rejected by the server (-ERR on PASS), so
// open() must throw AuthenticationFailedException.
// Fixed: this method's body was swapped with its sibling
// (..._performsLogin) -- it previously scripted a successful +OK login,
// contradicting its "LoginFailure_throwsException" name.
@Test
public void open_withAuthTypePlainAndNoPlainAuthCapabilityAndLoginFailure_throwsException() throws Exception {
    settings.setAuthType(AuthType.PLAIN);
    MockPop3Server server = new MockPop3Server();
    server.output("+OK POP3 server greeting");
    server.expect("AUTH");
    server.output("+OK Listing of supported mechanisms follows");
    server.output("CRAM-MD5");
    server.output("EXTERNAL");
    server.output(".");
    server.expect("CAPA");
    server.output("+OK Listing of supported mechanisms follows");
    server.output("CRAM-MD5");
    server.output("EXTERNAL");
    server.output(".");
    server.expect("USER user");
    server.output("+OK");
    server.expect("PASS password");
    server.output("-ERR");
    try {
        startServerAndCreateOpenConnection(server);
        fail("Expected auth failure");
    } catch (AuthenticationFailedException ignored) {}
    server.verifyInteractionCompleted();
}
// CRAM-MD5 advertised: the client sends AUTH CRAM-MD5, receives the "+ abcd"
// challenge, and answers with the base64 user/digest response below.
@Test
public void open_withAuthTypeCramMd5AndCapability_performsCramMd5Auth() throws IOException, MessagingException {
settings.setAuthType(AuthType.CRAM_MD5);
MockPop3Server server = new MockPop3Server();
server.output("+OK POP3 server greeting");
server.expect("AUTH");
server.output("+OK Listing of supported mechanisms follows");
server.output("PLAIN");
server.output("CRAM-MD5");
server.output("EXTERNAL");
server.output(".");
server.expect("CAPA");
server.output("+OK Listing of supported mechanisms follows");
server.output("PLAIN");
server.output("CRAM-MD5");
server.output("EXTERNAL");
server.output(".");
server.expect("AUTH CRAM-MD5");
server.output("+ abcd");
// base64("user <md5-digest>") computed for the fixed challenge "abcd".
server.expect("dXNlciBhZGFhZTU2Zjk1NzAxZjQwNDQwZjhhMWU2YzY1ZjZmZg==");
server.output("+OK");
startServerAndCreateOpenConnection(server);
server.verifyConnectionStillOpen();
server.verifyInteractionCompleted();
}
// Same CRAM-MD5 exchange, but the server rejects the digest; open() must
// throw AuthenticationFailedException.
@Test
public void open_withAuthTypeCramMd5AndCapabilityAndCramFailure_throwsException() throws IOException, MessagingException {
settings.setAuthType(AuthType.CRAM_MD5);
MockPop3Server server = new MockPop3Server();
server.output("+OK POP3 server greeting");
server.expect("AUTH");
server.output("+OK Listing of supported mechanisms follows");
server.output("PLAIN");
server.output("CRAM-MD5");
server.output("EXTERNAL");
server.output(".");
server.expect("CAPA");
server.output("+OK Listing of supported mechanisms follows");
server.output("PLAIN");
server.output("CRAM-MD5");
server.output("EXTERNAL");
server.output(".");
server.expect("AUTH CRAM-MD5");
server.output("+ abcd");
server.expect("dXNlciBhZGFhZTU2Zjk1NzAxZjQwNDQwZjhhMWU2YzY1ZjZmZg==");
server.output("-ERR");
try {
startServerAndCreateOpenConnection(server);
fail("Expected auth failure");
} catch (AuthenticationFailedException ignored) {}
server.verifyInteractionCompleted();
}
// CRAM-MD5 not advertised: the client falls back to APOP, using the
// "<a>" timestamp embedded in the greeting to compute the digest.
@Test
public void open_withAuthTypeCramMd5AndNoCapability_performsApopAuth() throws IOException, MessagingException {
settings.setAuthType(AuthType.CRAM_MD5);
MockPop3Server server = new MockPop3Server();
server.output("+OK abc<a>abcd");
server.expect("AUTH");
server.output("+OK Listing of supported mechanisms follows");
server.output("PLAIN");
server.output("EXTERNAL");
server.output(".");
server.expect("CAPA");
server.output("+OK Listing of supported mechanisms follows");
server.output("PLAIN");
server.output("EXTERNAL");
server.output(".");
server.expect("APOP user c8e8c560e385faaa6367d4145572b8ea");
server.output("+OK");
startServerAndCreateOpenConnection(server);
server.verifyConnectionStillOpen();
server.verifyInteractionCompleted();
}
// APOP fallback rejected by the server; open() must throw
// AuthenticationFailedException.
@Test
public void open_withAuthTypeCramMd5AndNoCapabilityAndApopFailure_throwsException() throws IOException, MessagingException {
settings.setAuthType(AuthType.CRAM_MD5);
MockPop3Server server = new MockPop3Server();
server.output("+OK abc<a>abcd");
server.expect("AUTH");
server.output("+OK Listing of supported mechanisms follows");
server.output("PLAIN");
server.output("EXTERNAL");
server.output(".");
server.expect("CAPA");
server.output("+OK Listing of supported mechanisms follows");
server.output("PLAIN");
server.output("EXTERNAL");
server.output(".");
server.expect("APOP user c8e8c560e385faaa6367d4145572b8ea");
server.output("-ERR");
try {
startServerAndCreateOpenConnection(server);
fail("Expected auth failure");
} catch (AuthenticationFailedException ignored) {}
server.verifyInteractionCompleted();
}
// EXTERNAL advertised: the client sends AUTH EXTERNAL with the base64
// username ("dXNlcg==" is base64 for "user").
@Test
public void open_withAuthTypeExternalAndCapability_performsExternalAuth() throws IOException, MessagingException {
settings.setAuthType(AuthType.EXTERNAL);
MockPop3Server server = new MockPop3Server();
server.output("+OK POP3 server greeting");
server.expect("AUTH");
server.output("+OK Listing of supported mechanisms follows");
server.output("PLAIN");
server.output("CRAM-MD5");
server.output("EXTERNAL");
server.output(".");
server.expect("CAPA");
server.output("+OK Listing of supported mechanisms follows");
server.output("PLAIN");
server.output("CRAM-MD5");
server.output("EXTERNAL");
server.output(".");
server.expect("AUTH EXTERNAL dXNlcg==");
server.output("+OK");
startServerAndCreateOpenConnection(server);
server.verifyConnectionStillOpen();
server.verifyInteractionCompleted();
}
// EXTERNAL missing from the AUTH listing (though present in CAPA): open()
// must fail with a CertificateValidationException whose reason is
// MissingCapability. NOTE(review): apparently the AUTH listing, not CAPA,
// governs the capability check here -- confirm against Pop3Connection.
@Test
public void open_withAuthTypeExternalAndNoCapability_throwsCVE() throws IOException, MessagingException {
settings.setAuthType(AuthType.EXTERNAL);
MockPop3Server server = new MockPop3Server();
server.output("+OK POP3 server greeting");
server.expect("AUTH");
server.output("+OK Listing of supported mechanisms follows");
server.output("PLAIN");
server.output("CRAM-MD5");
server.output(".");
server.expect("CAPA");
server.output("+OK Listing of supported mechanisms follows");
server.output("PLAIN");
server.output("CRAM-MD5");
server.output("EXTERNAL");
server.output(".");
try {
startServerAndCreateOpenConnection(server);
fail("CVE expected");
} catch (CertificateValidationException e) {
assertEquals(Reason.MissingCapability, e.getReason());
}
server.verifyConnectionStillOpen();
server.verifyInteractionCompleted();
}
// EXTERNAL advertised but the server rejects the client certificate;
// open() must throw a CertificateValidationException carrying the
// server's error text.
@Test
public void open_withAuthTypeExternalAndCapability_withRejection_throwsCVE() throws IOException, MessagingException {
settings.setAuthType(AuthType.EXTERNAL);
MockPop3Server server = new MockPop3Server();
server.output("+OK POP3 server greeting");
server.expect("AUTH");
server.output("+OK Listing of supported mechanisms follows");
server.output("PLAIN");
server.output("CRAM-MD5");
server.output("EXTERNAL");
server.output(".");
server.expect("CAPA");
server.output("+OK Listing of supported mechanisms follows");
server.output("PLAIN");
server.output("CRAM-MD5");
server.output("EXTERNAL");
server.output(".");
server.expect("AUTH EXTERNAL dXNlcg==");
server.output("-ERR Invalid certificate");
try {
startServerAndCreateOpenConnection(server);
fail("CVE expected");
} catch (CertificateValidationException e) {
assertEquals("POP3 client certificate authentication failed: -ERR Invalid certificate", e.getMessage());
}
server.verifyInteractionCompleted();
}
/**
* Starts the scripted mock server, points the shared settings at its
* host/port, and opens a real Pop3Connection against it using the real
* socket factory. Must start the server before reading host/port.
*/
private void startServerAndCreateOpenConnection(MockPop3Server server) throws IOException,
MessagingException {
server.start();
settings.setHost(server.getHost());
settings.setPort(server.getPort());
createAndOpenPop3Connection(settings, socketFactory);
}
/** Builds a Pop3Connection from the given settings/factory and opens it. */
private void createAndOpenPop3Connection(Pop3Settings settings, TrustedSocketFactory socketFactory)
        throws MessagingException {
    new Pop3Connection(settings, socketFactory).open();
}
}
| |
package com.googlecode.objectify.cache;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Future;
import com.google.appengine.api.datastore.AsyncDatastoreService;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.api.datastore.EntityNotFoundException;
import com.google.appengine.api.datastore.Key;
import com.google.appengine.api.datastore.KeyFactory;
import com.google.appengine.api.datastore.KeyRange;
import com.google.appengine.api.datastore.PreparedQuery;
import com.google.appengine.api.datastore.Query;
import com.google.appengine.api.datastore.Transaction;
import com.google.appengine.api.memcache.Expiration;
import com.google.appengine.api.memcache.MemcacheService;
import com.googlecode.objectify.ObjectifyFactory;
import com.googlecode.objectify.annotation.Cached;
import com.googlecode.objectify.util.FutureHelper;
import com.googlecode.objectify.util.SimpleFutureWrapper;
/**
* <p>A write-through memcache for Entity objects that works for both transactional
* and nontransactional sessions. Entity cacheability and expiration are determined
* by the {@code @Cached} annotation on the POJO.</p>
*
* <ul>
* <li>Caches negative results as well as positive results.</li>
* <li>Queries do not affect the cache in any way.</li>
* <li>Transactional reads bypass the cache, but successful transaction commits will update the cache.</li>
* </ul>
*
* <p>Note: There is a horrible, obscure, and utterly bizarre bug in GAE's memcache
* relating to Key serialization. It manifests in certain circumstances when a Key
* has a parent Key that has the same String name. For this reason, we use the
* keyToString method to stringify Keys as cache keys. The actual structure
* stored in the memcache will be String -> Entity.</p>
*
* <p>Note2: Until Google adds a hook that lets us wrap native Future<?> implementations,
* this cache requires the AsyncCacheFilter to be installed. This wasn't necessary when
* the cache was synchronous, but async caching requires an extra hook for the end of
* a request when fired-and-forgotten put()s and delete()s get processed.</p>
*
* @author Jeff Schnitzer <jeff@infohazard.org>
*/
public class CachingAsyncDatastoreService implements AsyncDatastoreService
{
/** Source of metadata so we know which kinds to cache */
ObjectifyFactory fact;
/** The real datastore service objects - we need both */
AsyncDatastoreService rawAsync;
/** The cache itself; entries are stored keyed by stringified datastore Key */
MemcacheService memcache;
/**
* @param fact source of per-kind metadata (the {@code @Cached} annotation)
* @param rawAsync the real async datastore service being wrapped
* @param memcache the memcache service used as the write-through cache
*/
public CachingAsyncDatastoreService(ObjectifyFactory fact, AsyncDatastoreService rawAsync, MemcacheService memcache)
{
this.fact = fact;
this.rawAsync = rawAsync;
this.memcache = memcache;
}
/**
* Breaks down the map into groupings based on which are cacheable and for how long.
*
* @return a map of expiration to Key/Entity map for only the entities that are cacheable
*/
private Map<Integer, Map<Key, Entity>> categorize(Map<Key, Entity> entities)
{
Map<Integer, Map<Key, Entity>> result = new HashMap<Integer, Map<Key, Entity>>();
for (Map.Entry<Key, Entity> entry: entities.entrySet())
{
// Uncacheable kinds have no @Cached annotation and are silently skipped
Cached cachedAnno = this.fact.getMetadata(entry.getKey()).getCached(entry.getValue());
if (cachedAnno != null)
{
// Group by expiration so we can issue one putAll per expiry value
Integer expiry = cachedAnno.expirationSeconds();
Map<Key, Entity> grouping = result.get(expiry);
if (grouping == null)
{
grouping = new HashMap<Key, Entity>();
result.put(expiry, grouping);
}
grouping.put(entry.getKey(), entry.getValue());
}
}
return result;
}
/**
* Get values from the datastore, inserting negative results (null values) for any keys
* that are requested but don't come back.
*/
private Future<Map<Key, Entity>> getFromDatastore(Transaction txn, final Set<Key> stillNeeded)
{
Future<Map<Key, Entity>> prelim = this.rawAsync.get(txn, stillNeeded);
return new SimpleFutureWrapper<Map<Key, Entity>, Map<Key, Entity>>(prelim) {
@Override
protected Map<Key, Entity> wrap(Map<Key, Entity> t)
{
// Add null values for any keys not in the result set
if (t.size() != stillNeeded.size())
for (Key key: stillNeeded)
if (!t.containsKey(key))
t.put(key, null);
return t;
}
};
}
/** Hides the ugly casting and deals with String/Key conversion */
@SuppressWarnings({ "unchecked", "rawtypes" })
private Map<Key, Entity> getFromCacheRaw(Iterable<Key> keys)
{
// Keys are stringified to work around the memcache Key-serialization bug
// described in the class javadoc.
Collection<String> keysColl = new ArrayList<String>();
for (Key key: keys)
keysColl.add(KeyFactory.keyToString(key));
Map<String, Entity> rawResults;
try {
rawResults = (Map)this.memcache.getAll((Collection)keysColl);
}
catch (Exception ex) {
// This should only be an issue if Google changes the serialization
// format of an Entity. It's possible, but this is just a cache so we
// can safely ignore the error.
return new HashMap<Key, Entity>();
}
// Convert the string keys back into real Keys for the caller
Map<Key, Entity> keyMapped = new HashMap<Key, Entity>((int)(rawResults.size() * 1.5));
for(Map.Entry<String, Entity> entry: rawResults.entrySet())
keyMapped.put(KeyFactory.stringToKey(entry.getKey()), entry.getValue());
return keyMapped;
}
/**
* Get entries from cache. Ignores uncacheable keys.
*/
private Map<Key, Entity> getFromCache(Iterable<Key> keys)
{
Collection<Key> fetch = new ArrayList<Key>();
for (Key key: keys)
if (this.fact.getMetadata(key).mightBeInCache())
fetch.add(key);
return this.getFromCacheRaw(fetch);
}
/**
* Puts entries in the cache with the specified expiration.
* @param expirationSeconds can be -1 to indicate "keep as long as possible".
*/
@SuppressWarnings("rawtypes")
private void putInCache(Map<Key, Entity> entities, int expirationSeconds)
{
Map<String, Entity> rawMap = new HashMap<String, Entity>((int)(entities.size() * 1.5));
for (Map.Entry<Key, Entity> entry: entities.entrySet())
rawMap.put(KeyFactory.keyToString(entry.getKey()), entry.getValue());
// Negative expiration means "no explicit expiration" for memcache
if (expirationSeconds < 0)
this.memcache.putAll((Map)rawMap);
else
this.memcache.putAll((Map)rawMap, Expiration.byDeltaSeconds(expirationSeconds));
}
/**
* Puts entries in the cache with the appropriate expirations.
*/
void putInCache(Map<Key, Entity> entities)
{
// One putAll per distinct expiration value
Map<Integer, Map<Key, Entity>> categories = this.categorize(entities);
for (Map.Entry<Integer, Map<Key, Entity>> entry: categories.entrySet())
this.putInCache(entry.getValue(), entry.getKey());
}
/**
* Deletes from the cache, ignoring any noncacheable keys
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
void deleteFromCache(Iterable<Key> keys)
{
Collection<String> cacheables = new ArrayList<String>();
for (Key key: keys)
if (this.fact.getMetadata(key).mightBeInCache())
cacheables.add(KeyFactory.keyToString(key));
if (!cacheables.isEmpty())
this.memcache.deleteAll((Collection)cacheables);
}
/* (non-Javadoc)
* @see com.google.appengine.api.datastore.AsyncDatastoreService#allocateIds(java.lang.String, long)
*/
@Override
public Future<KeyRange> allocateIds(String kind, long num)
{
// Id allocation never touches the cache; straight pass-through
return this.rawAsync.allocateIds(kind, num);
}
/* (non-Javadoc)
* @see com.google.appengine.api.datastore.AsyncDatastoreService#allocateIds(com.google.appengine.api.datastore.Key, java.lang.String, long)
*/
@Override
public Future<KeyRange> allocateIds(Key parent, String kind, long num)
{
return this.rawAsync.allocateIds(parent, kind, num);
}
/* (non-Javadoc)
* @see com.google.appengine.api.datastore.AsyncDatastoreService#beginTransaction()
*/
@Override
public Future<Transaction> beginTransaction()
{
return new SimpleFutureWrapper<Transaction, Transaction>(this.rawAsync.beginTransaction()) {
TransactionWrapper xact;
@Override
protected Transaction wrap(Transaction t)
{
// wrap() may be invoked more than once; memoize so every caller
// sees the same TransactionWrapper instance
if (xact == null)
xact = new TransactionWrapper(CachingAsyncDatastoreService.this, t);
return xact;
}
};
}
/* (non-Javadoc)
* @see com.google.appengine.api.datastore.AsyncDatastoreService#delete(com.google.appengine.api.datastore.Key[])
*/
@Override
public Future<Void> delete(Key... keys)
{
return this.delete(null, keys);
}
/* (non-Javadoc)
* @see com.google.appengine.api.datastore.AsyncDatastoreService#delete(java.lang.Iterable)
*/
@Override
public Future<Void> delete(Iterable<Key> keys)
{
return this.delete(null, keys);
}
/* (non-Javadoc)
* @see com.google.appengine.api.datastore.AsyncDatastoreService#delete(com.google.appengine.api.datastore.Transaction, com.google.appengine.api.datastore.Key[])
*/
@Override
public Future<Void> delete(Transaction txn, Key... keys)
{
return this.delete(txn, Arrays.asList(keys));
}
/* (non-Javadoc)
* @see com.google.appengine.api.datastore.AsyncDatastoreService#delete(com.google.appengine.api.datastore.Transaction, java.lang.Iterable)
*/
@Override
public Future<Void> delete(final Transaction txn, final Iterable<Key> keys)
{
// The callback fires when the datastore delete completes; only then do
// we touch (or defer touching) the cache.
ListenableFuture<Void> future = new ListenableFuture<Void>(this.rawAsync.delete(txn, keys));
future.addCallback(new Runnable() {
@Override
public void run()
{
if (txn != null)
{
// Inside a transaction: defer cache invalidation until commit
for (Key key: keys)
((TransactionWrapper)txn).deferCacheDelete(key);
}
else
{
// No transaction: invalidate immediately
deleteFromCache(keys);
}
}
});
// Make the transaction wait for this future before committing
if (txn instanceof TransactionWrapper)
((TransactionWrapper)txn).enlist(future);
return future;
}
/* (non-Javadoc)
* @see com.google.appengine.api.datastore.AsyncDatastoreService#get(com.google.appengine.api.datastore.Key)
*/
@Override
public Future<Entity> get(Key key)
{
return this.get(null, key);
}
/* (non-Javadoc)
* @see com.google.appengine.api.datastore.AsyncDatastoreService#get(java.lang.Iterable)
*/
@Override
public Future<Map<Key, Entity>> get(Iterable<Key> keys)
{
return this.get(null, keys);
}
/* (non-Javadoc)
* @see com.google.appengine.api.datastore.AsyncDatastoreService#get(com.google.appengine.api.datastore.Transaction, com.google.appengine.api.datastore.Key)
*/
@Override
public Future<Entity> get(Transaction txn, final Key key)
{
// Delegate to the bulk get, then translate an absent (stripped negative)
// result into the EntityNotFoundException the single-get contract requires.
Future<Map<Key, Entity>> bulk = this.get(txn, Collections.singleton(key));
return new SimpleFutureWrapper<Map<Key, Entity>, Entity>(bulk) {
@Override
protected Entity wrap(Map<Key, Entity> entities) throws Exception
{
Entity ent = entities.get(key);
if (ent == null)
throw new EntityNotFoundException(key);
else
return ent;
}
};
}
/* (non-Javadoc)
* @see com.google.appengine.api.datastore.AsyncDatastoreService#get(com.google.appengine.api.datastore.Transaction, java.lang.Iterable)
*/
@Override
public Future<Map<Key, Entity>> get(Transaction txn, Iterable<Key> keys)
{
if (txn != null)
{
// Must not populate the cache since we are looking at a frozen moment in time.
return this.rawAsync.get(txn, keys);
}
else
{
// soFar will not contain uncacheables, but it will have negative results
Map<Key, Entity> soFar = this.getFromCache(keys);
Set<Key> stillNeeded = new HashSet<Key>();
for (Key getKey: keys)
if (!soFar.containsKey(getKey))
stillNeeded.add(getKey);
// Maybe we need to fetch some more
Future<Map<Key, Entity>> pending = null;
if (!stillNeeded.isEmpty())
{
// Includes negative results
Future<Map<Key, Entity>> fromDatastore = this.getFromDatastore(txn, stillNeeded);
// When the datastore fetch completes, write the results (including
// negatives) back to the cache as a side effect.
final ListenableFuture<Map<Key, Entity>> listenable = new ListenableFuture<Map<Key, Entity>>(fromDatastore);
listenable.addCallback(new Runnable() {
@Override
public void run()
{
try
{
putInCache(listenable.get());
}
catch (Exception e)
{
// Not entirely certain what to do with this
throw new RuntimeException(e);
}
}
});
pending = listenable;
}
// Merge the cache hits with the (possibly null) datastore fetch
Future<Map<Key, Entity>> merged = new MergeFuture<Key, Entity>(soFar, pending);
// Need to strip out any negative results
Future<Map<Key, Entity>> stripped = new SimpleFutureWrapper<Map<Key, Entity>, Map<Key, Entity>>(merged) {
@Override
protected Map<Key, Entity> wrap(Map<Key, Entity> t)
{
Iterator<Entity> it = t.values().iterator();
while (it.hasNext())
if (it.next() == null)
it.remove();
return t;
}
};
// NOTE(review): txn is always null on this branch (we are in the else of
// "txn != null"), so this instanceof check can never fire -- looks like
// dead code; confirm before removing.
if (txn instanceof TransactionWrapper)
((TransactionWrapper)txn).enlist(stripped);
return stripped;
}
}
/* (non-Javadoc)
* @see com.google.appengine.api.datastore.BaseDatastoreService#getActiveTransactions()
*/
@Override
public Collection<Transaction> getActiveTransactions()
{
// This would conflict with the wrapped transaction object
throw new UnsupportedOperationException();
}
/* (non-Javadoc)
* @see com.google.appengine.api.datastore.BaseDatastoreService#getCurrentTransaction()
*/
@Override
public Transaction getCurrentTransaction()
{
// This would conflict with the wrapped transaction object
throw new UnsupportedOperationException();
}
/* (non-Javadoc)
* @see com.google.appengine.api.datastore.BaseDatastoreService#getCurrentTransaction(com.google.appengine.api.datastore.Transaction)
*/
@Override
public Transaction getCurrentTransaction(Transaction txn)
{
// This would conflict with the wrapped transaction object
throw new UnsupportedOperationException();
}
/* (non-Javadoc)
* @see com.google.appengine.api.datastore.BaseDatastoreService#prepare(com.google.appengine.api.datastore.Query)
*/
@Override
public PreparedQuery prepare(Query query)
{
// Queries never interact with the cache (see class javadoc)
return this.rawAsync.prepare(query);
}
/* (non-Javadoc)
* @see com.google.appengine.api.datastore.BaseDatastoreService#prepare(com.google.appengine.api.datastore.Transaction, com.google.appengine.api.datastore.Query)
*/
@Override
public PreparedQuery prepare(Transaction txn, Query query)
{
return this.rawAsync.prepare(txn, query);
}
/* (non-Javadoc)
* @see com.google.appengine.api.datastore.AsyncDatastoreService#put(com.google.appengine.api.datastore.Entity)
*/
@Override
public Future<Key> put(Entity entity)
{
return this.put(null, entity);
}
/* (non-Javadoc)
* @see com.google.appengine.api.datastore.DatastoreService#put(java.lang.Iterable)
*/
@Override
public Future<List<Key>> put(Iterable<Entity> entities)
{
return this.put(null, entities);
}
/* (non-Javadoc)
* @see com.google.appengine.api.datastore.AsyncDatastoreService#put(com.google.appengine.api.datastore.Transaction, com.google.appengine.api.datastore.Entity)
*/
@Override
public Future<Key> put(final Transaction txn, final Entity entity)
{
final ListenableFuture<Key> result = new ListenableFuture<Key>(this.rawAsync.put(txn, entity));
result.addCallback(new Runnable() {
@Override
public void run()
{
// This forces the GAE future to update the key in the entity
FutureHelper.quietGet(result);
// Cacheability checking is handled inside these methods
if (txn != null)
((TransactionWrapper)txn).deferCachePut(entity);
else
putInCache(Collections.singletonMap(entity.getKey(), entity));
}
});
// Make the transaction wait for this future before committing
if (txn instanceof TransactionWrapper)
((TransactionWrapper)txn).enlist(result);
return result;
}
/* (non-Javadoc)
* @see com.google.appengine.api.datastore.AsyncDatastoreService#put(com.google.appengine.api.datastore.Transaction, java.lang.Iterable)
*/
@Override
public Future<List<Key>> put(final Transaction txn, final Iterable<Entity> entities)
{
final ListenableFuture<List<Key>> result = new ListenableFuture<List<Key>>(this.rawAsync.put(txn, entities));
result.addCallback(new Runnable() {
@Override
public void run()
{
// This forces the GAE future to update the keys in the entities
FutureHelper.quietGet(result);
if (txn != null)
{
// Inside a transaction: defer cache writes until commit
for (Entity ent: entities)
((TransactionWrapper)txn).deferCachePut(ent);
}
else
{
// No transaction: write through to the cache immediately
Map<Key, Entity> map = new HashMap<Key, Entity>();
for (Entity entity: entities)
map.put(entity.getKey(), entity);
putInCache(map);
}
}
});
if (txn instanceof TransactionWrapper)
((TransactionWrapper)txn).enlist(result);
return result;
}
}
| |
/*****************************************************************************
* Copyright 2007-2015 DCA-FEEC-UNICAMP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Contributors:
* Patricia Rocha de Toro, Elisa Calhau de Castro, Ricardo Ribeiro Gudwin
*****************************************************************************/
package worldserver3d.view;
import java.awt.*;
import java.awt.event.*;
import javax.swing.*;
import model.Thing;
import model.Food;
import java.util.List;
import java.util.ArrayList;
import java.util.logging.Logger;
import model.Environment;
import util.Constants;
import worldserver3d.ThingCreator;
/**
*
* @author eccastro
*/
/**
 * Swing frame for editing a food {@link Thing} in the 3D world editor.
 *
 * <p>Lets the user create a (non-)perishable food at a chosen world position,
 * toggle its hidden state, delete it, or spawn a creature/jewel at the same
 * spot. The UI is built asynchronously on the EDT (see the constructor), so
 * widget fields such as {@code ta} are only valid after that runnable has run.
 *
 * @author eccastro
 */
public class EditFoodFrame extends JFrame {

    // Food thing currently being edited; null until setFood() is called or a
    // food is created via one of the radio buttons.
    Thing food;
    // Last jewel spawned through the "New Jewel" button.
    Thing jewel;
    Thing DS;
    JPanel mainPanel;
    JPanel panel1;
    JPanel panel2;
    JPanel panel3;
    JButton closeButton;
    JButton deleteButton;
    JButton newCreatureButton;
    JButton newJewelButton;
    private Environment e;
    // Checkbox reflecting/controlling whether the food is hidden in the world.
    private JCheckBoxMenuItem hiddenObstacle;
    //JComboBox combo;
    // NOTE(review): this field is shadowed by a local of the same name in
    // createFoodTypeButtons() and is never assigned — confirm it is unused.
    JPanel radioPanel;
    // Index 0 = non-perishable radio, index 1 = perishable radio.
    List<JRadioButton> typeOfFood;
    // World coordinates the frame currently operates on (set via setXY).
    public double x, y;
    // Dummy model used to clear the radio-button selection as a group.
    private DefaultButtonModel model;
    private ButtonGroup group;
    // Read-only text area showing the name of the edited food.
    JTextArea ta;
    Logger log;

    /**
     * Creates the frame for the given environment. The actual Swing widgets
     * are constructed later on the EDT via {@link SwingUtilities#invokeLater}.
     *
     * @param e the world environment this editor operates on
     */
    public EditFoodFrame(final Environment e) {
        log = Logger.getLogger(EditFoodFrame.class.getCanonicalName());
        this.food = null;
        this.e = e;
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                createSwingStuff();
            }
        });
    }

    /**
     * Binds an existing food to this editor and syncs the widgets (name label,
     * perishable radio selection, hidden checkbox) to its state.
     *
     * <p>NOTE(review): this method must only run after the EDT has built the
     * UI, otherwise {@code ta} is still null. It also ends with
     * {@code setVisible(false)} — confirm hiding the frame here is intended.
     *
     * @param fd the food to edit
     */
    public void setFood(Thing fd) {
        this.food = fd;
        ta.setText(" "+this.food.getMyName());
        if (((Food) this.food).perishable) {
            typeOfFood.get(1).setSelected(true);
        } else {
            typeOfFood.get(0).setSelected(true);
        }
        hiddenObstacle.setEnabled(true);
        if (food.returnIfWasHidden()) {
            hiddenObstacle.setSelected(true);
        } else {
            hiddenObstacle.setSelected(false);
        }
        setVisible(false);
    }

    /* Method called by Environment to reset the frame for creating a new
     * thing: visibility checkbox cleared/disabled, both food-type radios and
     * the creature/jewel buttons enabled, and the radio selection cleared.
     */
    public void showForCreation() {
        hiddenObstacle.setSelected(false);
        hiddenObstacle.setEnabled(false);
        for (JRadioButton tof : typeOfFood) {
            tof.setEnabled(true);
        }
        newCreatureButton.setEnabled(true);
        newJewelButton.setEnabled(true);
        // Selecting the dummy model deselects every real radio in the group.
        group.setSelected(model, true);
    }

    /**
     * Builds the panel holding the two food-type radio buttons.
     *
     * @param array food-type labels; currently unused — the labels are taken
     *              directly from the environment (kept for interface stability)
     * @return panel containing the grouped radio buttons
     */
    private JPanel createFoodTypeButtons(String[] array) {
        JPanel radioPanel = new JPanel();
        typeOfFood = new ArrayList<JRadioButton>();
        JRadioButton type0Radio = new JRadioButton(this.e.nonPerishableFood);
        JRadioButton type1Radio = new JRadioButton(this.e.perishableFood);
        typeOfFood.add(type0Radio);
        typeOfFood.add(type1Radio);
        type0Radio.setActionCommand(this.e.nonPerishableFood);
        type1Radio.setActionCommand(this.e.perishableFood);
        // Register a listener for the radio buttons.
        RadioListener myListener = new RadioListener(type0Radio, type1Radio, newCreatureButton, newJewelButton);
        type0Radio.addActionListener(myListener);
        type1Radio.addActionListener(myListener);
        // Group the radio buttons.
        group = new ButtonGroup();
        group.add(type0Radio);
        group.add(type1Radio);
        // Dummy model registered so the group selection can be cleared later.
        model = new DefaultButtonModel();
        group.setSelected(model, false);
        radioPanel.setLayout(new GridLayout(0, 1));
        radioPanel.add(type0Radio);
        radioPanel.add(type1Radio);
        return radioPanel;
    }

    /**
     * Sets the world coordinates this editor will act upon.
     *
     * @param x world x coordinate
     * @param y world y coordinate
     */
    public void setXY(int x, int y) {
        this.x = x;
        this.y = y;
    }

    /**
     * Spawns a creature at the given position.
     *
     * Note that through the edit window the motor system of the robot is
     * always a differential steering approach.
     *
     * @param mouseXini x position for the new creature
     * @param mouseYini y position for the new creature
     */
    public void placeCreatureHere(double mouseXini, double mouseYini) {
        ThingCreator tc = new ThingCreator(e);
        tc.createCreature(true, mouseXini, mouseYini, 0);
    }

    /**
     * Spawns a jewel at the given position.
     *
     * <p>Fixed: the original implementation ignored both parameters and used
     * the {@code x},{@code y} fields instead; the only caller passes those
     * same fields, so honoring the parameters is behavior-compatible while
     * making the method correct for any caller.
     *
     * @param mouseXini x position for the new jewel
     * @param mouseYini y position for the new jewel
     */
    public void placeJewelHere(double mouseXini, double mouseYini) {
        ThingCreator tc = new ThingCreator(e);
        jewel = tc.createThing(Constants.categoryJEWEL, mouseXini, mouseYini);
    }

    /**
     * Refreshes widget enabled/selected state for an already-existing food:
     * creation controls disabled, hidden checkbox synced to the food.
     */
    public void update() {
        newCreatureButton.setEnabled(false);
        newJewelButton.setEnabled(false);
        if (food != null) {
            for (JRadioButton tof : typeOfFood) {
                tof.setEnabled(false);
            }
            group.setSelected(model, false);
            hiddenObstacle.setEnabled(true);
            if (food.returnIfWasHidden()) {
                hiddenObstacle.setSelected(true);
            } else {
                hiddenObstacle.setSelected(false);
            }
        }
    }

    /** @return the frame's root content panel (built on the EDT). */
    public JPanel getMainPanel() {
        return mainPanel;
    }

    /**
     * Listener for the food-type radios: creates the chosen kind of food at
     * the current (x, y), disables the other option and the creation buttons,
     * then rebinds the editor to the new food.
     */
    class RadioListener implements ActionListener {

        private JRadioButton NPFoodRadio;
        private JRadioButton PFoodRadio;
        private JButton newCreatureButton;
        private JButton newJewelButton;

        public RadioListener(JRadioButton NPFoodRadio, JRadioButton PFoodRadio, JButton newCreatureButton, JButton newJewelButton) {
            this.NPFoodRadio = NPFoodRadio;
            this.PFoodRadio = PFoodRadio;
            this.newCreatureButton = newCreatureButton;
            this.newJewelButton = newJewelButton;
        }

        public void actionPerformed(ActionEvent ev) {
            ThingCreator tc = new ThingCreator(e);
            if (e.nonPerishableFood.equals(ev.getActionCommand())) {
                food = tc.createThing(Constants.categoryNPFOOD, x, y);
                PFoodRadio.setEnabled(false);
            } else { //perishable food
                food = tc.createThing(Constants.categoryPFOOD, x, y);
                NPFoodRadio.setEnabled(false);
            }
            newCreatureButton.setEnabled(false);
            newJewelButton.setEnabled(false);
            setFood(food);
        }
    }

    /**
     * Builds the whole Swing UI. Must run on the EDT (invoked from the
     * constructor via invokeLater). The frame is packed but left hidden.
     */
    private void createSwingStuff() {
        mainPanel = new JPanel();
        panel1 = new JPanel();
        panel2 = new JPanel();
        panel3 = new JPanel();
        closeButton = new JButton("Close");
        deleteButton = new JButton("Delete me!");
        ta = new JTextArea("");
        ta.setEditable(false);
        newCreatureButton = new JButton("New Creature");
        newCreatureButton.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                log.info("*** Create creature! ***");
                placeCreatureHere(x, y);
                setVisible(false);
            }
        });
        newJewelButton = new JButton("New Jewel");
        newJewelButton.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                log.info("*** Create Jewel! ***");
                placeJewelHere(x, y);
                setVisible(false);
            }
        });
        closeButton.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                setVisible(false);
            }
        });
        deleteButton.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent ev) {
                log.info("*** Food deleted! ***");
                // Remove the reminder icon first, then the thing itself.
                food.removeRememberMeIcon(e);
                e.removeThing(food);
                setVisible(false);
            }
        });
        panel1.setBorder(
                BorderFactory.createCompoundBorder(
                BorderFactory.createCompoundBorder(
                BorderFactory.createTitledBorder("Food type"),
                BorderFactory.createEmptyBorder(5, 5, 5, 5)),
                panel1.getBorder()));
        panel1.setLayout(new GridLayout(1, 1));
        String[] foodTypes = {this.e.nonPerishableFood, this.e.perishableFood};
        panel1.add(createFoodTypeButtons(foodTypes));
        mainPanel.setLayout(new BorderLayout());
        mainPanel.add(ta, BorderLayout.PAGE_START);
        mainPanel.add(panel1, BorderLayout.LINE_START);
        panel2.setBorder(
                BorderFactory.createCompoundBorder(
                BorderFactory.createCompoundBorder(
                BorderFactory.createTitledBorder("Visibility"),
                BorderFactory.createEmptyBorder(5, 5, 5, 5)),
                panel2.getBorder()));
        panel2.setLayout(new GridLayout(1, 1));
        hiddenObstacle = new JCheckBoxMenuItem("I'm hidden");
        hiddenObstacle.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent ae) {
                try {
                    //System.out.println("======= Event command name : "+e.getActionCommand()+" and params: "+e.paramString() );
                    if (hiddenObstacle.isSelected()) {
                        food.hideMe(e);
                        hiddenObstacle.setSelected(true);
                    } else {
                        food.undoHideMe(e);
                        hiddenObstacle.setSelected(false);
                    }
                    //theMainInstance.sf.gameState.ThingsRN.updateRenderState();
                } catch (Exception ex) {
                    ex.printStackTrace();
                    JOptionPane.showMessageDialog(EditFoodFrame.this, "Error in hidden obstacle execution.", "ERRO", JOptionPane.ERROR_MESSAGE);
                }
            }
        });
        panel2.add(hiddenObstacle);
        mainPanel.add(panel2, BorderLayout.LINE_END);
        panel3.setLayout(new GridLayout(1, 4));
        panel3.add(newCreatureButton);
        panel3.add(newJewelButton);
        panel3.add(deleteButton);
        panel3.add(closeButton);
        mainPanel.add(panel3, BorderLayout.PAGE_END);
        add(mainPanel);
        pack();
        setVisible(false);
        setResizable(false);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cdancy.bitbucket.rest.features;
import static org.assertj.core.api.Assertions.assertThat;
import static com.cdancy.bitbucket.rest.BitbucketConstants.CREDENTIALS_ENVIRONMENT_VARIABLE;
import static com.cdancy.bitbucket.rest.BitbucketConstants.CREDENTIALS_SYSTEM_PROPERTY;
import static com.cdancy.bitbucket.rest.BitbucketConstants.ENDPOINT_ENVIRONMENT_VARIABLE;
import static com.cdancy.bitbucket.rest.BitbucketConstants.ENDPOINT_SYSTEM_PROPERTY;
import static com.cdancy.bitbucket.rest.BitbucketConstants.TOKEN_ENVIRONMENT_VARIABLE;
import static com.cdancy.bitbucket.rest.BitbucketConstants.TOKEN_SYSTEM_PROPERTY;
import org.testng.annotations.Test;
import com.cdancy.bitbucket.rest.BaseBitbucketApiLiveTest;
import com.cdancy.bitbucket.rest.BitbucketAuthentication;
import com.cdancy.bitbucket.rest.BitbucketClient;
import com.cdancy.bitbucket.rest.TestUtilities;
import com.cdancy.bitbucket.rest.auth.AuthenticationType;
import com.cdancy.bitbucket.rest.domain.admin.UserPage;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.jclouds.javax.annotation.Nullable;
/**
 * Live tests for {@link BitbucketClient} construction: verifies that endpoint
 * and authentication can be supplied explicitly, via system properties, or via
 * environment variables, and that jclouds overrides are honored.
 *
 * <p>Each test that mutates system properties or environment variables cleans
 * up before and after itself so state does not leak between tests.
 */
@Test(groups = "live", testName = "BitbucketClientLiveTest", singleThreaded = true)
@SuppressWarnings("PMD.TooManyStaticImports")
public class BitbucketClientLiveTest extends BaseBitbucketApiLiveTest {

    private static final String DUMMY_ENDPOINT = "http://some-non-existent-host:12345";
    private static final String SYSTEM_JCLOUDS_TIMEOUT = "bitbucket.rest.jclouds.so-timeout";
    private static final String ENVIRONMENT_JCLOUDS_TIMEOUT = "BITBUCKET_REST_JCLOUDS_SO-TIMEOUT";

    /** Client built with explicit endpoint + credentials works. */
    @Test
    public void testCreateClient() {
        final BitbucketClient client = new BitbucketClient(this.endpoint, this.bitbucketAuthentication, null, null);
        final UserPage userPage = client.api().adminApi().listUsers(null, 1, 1);
        assertThat(userPage).isNotNull();
        assertThat(userPage.errors()).isEmpty();
    }

    /** Same as above but through the fluent builder. */
    @Test
    public void testCreateClientWithBuilder() {
        final BitbucketClient.Builder builder = BitbucketClient.builder();
        switch (bitbucketAuthentication.authType()) {
            case Anonymous: break;
            case Basic:
                builder.credentials(bitbucketAuthentication.authValue());
                break;
            case Bearer:
                builder.token(bitbucketAuthentication.authValue());
                break;
            default: break;
        }
        final BitbucketClient client = builder.endPoint(this.endpoint).build();
        final UserPage userPage = client.api().adminApi().listUsers(null, 1, 1);
        assertThat(userPage).isNotNull();
        assertThat(userPage.errors()).isEmpty();
    }

    /** Bad credentials must surface as errors in the response page. */
    @Test
    public void testCreateClientWithWrongCredentials() {
        final BitbucketAuthentication auth = BitbucketAuthentication
                .builder()
                .credentials(TestUtilities.randomStringLettersOnly())
                .build();
        final BitbucketClient client = new BitbucketClient(this.endpoint, auth, null, null);
        final UserPage userPage = client.api().adminApi().listUsers(null, 1, 1);
        assertThat(userPage).isNotNull();
        assertThat(userPage.errors()).isNotEmpty();
    }

    /** Endpoint can come from the endpoint system property. */
    @Test
    public void testCreateClientWithEndpointFromSystemProperties() {
        clearSystemProperties();
        System.setProperty(ENDPOINT_SYSTEM_PROPERTY, this.endpoint);
        final BitbucketClient client = new BitbucketClient(null, this.bitbucketAuthentication, null, null);
        assertThat(client.endPoint()).isEqualTo(this.endpoint);
        final UserPage userPage = client.api().adminApi().listUsers(null, 1, 1);
        assertThat(userPage).isNotNull();
        assertThat(userPage.errors()).isEmpty();
        clearSystemProperties();
    }

    /** A bogus endpoint from system properties is used as-is and fails. */
    @Test
    public void testCreateClientWithWrongEndpointFromSystemProperties() {
        clearSystemProperties();
        System.setProperty(ENDPOINT_SYSTEM_PROPERTY, DUMMY_ENDPOINT);
        final BitbucketClient client = new BitbucketClient(null, this.bitbucketAuthentication, null, null);
        assertThat(client.endPoint()).isEqualTo(DUMMY_ENDPOINT);
        final UserPage userPage = client.api().adminApi().listUsers(null, 1, 1);
        assertThat(userPage).isNotNull();
        assertThat(userPage.errors()).isNotEmpty();
        clearSystemProperties();
    }

    /** Credentials/token can come from the matching system property. */
    @Test
    public void testCreateClientWithAuthenticationFromSystemProperties() {
        clearSystemProperties();
        final AuthenticationType currentAuthType = this.bitbucketAuthentication.authType();
        final String correctAuth = this.bitbucketAuthentication.authValue();
        if (currentAuthType == AuthenticationType.Basic) {
            System.setProperty(CREDENTIALS_SYSTEM_PROPERTY, correctAuth);
        } else if (currentAuthType == AuthenticationType.Bearer) {
            System.setProperty(TOKEN_SYSTEM_PROPERTY, correctAuth);
        }
        final BitbucketClient client = new BitbucketClient(this.endpoint, null, null, null);
        assertThat(client.authType()).isEqualTo(currentAuthType);
        assertThat(client.authValue()).isEqualTo(correctAuth);
        final UserPage userPage = client.api().adminApi().listUsers(null, 1, 1);
        assertThat(userPage).isNotNull();
        assertThat(userPage.errors()).isEmpty();
        clearSystemProperties();
    }

    /** Wrong auth from system properties is picked up but rejected by the server. */
    @Test
    public void testCreateClientWithWrongAuthenticationFromSystemProperties() {
        clearSystemProperties();
        final AuthenticationType currentAuthType = this.bitbucketAuthentication.authType();
        final String wrongAuth = TestUtilities.randomStringLettersOnly();
        if (currentAuthType == AuthenticationType.Basic) {
            System.setProperty(CREDENTIALS_SYSTEM_PROPERTY, wrongAuth);
        } else if (currentAuthType == AuthenticationType.Bearer) {
            System.setProperty(TOKEN_SYSTEM_PROPERTY, wrongAuth);
        }
        final BitbucketClient client = new BitbucketClient(this.endpoint, null, null, null);
        assertThat(client.authType()).isEqualTo(currentAuthType);
        assertThat(client.authValue()).isEqualTo(wrongAuth);
        final UserPage userPage = client.api().adminApi().listUsers(null, 1, 1);
        assertThat(userPage).isNotNull();
        assertThat(userPage.errors()).isNotEmpty();
        clearSystemProperties();
    }

    /** Endpoint can come from the endpoint environment variable. */
    @Test
    public void testCreateClientWithEndpointFromEnvironmentVariables() {
        clearEnvironmentVariables(null);
        final Map<String, String> envVars = Maps.newHashMap();
        envVars.put(ENDPOINT_ENVIRONMENT_VARIABLE, this.endpoint);
        TestUtilities.addEnvironmentVariables(envVars);
        final BitbucketClient client = new BitbucketClient(null, this.bitbucketAuthentication, null, null);
        assertThat(client.endPoint()).isEqualTo(this.endpoint);
        final UserPage userPage = client.api().adminApi().listUsers(null, 1, 1);
        assertThat(userPage).isNotNull();
        assertThat(userPage.errors()).isEmpty();
        clearEnvironmentVariables(null);
    }

    /** A bogus endpoint from the environment is used as-is and fails. */
    @Test
    public void testCreateClientWithWrongEndpointFromEnvironmentVariables() {
        clearEnvironmentVariables(null);
        final Map<String, String> envVars = Maps.newHashMap();
        envVars.put(ENDPOINT_ENVIRONMENT_VARIABLE, DUMMY_ENDPOINT);
        TestUtilities.addEnvironmentVariables(envVars);
        final BitbucketClient client = new BitbucketClient(null, this.bitbucketAuthentication, null, null);
        assertThat(client.endPoint()).isEqualTo(DUMMY_ENDPOINT);
        final UserPage userPage = client.api().adminApi().listUsers(null, 1, 1);
        assertThat(userPage).isNotNull();
        assertThat(userPage.errors()).isNotEmpty();
        clearEnvironmentVariables(null);
    }

    /** Credentials/token can come from the matching environment variable. */
    @Test
    public void testCreateClientWithAuthenticationFromEnvironmentVariables() {
        clearEnvironmentVariables(null);
        final AuthenticationType currentAuthType = this.bitbucketAuthentication.authType();
        final String correctAuth = this.bitbucketAuthentication.authValue();
        final String correctAuthType;
        switch (currentAuthType) {
            case Basic:
                correctAuthType = CREDENTIALS_ENVIRONMENT_VARIABLE;
                break;
            case Bearer:
                correctAuthType = TOKEN_ENVIRONMENT_VARIABLE;
                break;
            default:
                correctAuthType = null;
                break;
        }
        final Map<String, String> envVars = Maps.newHashMap();
        envVars.put(correctAuthType, correctAuth);
        TestUtilities.addEnvironmentVariables(envVars);
        final BitbucketClient client = new BitbucketClient(this.endpoint, null, null, null);
        assertThat(client.authType()).isEqualTo(currentAuthType);
        assertThat(client.authValue()).isEqualTo(correctAuth);
        final UserPage userPage = client.api().adminApi().listUsers(null, 1, 1);
        assertThat(userPage).isNotNull();
        assertThat(userPage.errors()).isEmpty();
        clearEnvironmentVariables(null);
    }

    /** Wrong auth from the environment is picked up but rejected by the server. */
    @Test
    public void testCreateClientWithWrongAuthenticationFromEnvironmentVariables() {
        clearEnvironmentVariables(null);
        final AuthenticationType currentAuthType = this.bitbucketAuthentication.authType();
        final String wrongAuth = TestUtilities.randomStringLettersOnly();
        final String correctAuthType;
        switch (currentAuthType) {
            case Basic:
                correctAuthType = CREDENTIALS_ENVIRONMENT_VARIABLE;
                break;
            case Bearer:
                correctAuthType = TOKEN_ENVIRONMENT_VARIABLE;
                break;
            default:
                correctAuthType = null;
                break;
        }
        final Map<String, String> envVars = Maps.newHashMap();
        envVars.put(correctAuthType, wrongAuth);
        TestUtilities.addEnvironmentVariables(envVars);
        final BitbucketClient client = new BitbucketClient(this.endpoint, null, null, null);
        assertThat(client.authType()).isEqualTo(currentAuthType);
        assertThat(client.authValue()).isEqualTo(wrongAuth);
        final UserPage userPage = client.api().adminApi().listUsers(null, 1, 1);
        assertThat(userPage).isNotNull();
        assertThat(userPage.errors()).isNotEmpty();
        clearEnvironmentVariables(null);
    }

    /** Negative socket timeout passed as an override must fail fast. */
    @Test
    public void testCreateClientWithOverridesAndFail() {
        final Properties properties = new Properties();
        properties.put("jclouds.so-timeout", -1);
        final BitbucketClient client = new BitbucketClient(this.endpoint, this.bitbucketAuthentication, properties, null);
        final UserPage userPage = client.api().adminApi().listUsers(null, 1, 1);
        assertThat(userPage).isNotNull();
        assertThat(userPage.errors()).isNotEmpty();
        assertThat(userPage.errors().get(0).context()).contains("timeouts can't be negative");
    }

    /** Negative socket timeout from a system property must fail fast. */
    @Test
    public void testCreateClientWithOverridesFromSystemPropertiesAndFail() {
        System.clearProperty(SYSTEM_JCLOUDS_TIMEOUT);
        System.setProperty(SYSTEM_JCLOUDS_TIMEOUT, "-1");
        final BitbucketClient client = new BitbucketClient(this.endpoint, this.bitbucketAuthentication, null, null);
        final UserPage userPage = client.api().adminApi().listUsers(null, 1, 1);
        assertThat(userPage).isNotNull();
        assertThat(userPage.errors()).isNotEmpty();
        assertThat(userPage.errors().get(0).context()).contains("timeouts can't be negative");
        System.clearProperty(SYSTEM_JCLOUDS_TIMEOUT);
    }

    /** Negative socket timeout from an environment variable must fail fast. */
    @Test
    public void testCreateClientWithOverridesFromEnvironmentVariablesAndFail() {
        final Map<String, String> envVars = Maps.newHashMap();
        envVars.put(ENVIRONMENT_JCLOUDS_TIMEOUT, "-1");
        TestUtilities.addEnvironmentVariables(envVars);
        final BitbucketClient client = new BitbucketClient(this.endpoint, this.bitbucketAuthentication, null, null);
        final UserPage userPage = client.api().adminApi().listUsers(null, 1, 1);
        assertThat(userPage).isNotNull();
        assertThat(userPage.errors()).isNotEmpty();
        assertThat(userPage.errors().get(0).context()).contains("timeouts can't be negative");
        clearEnvironmentVariables(envVars.keySet());
    }

    /** Clears every system property this test class may have set. */
    private void clearSystemProperties() {
        System.clearProperty(ENDPOINT_SYSTEM_PROPERTY);
        System.clearProperty(CREDENTIALS_SYSTEM_PROPERTY);
        System.clearProperty(TOKEN_SYSTEM_PROPERTY);
    }

    /**
     * Clears every environment variable this test class may have set.
     *
     * <p>Fixed: the original cleared the {@code *_SYSTEM_PROPERTY} names from
     * the environment, so the {@code *_ENVIRONMENT_VARIABLE} entries the tests
     * actually add were never removed and leaked between tests. The raw
     * {@code Collection} parameter is now {@code Collection<String>} (callers
     * pass {@code null} or a {@code Set<String>}).
     *
     * @param optionalKeysToClear extra variable names to remove, or null
     */
    private void clearEnvironmentVariables(@Nullable final Collection<String> optionalKeysToClear) {
        final List<String> envVars = Lists.newArrayList(ENDPOINT_ENVIRONMENT_VARIABLE);
        envVars.add(CREDENTIALS_ENVIRONMENT_VARIABLE);
        envVars.add(TOKEN_ENVIRONMENT_VARIABLE);
        if (optionalKeysToClear != null) {
            envVars.addAll(optionalKeysToClear);
        }
        TestUtilities.removeEnvironmentVariables(envVars);
    }
}
| |
/*
* Copyright (c) 2017, Robin Weymans <Robin.weymans@gmail.com>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.client.plugins.hunter;
import com.google.common.eventbus.Subscribe;
import com.google.inject.Provides;
import java.time.Instant;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import javax.inject.Inject;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import net.runelite.api.Client;
import net.runelite.api.GameObject;
import net.runelite.api.ObjectID;
import net.runelite.api.Player;
import net.runelite.api.Tile;
import net.runelite.api.coords.LocalPoint;
import net.runelite.api.coords.WorldPoint;
import net.runelite.api.events.ConfigChanged;
import net.runelite.api.events.GameObjectSpawned;
import net.runelite.api.events.GameTick;
import net.runelite.client.Notifier;
import net.runelite.client.config.ConfigManager;
import net.runelite.client.plugins.Plugin;
import net.runelite.client.plugins.PluginDescriptor;
import net.runelite.client.util.QueryRunner;
@Slf4j
@PluginDescriptor(
name = "Hunter"
)
public class HunterPlugin extends Plugin
{
    @Inject
    private Client client;

    @Inject
    private QueryRunner queryRunner;

    // Overlay that renders trap state; exposed via @Getter for the overlay manager.
    @Inject
    @Getter
    private TrapOverlay overlay;

    @Inject
    private Notifier notifier;

    @Inject
    private HunterConfig config;

    // Traps placed by the local player, keyed by the world tile they occupy.
    @Getter
    private final Map<WorldPoint, HunterTrap> traps = new HashMap<>();

    // Time of the last trap interaction (place/catch/fail); epoch 0 means "never".
    @Getter
    private Instant lastActionTime = Instant.ofEpochMilli(0);

    // Local player's position as of the PREVIOUS game tick; used in
    // onGameObjectSpawned to attribute freshly placed traps to the player.
    private WorldPoint lastTickLocalPlayerLocation;

    @Provides
    HunterConfig provideConfig(ConfigManager configManager)
    {
        return configManager.getConfig(HunterConfig.class);
    }

    /** Syncs overlay colors/settings with the current config on startup. */
    @Override
    protected void startUp()
    {
        overlay.updateConfig();
    }

    /** Resets all tracked state when the plugin is disabled. */
    @Override
    protected void shutDown() throws Exception
    {
        lastActionTime = Instant.ofEpochMilli(0);
        traps.clear();
    }

    /**
     * Tracks trap lifecycle by watching game objects spawn: attributes newly
     * placed traps to the local player, and updates the state (full / empty /
     * transitioning) of traps already in {@link #traps} when their object id
     * changes.
     */
    @Subscribe
    public void onGameObjectSpawned(GameObjectSpawned event)
    {
        final GameObject gameObject = event.getGameObject();
        // Non-null only if a tracked trap already occupies this tile.
        final HunterTrap myTrap = traps.get(gameObject.getWorldLocation());
        final Player localPlayer = client.getLocalPlayer();
        switch (gameObject.getId())
        {
            /*
             * ------------------------------------------------------------------------------
             * Placing traps
             * ------------------------------------------------------------------------------
             */
            case ObjectID.DEADFALL: // Deadfall trap placed
            case ObjectID.MONKEY_TRAP: // Maniacal monkey trap placed
                // If player is right next to "object" trap assume that player placed the trap
                if (localPlayer.getWorldLocation().distanceTo(gameObject.getWorldLocation()) <= 1)
                {
                    log.debug("Trap placed by \"{}\" on {}", localPlayer.getName(), gameObject.getWorldLocation());
                    traps.put(gameObject.getWorldLocation(), new HunterTrap(gameObject));
                    lastActionTime = Instant.now();
                }
                break;
            case ObjectID.MAGIC_BOX: // Imp box placed
            case ObjectID.BOX_TRAP_9380: // Box trap placed
            case ObjectID.BIRD_SNARE_9345: // Bird snare placed
            case ObjectID.NET_TRAP_9343: // Net trap placed at green sallys
            case ObjectID.NET_TRAP: // Net trap placed at orange sallys
            case ObjectID.NET_TRAP_8992: // Net trap placed at red sallys
            case ObjectID.NET_TRAP_9002: // Net trap placed at black sallys
                // If the player is on that tile, assume he is the one that placed the trap
                // Note that a player can move and set up a trap in the same tick, and this
                // event runs after the player movement has been updated, so we need to
                // compare to the trap location to the last location of the player.
                if (lastTickLocalPlayerLocation != null
                        && gameObject.getWorldLocation().distanceTo(lastTickLocalPlayerLocation) == 0)
                {
                    log.debug("Trap placed by \"{}\" on {}", localPlayer.getName(), localPlayer.getWorldLocation());
                    traps.put(gameObject.getWorldLocation(), new HunterTrap(gameObject));
                    lastActionTime = Instant.now();
                }
                break;
            /*
             * ------------------------------------------------------------------------------
             * Catching stuff
             * ------------------------------------------------------------------------------
             */
            case ObjectID.MAGIC_BOX_19226: // Imp caught
            case ObjectID.SHAKING_BOX: // Black chinchompa caught
            case ObjectID.SHAKING_BOX_9382: // Grey chinchompa caught
            case ObjectID.SHAKING_BOX_9383: // Red chinchompa caught
            case ObjectID.BOULDER_20648: // Prickly kebbit caught
            case ObjectID.BOULDER_20649: // Sabre-tooth kebbit caught
            case ObjectID.BOULDER_20650: // Barb-tailed kebbit caught
            case ObjectID.BOULDER_20651: // Wild kebbit caught
            case ObjectID.BIRD_SNARE_9373: // Crimson swift caught
            case ObjectID.BIRD_SNARE_9375: // Cerulean twitch caught
            case ObjectID.BIRD_SNARE_9377: // Golden warbler caught
            case ObjectID.BIRD_SNARE_9379: // Copper longtail caught
            case ObjectID.BIRD_SNARE_9348: // Tropical wagtail caught
            case ObjectID.NET_TRAP_9004: // Green sally caught
            case ObjectID.NET_TRAP_8986: // Red sally caught
            case ObjectID.NET_TRAP_8734: // Orange sally caught
            case ObjectID.NET_TRAP_8996: // Black sally caught
            case ObjectID.LARGE_BOULDER_28830: // Maniacal monkey tail obtained
            case ObjectID.LARGE_BOULDER_28831: // Maniacal monkey tail obtained
                if (myTrap != null)
                {
                    myTrap.setState(HunterTrap.State.FULL);
                    myTrap.resetTimer();
                    lastActionTime = Instant.now();
                    if (config.maniacalMonkeyNotify() && myTrap.getObjectId() == ObjectID.MONKEY_TRAP)
                    {
                        notifier.notify("You've caught part of a monkey's tail.");
                    }
                }
                break;
            /*
             * ------------------------------------------------------------------------------
             * Failed catch
             * ------------------------------------------------------------------------------
             */
            case ObjectID.MAGIC_BOX_FAILED: //Empty imp box
            case ObjectID.BOX_TRAP_9385: //Empty box trap
            case ObjectID.BIRD_SNARE: //Empty box trap
                if (myTrap != null)
                {
                    myTrap.setState(HunterTrap.State.EMPTY);
                    myTrap.resetTimer();
                    lastActionTime = Instant.now();
                }
                break;
            /*
             * ------------------------------------------------------------------------------
             * Transitions
             * ------------------------------------------------------------------------------
             */
            // Imp entering box
            case ObjectID.MAGIC_BOX_19225:
            // Black chin shaking box
            case ObjectID.BOX_TRAP:
            case ObjectID.BOX_TRAP_2026:
            case ObjectID.BOX_TRAP_2028:
            case ObjectID.BOX_TRAP_2029:
            // Red chin shaking box
            case ObjectID.BOX_TRAP_9381:
            case ObjectID.BOX_TRAP_9390:
            case ObjectID.BOX_TRAP_9391:
            case ObjectID.BOX_TRAP_9392:
            case ObjectID.BOX_TRAP_9393:
            // Grey chin shaking box
            case ObjectID.BOX_TRAP_9386:
            case ObjectID.BOX_TRAP_9387:
            case ObjectID.BOX_TRAP_9388:
            // Bird traps
            case ObjectID.BIRD_SNARE_9346:
            case ObjectID.BIRD_SNARE_9347:
            case ObjectID.BIRD_SNARE_9349:
            case ObjectID.BIRD_SNARE_9374:
            case ObjectID.BIRD_SNARE_9376:
            case ObjectID.BIRD_SNARE_9378:
            // Deadfall trap
            case ObjectID.DEADFALL_19218:
            case ObjectID.DEADFALL_19851:
            case ObjectID.DEADFALL_20128:
            case ObjectID.DEADFALL_20129:
            case ObjectID.DEADFALL_20130:
            case ObjectID.DEADFALL_20131:
            // Net trap
            case ObjectID.NET_TRAP_9003:
            case ObjectID.NET_TRAP_9005:
            case ObjectID.NET_TRAP_8972:
            case ObjectID.NET_TRAP_8974:
            case ObjectID.NET_TRAP_8985:
            case ObjectID.NET_TRAP_8987:
            case ObjectID.NET_TRAP_8993:
            case ObjectID.NET_TRAP_8997:
            // Maniacal monkey boulder trap
            case ObjectID.MONKEY_TRAP_28828:
            case ObjectID.MONKEY_TRAP_28829:
                if (myTrap != null)
                {
                    myTrap.setState(HunterTrap.State.TRANSITION);
                }
                break;
        }
    }

    /**
     * Iterates over all the traps that were placed by the local player and
     * checks if the trap is still there. If the trap is gone, it removes
     * the trap from the local players trap collection.
     */
    @Subscribe
    public void onGameTick(GameTick event)
    {
        // Check if all traps are still there, and remove the ones that are not.
        Iterator<Map.Entry<WorldPoint, HunterTrap>> it = traps.entrySet().iterator();
        Tile[][][] tiles = client.getRegion().getTiles();
        // Traps older than twice the trap lifetime are considered stale.
        Instant expire = Instant.now().minus(HunterTrap.TRAP_TIME.multipliedBy(2));
        while (it.hasNext())
        {
            Map.Entry<WorldPoint, HunterTrap> entry = it.next();
            HunterTrap trap = entry.getValue();
            WorldPoint world = entry.getKey();
            LocalPoint local = LocalPoint.fromWorld(client, world);
            // Not within the client's viewport
            if (local == null)
            {
                // Cull very old traps
                if (trap.getPlacedOn().isBefore(expire))
                {
                    log.debug("Trap removed from personal trap collection due to timeout, {} left", traps.size());
                    it.remove();
                    continue;
                }
                continue;
            }
            Tile tile = tiles[world.getPlane()][local.getRegionX()][local.getRegionY()];
            GameObject[] objects = tile.getGameObjects();
            boolean containsBoulder = false;
            boolean containsAnything = false;
            for (GameObject object : objects)
            {
                if (object != null)
                {
                    containsAnything = true;
                    if (object.getId() == ObjectID.BOULDER_19215 || object.getId() == ObjectID.LARGE_BOULDER)
                    {
                        containsBoulder = true;
                        break;
                    }
                }
            }
            if (!containsAnything)
            {
                it.remove();
                log.debug("Trap removed from personal trap collection, {} left", traps.size());
            }
            else if (containsBoulder) // For traps like deadfalls. This is different because when the trap is gone, there is still a GameObject (boulder)
            {
                it.remove();
                log.debug("Special trap removed from personal trap collection, {} left", traps.size());
                // Case we have notifications enabled and the action was not manual, throw notification
                if (config.maniacalMonkeyNotify() && trap.getObjectId() == ObjectID.MONKEY_TRAP &&
                        !trap.getState().equals(HunterTrap.State.FULL) && !trap.getState().equals(HunterTrap.State.OPEN))
                {
                    notifier.notify("The monkey escaped.");
                }
            }
        }
        // Remember where the player was this tick for trap attribution next tick.
        lastTickLocalPlayerLocation = client.getLocalPlayer().getWorldLocation();
    }

    /** Re-applies overlay settings whenever this plugin's config group changes. */
    @Subscribe
    public void onConfigChanged(ConfigChanged event)
    {
        if (event.getGroup().equals("hunterplugin"))
        {
            overlay.updateConfig();
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gateway.local;
import com.carrotsearch.hppc.ObjectLongOpenHashMap;
import com.carrotsearch.hppc.ObjectOpenHashSet;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.predicates.ObjectPredicate;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.MutableShardRouting;
import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.routing.allocation.StartedRerouteAllocation;
import org.elasticsearch.cluster.routing.allocation.allocator.GatewayAllocator;
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.gateway.local.state.shards.TransportNodesListGatewayStartedShards;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.store.StoreFileMetaData;
import org.elasticsearch.indices.store.TransportNodesListShardStoreMetaData;
import org.elasticsearch.transport.ConnectTransportException;
import java.util.*;
import java.util.concurrent.ConcurrentMap;
/**
*
*/
/**
 * Gateway allocator that assigns unassigned shards based on state recovered
 * from the local gateway of each data node.
 *
 * Primary shards are allocated to one of the nodes holding the highest
 * on-disk shard version, but only once a minimum number of shard copies
 * (controlled by {@code index.recovery.initial_shards}) has been located.
 * Replica shards are preferentially allocated to nodes whose existing store
 * files overlap most with the active primary's store, so recovery can reuse
 * data already on disk.
 *
 * Per-shard listing results are cached and invalidated when a shard starts
 * or fails.
 */
public class LocalGatewayAllocator extends AbstractComponent implements GatewayAllocator {

    /** Index-level setting: how many shard copies must be found before allocating a primary. */
    public static final String INDEX_RECOVERY_INITIAL_SHARDS = "index.recovery.initial_shards";

    private final TransportNodesListGatewayStartedShards listGatewayStartedShards;

    private final TransportNodesListShardStoreMetaData listShardStoreMetaData;

    // Per-shard caches of per-node listing responses; entries are dropped in
    // applyStartedShards/applyFailedShards so the next round re-lists fresh data.
    private final ConcurrentMap<ShardId, Map<DiscoveryNode, TransportNodesListShardStoreMetaData.StoreFilesMetaData>> cachedStores = ConcurrentCollections.newConcurrentMap();

    private final ConcurrentMap<ShardId, ObjectLongOpenHashMap<DiscoveryNode>> cachedShardsState = ConcurrentCollections.newConcurrentMap();

    // timeout for the transport-level list operations issued below
    private final TimeValue listTimeout;

    // node-level default for initial_shards; may be overridden per index
    private final String initialShards;

    @Inject
    public LocalGatewayAllocator(Settings settings,
                                 TransportNodesListGatewayStartedShards listGatewayStartedShards, TransportNodesListShardStoreMetaData listShardStoreMetaData) {
        super(settings);
        this.listGatewayStartedShards = listGatewayStartedShards;
        this.listShardStoreMetaData = listShardStoreMetaData;
        this.listTimeout = componentSettings.getAsTime("list_timeout", TimeValue.timeValueSeconds(30));
        this.initialShards = componentSettings.get("initial_shards", "quorum");
        logger.debug("using initial_shards [{}], list_timeout [{}]", initialShards, listTimeout);
    }

    @Override
    public void applyStartedShards(StartedRerouteAllocation allocation) {
        // started shards no longer need their cached listing data
        for (ShardRouting shardRouting : allocation.startedShards()) {
            cachedStores.remove(shardRouting.shardId());
            cachedShardsState.remove(shardRouting.shardId());
        }
    }

    @Override
    public void applyFailedShards(FailedRerouteAllocation allocation) {
        // failed shards must be re-listed from scratch on the next round
        for (ShardRouting failedShard : allocation.failedShards()) {
            cachedStores.remove(failedShard.shardId());
            cachedShardsState.remove(failedShard.shardId());
        }
    }

    /**
     * Attempts to allocate all currently unassigned shards: primaries first
     * (version-based), then replicas (store-overlap based).
     *
     * @return true if the routing table was changed by this invocation
     */
    @Override
    public boolean allocateUnassigned(RoutingAllocation allocation) {
        boolean changed = false;
        DiscoveryNodes nodes = allocation.nodes();
        RoutingNodes routingNodes = allocation.routingNodes();
        // First, handle primaries, they must find a place to be allocated on here
        Iterator<MutableShardRouting> unassignedIterator = routingNodes.unassigned().iterator();
        while (unassignedIterator.hasNext()) {
            MutableShardRouting shard = unassignedIterator.next();
            if (!shard.primary()) {
                continue;
            }
            // this is an API allocation, ignore since we know there is no data...
            if (!routingNodes.routingTable().index(shard.index()).shard(shard.id()).primaryAllocatedPostApi()) {
                continue;
            }
            ObjectLongOpenHashMap<DiscoveryNode> nodesState = buildShardStates(nodes, shard);
            int numberOfAllocationsFound = 0;
            long highestVersion = -1;
            Set<DiscoveryNode> nodesWithHighestVersion = Sets.newHashSet();
            final boolean[] states = nodesState.allocated;
            final Object[] keys = nodesState.keys;
            final long[] values = nodesState.values;
            for (int i = 0; i < states.length; i++) {
                if (!states[i]) {
                    continue;
                }
                DiscoveryNode node = (DiscoveryNode) keys[i];
                long version = values[i];
                // since we don't check in NO allocation, we need to double check here
                if (allocation.shouldIgnoreShardForNode(shard.shardId(), node.id())) {
                    continue;
                }
                // -1 means the node has no copy of this shard (see buildShardStates)
                if (version != -1) {
                    numberOfAllocationsFound++;
                    // maintain the set of nodes sharing the highest observed version
                    if (highestVersion == -1) {
                        nodesWithHighestVersion.add(node);
                        highestVersion = version;
                    } else {
                        if (version > highestVersion) {
                            nodesWithHighestVersion.clear();
                            nodesWithHighestVersion.add(node);
                            highestVersion = version;
                        } else if (version == highestVersion) {
                            nodesWithHighestVersion.add(node);
                        }
                    }
                }
            }
            // check if the counts meets the minimum set
            int requiredAllocation = 1;
            try {
                IndexMetaData indexMetaData = routingNodes.metaData().index(shard.index());
                String initialShards = indexMetaData.settings().get(INDEX_RECOVERY_INITIAL_SHARDS, settings.get(INDEX_RECOVERY_INITIAL_SHARDS, this.initialShards));
                if ("quorum".equals(initialShards)) {
                    if (indexMetaData.numberOfReplicas() > 1) {
                        requiredAllocation = ((1 + indexMetaData.numberOfReplicas()) / 2) + 1;
                    }
                } else if ("quorum-1".equals(initialShards) || "half".equals(initialShards)) {
                    if (indexMetaData.numberOfReplicas() > 2) {
                        requiredAllocation = ((1 + indexMetaData.numberOfReplicas()) / 2);
                    }
                } else if ("one".equals(initialShards)) {
                    requiredAllocation = 1;
                } else if ("full".equals(initialShards) || "all".equals(initialShards)) {
                    requiredAllocation = indexMetaData.numberOfReplicas() + 1;
                } else if ("full-1".equals(initialShards) || "all-1".equals(initialShards)) {
                    if (indexMetaData.numberOfReplicas() > 1) {
                        requiredAllocation = indexMetaData.numberOfReplicas();
                    }
                } else {
                    // any other value is interpreted as an explicit copy count
                    requiredAllocation = Integer.parseInt(initialShards);
                }
            } catch (Exception e) {
                logger.warn("[{}][{}] failed to derived initial_shards from value {}, ignore allocation for {}", shard.index(), shard.id(), initialShards, shard);
            }
            // not enough found for this shard, continue...
            if (numberOfAllocationsFound < requiredAllocation) {
                // we can't really allocate, so ignore it and continue
                unassignedIterator.remove();
                routingNodes.ignoredUnassigned().add(shard);
                if (logger.isDebugEnabled()) {
                    logger.debug("[{}][{}]: not allocating, number_of_allocated_shards_found [{}], required_number [{}]", shard.index(), shard.id(), numberOfAllocationsFound, requiredAllocation);
                }
                continue;
            }
            Set<DiscoveryNode> throttledNodes = Sets.newHashSet();
            Set<DiscoveryNode> noNodes = Sets.newHashSet();
            for (DiscoveryNode discoNode : nodesWithHighestVersion) {
                RoutingNode node = routingNodes.node(discoNode.id());
                if (node == null) {
                    continue;
                }
                Decision decision = allocation.deciders().canAllocate(shard, node, allocation);
                if (decision.type() == Decision.Type.THROTTLE) {
                    throttledNodes.add(discoNode);
                } else if (decision.type() == Decision.Type.NO) {
                    noNodes.add(discoNode);
                } else {
                    if (logger.isDebugEnabled()) {
                        logger.debug("[{}][{}]: allocating [{}] to [{}] on primary allocation", shard.index(), shard.id(), shard, discoNode);
                    }
                    // we found a match
                    changed = true;
                    // make sure we create one with the version from the recovered state
                    allocation.routingNodes().assign(new MutableShardRouting(shard, highestVersion), node.nodeId());
                    unassignedIterator.remove();
                    // found a node, so no throttling, no "no", and break out of the loop
                    throttledNodes.clear();
                    noNodes.clear();
                    break;
                }
            }
            if (throttledNodes.isEmpty()) {
                // if we have a node that we "can't" allocate to, force allocation, since this is our master data!
                if (!noNodes.isEmpty()) {
                    DiscoveryNode discoNode = noNodes.iterator().next();
                    RoutingNode node = routingNodes.node(discoNode.id());
                    if (logger.isDebugEnabled()) {
                        logger.debug("[{}][{}]: forcing allocating [{}] to [{}] on primary allocation", shard.index(), shard.id(), shard, discoNode);
                    }
                    // we found a match
                    changed = true;
                    // make sure we create one with the version from the recovered state
                    allocation.routingNodes().assign(new MutableShardRouting(shard, highestVersion), node.nodeId());
                    unassignedIterator.remove();
                }
            } else {
                if (logger.isDebugEnabled()) {
                    logger.debug("[{}][{}]: throttling allocation [{}] to [{}] on primary allocation", shard.index(), shard.id(), shard, throttledNodes);
                }
                // we are throttling this, but we have enough to allocate to this node, ignore it for now
                unassignedIterator.remove();
                routingNodes.ignoredUnassigned().add(shard);
            }
        }
        if (!routingNodes.hasUnassigned()) {
            return changed;
        }
        // Now, handle replicas, try to assign them to nodes that are similar to the one the primary was allocated on
        unassignedIterator = routingNodes.unassigned().iterator();
        while (unassignedIterator.hasNext()) {
            MutableShardRouting shard = unassignedIterator.next();
            // pre-check if it can be allocated to any node that currently exists, so we won't list the store for it for nothing
            boolean canBeAllocatedToAtLeastOneNode = false;
            for (ObjectCursor<DiscoveryNode> cursor : nodes.dataNodes().values()) {
                RoutingNode node = routingNodes.node(cursor.value.id());
                if (node == null) {
                    continue;
                }
                // if we can't allocate it on a node, ignore it, for example, this handles
                // cases for only allocating a replica after a primary
                Decision decision = allocation.deciders().canAllocate(shard, node, allocation);
                if (decision.type() == Decision.Type.YES) {
                    canBeAllocatedToAtLeastOneNode = true;
                    break;
                }
            }
            if (!canBeAllocatedToAtLeastOneNode) {
                continue;
            }
            Map<DiscoveryNode, TransportNodesListShardStoreMetaData.StoreFilesMetaData> shardStores = buildShardStores(nodes, shard);
            long lastSizeMatched = 0;
            DiscoveryNode lastDiscoNodeMatched = null;
            RoutingNode lastNodeMatched = null;
            for (Map.Entry<DiscoveryNode, TransportNodesListShardStoreMetaData.StoreFilesMetaData> nodeStoreEntry : shardStores.entrySet()) {
                DiscoveryNode discoNode = nodeStoreEntry.getKey();
                TransportNodesListShardStoreMetaData.StoreFilesMetaData storeFilesMetaData = nodeStoreEntry.getValue();
                logger.trace("{}: checking node [{}]", shard, discoNode);
                if (storeFilesMetaData == null) {
                    // already allocated on that node...
                    continue;
                }
                RoutingNode node = routingNodes.node(discoNode.id());
                if (node == null) {
                    continue;
                }
                // check if we can allocate on that node...
                // we only check for NO, since if this node is THROTTLING and it has enough "same data"
                // then we will try and assign it next time
                Decision decision = allocation.deciders().canAllocate(shard, node, allocation);
                if (decision.type() == Decision.Type.NO) {
                    continue;
                }
                // if it is already allocated, we can't assign to it...
                if (storeFilesMetaData.allocated()) {
                    continue;
                }
                if (!shard.primary()) {
                    MutableShardRouting primaryShard = routingNodes.activePrimary(shard);
                    if (primaryShard != null) {
                        assert primaryShard.active();
                        DiscoveryNode primaryNode = nodes.get(primaryShard.currentNodeId());
                        if (primaryNode != null) {
                            TransportNodesListShardStoreMetaData.StoreFilesMetaData primaryNodeStore = shardStores.get(primaryNode);
                            if (primaryNodeStore != null && primaryNodeStore.allocated()) {
                                // sum the bytes of store files identical to the primary's copy
                                long sizeMatched = 0;
                                for (StoreFileMetaData storeFileMetaData : storeFilesMetaData) {
                                    if (primaryNodeStore.fileExists(storeFileMetaData.name()) && primaryNodeStore.file(storeFileMetaData.name()).isSame(storeFileMetaData)) {
                                        sizeMatched += storeFileMetaData.length();
                                    }
                                }
                                // remember the node with the largest overlap so far
                                if (sizeMatched > lastSizeMatched) {
                                    lastSizeMatched = sizeMatched;
                                    lastDiscoNodeMatched = discoNode;
                                    lastNodeMatched = node;
                                }
                            }
                        }
                    }
                }
            }
            if (lastNodeMatched != null) {
                // we only check on THROTTLE since we checked before on NO
                Decision decision = allocation.deciders().canAllocate(shard, lastNodeMatched, allocation);
                if (decision.type() == Decision.Type.THROTTLE) {
                    // fixed: guard level now matches the logger.debug(...) call below
                    // (was logger.isTraceEnabled(), which suppressed the message at debug level)
                    if (logger.isDebugEnabled()) {
                        logger.debug("[{}][{}]: throttling allocation [{}] to [{}] in order to reuse its unallocated persistent store with total_size [{}]", shard.index(), shard.id(), shard, lastDiscoNodeMatched, new ByteSizeValue(lastSizeMatched));
                    }
                    // we are throttling this, but we have enough to allocate to this node, ignore it for now
                    unassignedIterator.remove();
                    routingNodes.ignoredUnassigned().add(shard);
                } else {
                    if (logger.isDebugEnabled()) {
                        logger.debug("[{}][{}]: allocating [{}] to [{}] in order to reuse its unallocated persistent store with total_size [{}]", shard.index(), shard.id(), shard, lastDiscoNodeMatched, new ByteSizeValue(lastSizeMatched));
                    }
                    // we found a match
                    changed = true;
                    allocation.routingNodes().assign(shard, lastNodeMatched.nodeId());
                    unassignedIterator.remove();
                }
            }
        }
        return changed;
    }

    /**
     * Returns a map from data node to the on-disk version of the given shard
     * on that node (-1 means the node has no copy). Results are cached per
     * shard; only nodes not yet in the cache are listed over the transport.
     */
    private ObjectLongOpenHashMap<DiscoveryNode> buildShardStates(final DiscoveryNodes nodes, MutableShardRouting shard) {
        ObjectLongOpenHashMap<DiscoveryNode> shardStates = cachedShardsState.get(shard.shardId());
        ObjectOpenHashSet<String> nodeIds;
        if (shardStates == null) {
            shardStates = new ObjectLongOpenHashMap<>();
            cachedShardsState.put(shard.shardId(), shardStates);
            nodeIds = ObjectOpenHashSet.from(nodes.dataNodes().keys());
        } else {
            // clean nodes that have failed
            shardStates.keys().removeAll(new ObjectPredicate<DiscoveryNode>() {
                @Override
                public boolean apply(DiscoveryNode node) {
                    return !nodes.nodeExists(node.id());
                }
            });
            nodeIds = ObjectOpenHashSet.newInstance();
            // we have stored cached from before, see if the nodes changed, if they have, go fetch again
            for (ObjectCursor<DiscoveryNode> cursor : nodes.dataNodes().values()) {
                DiscoveryNode node = cursor.value;
                if (!shardStates.containsKey(node)) {
                    nodeIds.add(node.id());
                }
            }
        }
        if (nodeIds.isEmpty()) {
            // cache already covers every live data node
            return shardStates;
        }
        String[] nodesIdsArray = nodeIds.toArray(String.class);
        TransportNodesListGatewayStartedShards.NodesLocalGatewayStartedShards response = listGatewayStartedShards.list(shard.shardId(), nodesIdsArray, listTimeout).actionGet();
        if (logger.isDebugEnabled()) {
            if (response.failures().length > 0) {
                StringBuilder sb = new StringBuilder(shard + ": failures when trying to list shards on nodes:");
                for (int i = 0; i < response.failures().length; i++) {
                    Throwable cause = ExceptionsHelper.unwrapCause(response.failures()[i]);
                    if (cause instanceof ConnectTransportException) {
                        // disconnects are expected churn, don't log them
                        continue;
                    }
                    sb.append("\n -> ").append(response.failures()[i].getDetailedMessage());
                }
                logger.debug(sb.toString());
            }
        }
        for (TransportNodesListGatewayStartedShards.NodeLocalGatewayStartedShards nodeShardState : response) {
            // -1 version means it does not exists, which is what the API returns, and what we expect to
            shardStates.put(nodeShardState.getNode(), nodeShardState.version());
        }
        return shardStates;
    }

    /**
     * Returns a map from data node to the store-files metadata of the given
     * shard on that node. Results are cached per shard; only nodes not yet in
     * the cache are listed over the transport.
     */
    private Map<DiscoveryNode, TransportNodesListShardStoreMetaData.StoreFilesMetaData> buildShardStores(DiscoveryNodes nodes, MutableShardRouting shard) {
        Map<DiscoveryNode, TransportNodesListShardStoreMetaData.StoreFilesMetaData> shardStores = cachedStores.get(shard.shardId());
        ObjectOpenHashSet<String> nodesIds;
        if (shardStores == null) {
            shardStores = Maps.newHashMap();
            cachedStores.put(shard.shardId(), shardStores);
            nodesIds = ObjectOpenHashSet.from(nodes.dataNodes().keys());
        } else {
            nodesIds = ObjectOpenHashSet.newInstance();
            // clean nodes that have failed
            for (Iterator<DiscoveryNode> it = shardStores.keySet().iterator(); it.hasNext(); ) {
                DiscoveryNode node = it.next();
                if (!nodes.nodeExists(node.id())) {
                    it.remove();
                }
            }
            // list only nodes that joined since the cache was populated
            for (ObjectCursor<DiscoveryNode> cursor : nodes.dataNodes().values()) {
                DiscoveryNode node = cursor.value;
                if (!shardStores.containsKey(node)) {
                    nodesIds.add(node.id());
                }
            }
        }
        if (!nodesIds.isEmpty()) {
            String[] nodesIdsArray = nodesIds.toArray(String.class);
            TransportNodesListShardStoreMetaData.NodesStoreFilesMetaData nodesStoreFilesMetaData = listShardStoreMetaData.list(shard.shardId(), false, nodesIdsArray, listTimeout).actionGet();
            if (logger.isTraceEnabled()) {
                if (nodesStoreFilesMetaData.failures().length > 0) {
                    StringBuilder sb = new StringBuilder(shard + ": failures when trying to list stores on nodes:");
                    for (int i = 0; i < nodesStoreFilesMetaData.failures().length; i++) {
                        Throwable cause = ExceptionsHelper.unwrapCause(nodesStoreFilesMetaData.failures()[i]);
                        if (cause instanceof ConnectTransportException) {
                            // disconnects are expected churn, don't log them
                            continue;
                        }
                        sb.append("\n -> ").append(nodesStoreFilesMetaData.failures()[i].getDetailedMessage());
                    }
                    logger.trace(sb.toString());
                }
            }
            for (TransportNodesListShardStoreMetaData.NodeStoreFilesMetaData nodeStoreFilesMetaData : nodesStoreFilesMetaData) {
                if (nodeStoreFilesMetaData.storeFilesMetaData() != null) {
                    shardStores.put(nodeStoreFilesMetaData.getNode(), nodeStoreFilesMetaData.storeFilesMetaData());
                }
            }
        }
        return shardStores;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.endpoint.dsl;
import javax.annotation.Generated;
import org.apache.camel.builder.EndpointConsumerBuilder;
import org.apache.camel.builder.EndpointProducerBuilder;
import org.apache.camel.builder.endpoint.AbstractEndpointBuilder;
/**
* The ganglia component is used for sending metrics to the Ganglia monitoring
* system.
*
* Generated by camel-package-maven-plugin - do not edit this file!
*/
@Generated("org.apache.camel.maven.packaging.EndpointDslMojo")
public interface GangliaEndpointBuilderFactory {

    /**
     * Builder for endpoint for the Ganglia component.
     */
    public interface GangliaEndpointBuilder extends EndpointProducerBuilder {
        default AdvancedGangliaEndpointBuilder advanced() {
            return (AdvancedGangliaEndpointBuilder) this;
        }
        /**
         * Minimum time in seconds before Ganglia will purge the metric value if
         * it expires. Set to 0 and the value will remain in Ganglia
         * indefinitely until a gmond agent restart.
         *
         * The option is a: <code>int</code> type.
         *
         * Default: 0
         * Group: producer
         */
        default GangliaEndpointBuilder dmax(int dmax) {
            doSetProperty("dmax", dmax);
            return this;
        }
        /**
         * Minimum time in seconds before Ganglia will purge the metric value if
         * it expires. Set to 0 and the value will remain in Ganglia
         * indefinitely until a gmond agent restart.
         *
         * The option will be converted to a <code>int</code> type.
         *
         * Default: 0
         * Group: producer
         */
        default GangliaEndpointBuilder dmax(String dmax) {
            doSetProperty("dmax", dmax);
            return this;
        }
        /**
         * The group that the metric belongs to.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Default: java
         * Group: producer
         */
        default GangliaEndpointBuilder groupName(String groupName) {
            doSetProperty("groupName", groupName);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         */
        default GangliaEndpointBuilder lazyStartProducer(
                boolean lazyStartProducer) {
            doSetProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         */
        default GangliaEndpointBuilder lazyStartProducer(
                String lazyStartProducer) {
            doSetProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
        /**
         * The name to use for the metric.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Default: metric
         * Group: producer
         */
        default GangliaEndpointBuilder metricName(String metricName) {
            doSetProperty("metricName", metricName);
            return this;
        }
        /**
         * Send the UDP metric packets using MULTICAST or UNICAST.
         *
         * The option is a:
         * <code>info.ganglia.gmetric4j.gmetric.GMetric$UDPAddressingMode</code>
         * type.
         *
         * Default: MULTICAST
         * Group: producer
         */
        default GangliaEndpointBuilder mode(UDPAddressingMode mode) {
            doSetProperty("mode", mode);
            return this;
        }
        /**
         * Send the UDP metric packets using MULTICAST or UNICAST.
         *
         * The option will be converted to a
         * <code>info.ganglia.gmetric4j.gmetric.GMetric$UDPAddressingMode</code>
         * type.
         *
         * Default: MULTICAST
         * Group: producer
         */
        default GangliaEndpointBuilder mode(String mode) {
            doSetProperty("mode", mode);
            return this;
        }
        /**
         * Prefix the metric name with this string and an underscore.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: producer
         */
        default GangliaEndpointBuilder prefix(String prefix) {
            doSetProperty("prefix", prefix);
            return this;
        }
        /**
         * The slope.
         *
         * The option is a:
         * <code>info.ganglia.gmetric4j.gmetric.GMetricSlope</code> type.
         *
         * Default: BOTH
         * Group: producer
         */
        default GangliaEndpointBuilder slope(GMetricSlope slope) {
            doSetProperty("slope", slope);
            return this;
        }
        /**
         * The slope.
         *
         * The option will be converted to a
         * <code>info.ganglia.gmetric4j.gmetric.GMetricSlope</code> type.
         *
         * Default: BOTH
         * Group: producer
         */
        default GangliaEndpointBuilder slope(String slope) {
            doSetProperty("slope", slope);
            return this;
        }
        /**
         * Spoofing information IP:hostname.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: producer
         */
        default GangliaEndpointBuilder spoofHostname(String spoofHostname) {
            doSetProperty("spoofHostname", spoofHostname);
            return this;
        }
        /**
         * Maximum time in seconds that the value can be considered current.
         * After this, Ganglia considers the value to have expired.
         *
         * The option is a: <code>int</code> type.
         *
         * Default: 60
         * Group: producer
         */
        default GangliaEndpointBuilder tmax(int tmax) {
            doSetProperty("tmax", tmax);
            return this;
        }
        /**
         * Maximum time in seconds that the value can be considered current.
         * After this, Ganglia considers the value to have expired.
         *
         * The option will be converted to a <code>int</code> type.
         *
         * Default: 60
         * Group: producer
         */
        default GangliaEndpointBuilder tmax(String tmax) {
            doSetProperty("tmax", tmax);
            return this;
        }
        /**
         * If using multicast, set the TTL of the packets.
         *
         * The option is a: <code>int</code> type.
         *
         * Default: 5
         * Group: producer
         */
        default GangliaEndpointBuilder ttl(int ttl) {
            doSetProperty("ttl", ttl);
            return this;
        }
        /**
         * If using multicast, set the TTL of the packets.
         *
         * The option will be converted to a <code>int</code> type.
         *
         * Default: 5
         * Group: producer
         */
        default GangliaEndpointBuilder ttl(String ttl) {
            doSetProperty("ttl", ttl);
            return this;
        }
        /**
         * The type of value.
         *
         * The option is a:
         * <code>info.ganglia.gmetric4j.gmetric.GMetricType</code> type.
         *
         * Default: STRING
         * Group: producer
         */
        default GangliaEndpointBuilder type(GMetricType type) {
            doSetProperty("type", type);
            return this;
        }
        /**
         * The type of value.
         *
         * The option will be converted to a
         * <code>info.ganglia.gmetric4j.gmetric.GMetricType</code> type.
         *
         * Default: STRING
         * Group: producer
         */
        default GangliaEndpointBuilder type(String type) {
            doSetProperty("type", type);
            return this;
        }
        /**
         * Any unit of measurement that qualifies the metric, e.g. widgets,
         * litres, bytes. Do not include a prefix such as k (kilo) or m (milli),
         * other tools may scale the units later. The value should be unscaled.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: producer
         */
        default GangliaEndpointBuilder units(String units) {
            doSetProperty("units", units);
            return this;
        }
        /**
         * Use the wire format of Ganglia 3.1.0 and later versions. Set this to
         * false to use Ganglia 3.0.x or earlier.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: true
         * Group: producer
         */
        default GangliaEndpointBuilder wireFormat31x(boolean wireFormat31x) {
            doSetProperty("wireFormat31x", wireFormat31x);
            return this;
        }
        /**
         * Use the wire format of Ganglia 3.1.0 and later versions. Set this to
         * false to use Ganglia 3.0.x or earlier.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: true
         * Group: producer
         */
        default GangliaEndpointBuilder wireFormat31x(String wireFormat31x) {
            doSetProperty("wireFormat31x", wireFormat31x);
            return this;
        }
    }

    /**
     * Advanced builder for endpoint for the Ganglia component.
     */
    public interface AdvancedGangliaEndpointBuilder
            extends
                EndpointProducerBuilder {
        default GangliaEndpointBuilder basic() {
            return (GangliaEndpointBuilder) this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x) or
         * the newer property binding with additional capabilities.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AdvancedGangliaEndpointBuilder basicPropertyBinding(
                boolean basicPropertyBinding) {
            doSetProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x) or
         * the newer property binding with additional capabilities.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AdvancedGangliaEndpointBuilder basicPropertyBinding(
                String basicPropertyBinding) {
            doSetProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or Camel
         * is allowed to use asynchronous processing (if supported).
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AdvancedGangliaEndpointBuilder synchronous(boolean synchronous) {
            doSetProperty("synchronous", synchronous);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or Camel
         * is allowed to use asynchronous processing (if supported).
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AdvancedGangliaEndpointBuilder synchronous(String synchronous) {
            doSetProperty("synchronous", synchronous);
            return this;
        }
    }

    /**
     * Proxy enum for
     * <code>info.ganglia.gmetric4j.gmetric.GMetric$UDPAddressingMode</code>
     * enum.
     */
    enum UDPAddressingMode {
        MULTICAST,
        UNICAST;
    }

    /**
     * Proxy enum for <code>info.ganglia.gmetric4j.gmetric.GMetricSlope</code>
     * enum.
     */
    enum GMetricSlope {
        ZERO,
        POSITIVE,
        NEGATIVE,
        BOTH;
    }

    /**
     * Proxy enum for <code>info.ganglia.gmetric4j.gmetric.GMetricType</code>
     * enum.
     */
    enum GMetricType {
        STRING,
        INT8,
        UINT8,
        INT16,
        UINT16,
        INT32,
        UINT32,
        FLOAT,
        DOUBLE;
    }

    public interface GangliaBuilders {
        /**
         * Ganglia (camel-ganglia)
         * The ganglia component is used for sending metrics to the Ganglia
         * monitoring system.
         *
         * Category: monitoring
         * Since: 2.15
         * Maven coordinates: org.apache.camel:camel-ganglia
         *
         * Syntax: <code>ganglia:host:port</code>
         *
         * Path parameter: host
         * Host name for Ganglia server
         * Default value: 239.2.11.71
         *
         * Path parameter: port
         * Port for Ganglia server
         * Default value: 8649
         */
        default GangliaEndpointBuilder ganglia(String path) {
            return GangliaEndpointBuilderFactory.ganglia(path);
        }
    }

    /**
     * Ganglia (camel-ganglia)
     * The ganglia component is used for sending metrics to the Ganglia
     * monitoring system.
     *
     * Category: monitoring
     * Since: 2.15
     * Maven coordinates: org.apache.camel:camel-ganglia
     *
     * Syntax: <code>ganglia:host:port</code>
     *
     * Path parameter: host
     * Host name for Ganglia server
     * Default value: 239.2.11.71
     *
     * Path parameter: port
     * Port for Ganglia server
     * Default value: 8649
     */
    static GangliaEndpointBuilder ganglia(String path) {
        class GangliaEndpointBuilderImpl extends AbstractEndpointBuilder implements GangliaEndpointBuilder, AdvancedGangliaEndpointBuilder {
            public GangliaEndpointBuilderImpl(String path) {
                super("ganglia", path);
            }
        }
        return new GangliaEndpointBuilderImpl(path);
    }
}
| |
/**
* Copyright 2005 Sakai Foundation Licensed under the
* Educational Community License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.osedu.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS"
* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package org.sakaiproject.evaluation.tool.reporting;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Locale;
import java.util.Map;
import javax.servlet.http.HttpServletResponse;
import org.sakaiproject.evaluation.logic.EvalCommonLogic;
import org.sakaiproject.evaluation.logic.EvalEvaluationService;
import org.sakaiproject.evaluation.logic.ReportingPermissions;
import org.sakaiproject.evaluation.model.EvalEvaluation;
import org.sakaiproject.evaluation.tool.viewparams.DownloadReportViewParams;
import org.sakaiproject.evaluation.toolaccess.EvaluationAccessAPI;
import org.sakaiproject.evaluation.toolaccess.ToolApi;
import org.springframework.context.MessageSource;
import com.opencsv.CSVParser;
import lombok.extern.slf4j.Slf4j;
import uk.org.ponder.messageutil.MessageLocator;
import uk.org.ponder.util.UniversalRuntimeException;
/**
*
* @author Steven Githens
* @author Aaron Zeckoski (aaronz@vt.edu)
*/
@Slf4j
public class ReportExporterBean implements ToolApi {
    // Section awareness/new report style bindings
    // These public fields are bound externally and fed to DownloadReportViewParams
    // in processReport(). NOTE(review): presumably bound by the RSF/Spring request
    // cycle — confirm before narrowing visibility.
    public String viewID = "";                   // report view id, becomes drvp.viewID
    public String fileName = "";                 // file name for the downloaded report
    public String[] groupIDs = new String[]{};   // eval group ids to include in the report
    public Long templateID = 0L;                 // evaluation template id
    public Long evalID = 0L;                     // id of the evaluation being exported
    public boolean newReportStyle = false;       // true selects the section-aware/new report style
    // Spring MessageSource used by MyMessageLocator to resolve message codes.
    private MessageSource messageSource;
    // the real MessageLocator won't work except in an RSAC session, which we can't reasonably create
    // this is a reasonable fake, given that we have no way to get a locale when exporting without one
    // Taken from LessonBuilder
    // This probably could be fixed in RSF to avoid the error and use a simpler version
    public MessageSource getMessageSource() {
        return messageSource;
    }
    public void setMessageSource(MessageSource messageSource) {
        this.messageSource = messageSource;
    }
public class MyMessageLocator extends MessageLocator {
public String getMessage(String[] code, Object[] args) {
if (code != null) {
for (String s: code) {
try {
return messageSource.getMessage(s, args, Locale.getDefault());
} catch (Exception e) {
log.warn(e.getLocalizedMessage(), e );
}
}
// if none found, just use the code
return code[0];
} else
return "";
}
}
    // Common logic service; used here to resolve the current user id for the
    // security check in exportReport().
    private EvalCommonLogic commonLogic;
    public void setCommonLogic(EvalCommonLogic commonLogic) {
        this.commonLogic = commonLogic;
    }
    // Evaluation service; used by export() to look up evaluations by id.
    private EvalEvaluationService evaluationService;
    public void setEvaluationService(EvalEvaluationService evaluationService) {
        this.evaluationService = evaluationService;
    }
    // Permission checker for viewing evaluation responses and resolving the
    // default viewable group ids.
    private ReportingPermissions reportingPermissions;
    public void setReportingPermissions(ReportingPermissions perms) {
        this.reportingPermissions = perms;
    }
    // Maps export type (view id) to the exporter implementation that renders it.
    private Map<String, ReportExporter> exportersMap;
    public void setExportersMap(Map<String, ReportExporter> exportersMap) {
        this.exportersMap = exportersMap;
    }
    /**
     * Builds the download view params from the current public binding fields
     * (viewID, templateID, evalID, groupIDs, fileName, newReportStyle).
     */
    public DownloadReportViewParams processReport()
    {
        return new DownloadReportViewParams( viewID, templateID, evalID, groupIDs, fileName, newReportStyle );
    }
    // Access API this bean registers itself with (as the ToolApi) in init().
    EvaluationAccessAPI evaluationAccessAPI = null;
    public void setEvaluationAccessAPI(EvaluationAccessAPI s) {
        evaluationAccessAPI = s;
    }
    //Export report with no evaluateeId (for single export)
    // Convenience overload: delegates to the CSV-groupIds variant with a null evaluateeId.
    public void exportReport(EvalEvaluation evaluation, String groupIds,OutputStream outputStream, String exportType) {
        exportReport(evaluation,groupIds,null,outputStream,exportType);
    }
//Special convenience method to allow passing of groupIds as a CSV
public void exportReport(EvalEvaluation evaluation, String groupIds, String evaluateeId, OutputStream outputStream, String exportType) {
String[] groupIdsArray = new String [] {};
CSVParser parser= new CSVParser();
if (groupIds != null) {
try {
groupIdsArray = parser.parseLine(groupIds);
} catch (IOException e) {
//Is fine if this happens, empty array still
}
}
exportReport(evaluation,groupIdsArray,evaluateeId,outputStream,exportType);
}
/**
 * General report export: looks up the exporter registered for the requested
 * view id, resolves/validates the viewable groups, and streams the report.
 *
 * @throws IllegalArgumentException when no exporter is registered for exportType
 * @throws SecurityException when the current user may not view the responses
 */
public void exportReport(EvalEvaluation evaluation, String[] groupIds, String evaluateeId, OutputStream outputStream, String exportType) {
    ReportExporter exporter = exportersMap.get(exportType);
    if (exporter == null) {
        throw new IllegalArgumentException("No exporter found for ViewID: " + exportType);
    }
    if (log.isDebugEnabled()) {
        log.debug("Found exporter: " + exporter.getClass() + " for drvp.viewID " + exportType);
    }
    if (groupIds == null || groupIds.length == 0) {
        // No explicit groups requested: use every group whose results the
        // current user is allowed to view.
        groupIds = reportingPermissions
                .getResultsViewableEvalGroupIdsForCurrentUser(evaluation)
                .toArray(new String[0]);
    }
    // permission check before any data is written out
    if (!reportingPermissions.canViewEvaluationResponses(evaluation, groupIds)) {
        String currentUserId = commonLogic.getCurrentUserId();
        throw new SecurityException("Invalid user attempting to access report downloads: "
                + currentUserId);
    }
    exporter.setMessageLocator(new MyMessageLocator());
    if (EvalEvaluationService.PDF_RESULTS_REPORT_INDIVIDUAL.equals(exportType)) {
        exporter.buildReport(evaluation, groupIds, evaluateeId, outputStream, newReportStyle);
    } else {
        exporter.buildReport(evaluation, groupIds, outputStream, newReportStyle);
    }
}
// Bean init hook: registers this bean with the evaluation access API so that
// exports can be triggered through it.
public void init() {
    evaluationAccessAPI.setToolApi(this);
}
/**
 * Streams an evaluation report to the HTTP response using the exporter
 * registered for {@code drvp.viewID}, setting download/inline headers
 * depending on the export format.
 *
 * @param drvp view params carrying eval id, view id, groups, filename, etc.
 * @param response response to write headers and the report body to
 * @return always true (the report has been written when this returns)
 * @throws IllegalArgumentException when no exporter exists for the view id
 */
public boolean export(DownloadReportViewParams drvp, HttpServletResponse response) {
    // get evaluation and template from DAO
    EvalEvaluation evaluation = evaluationService.getEvaluationById(drvp.evalId);
    OutputStream resultsOutputStream;
    // Get rid of spaces in the filename; plain replace -- no regex needed here
    drvp.filename = drvp.filename.replace( " ", "_" );
    ReportExporter exporter = exportersMap.get(drvp.viewID);
    if (exporter == null) {
        throw new IllegalArgumentException("No exporter found for ViewID: " + drvp.viewID);
    }
    if (log.isDebugEnabled()) {
        log.debug("Found exporter: " + exporter.getClass() + " for drvp.viewID " + drvp.viewID);
    }
    resultsOutputStream = getOutputStream(response);
    // If it's a CSV export in the new report format, we need to change the filename extension to '.zip' instead of '.csv',
    // as it will contain 2 files (instructor items and course items)
    if( isCSV( drvp.viewID ) && newReportStyle )
    {
        drvp.filename = drvp.filename.replace( ".csv", ".zip" );
    }
    // If it's a .csv or .pdf download, force the browser to download the file instead of displaying it inside the iframe
    if( isCSVTakers( drvp.viewID ) || isCSV( drvp.viewID ) || isPDF( drvp.viewID ) )
    {
        response.setHeader( "Content-disposition", "attachment; filename=\"" + drvp.filename + "\"" );
        response.setHeader( "Pragma", "public" );
        response.setHeader( "Expires", "0" );
        response.setHeader( "Cache-Control", "must-revalidate, post-check=0, pre-check=0" );
        response.setHeader( "Content-Transfer-Encoding", "binary" );
        response.setContentType( "application/octet-stream" );
    }
    // If it's anything else, just do the normal header content
    else
    {
        response.setHeader("Content-disposition", "inline; filename=\"" + drvp.filename+"\"");
        response.setContentType(exporter.getContentType());
    }
    // Support drvp.evaluateeId (may be null for aggregate exports)
    this.exportReport(evaluation,drvp.groupIds,drvp.evaluateeId,resultsOutputStream,drvp.viewID);
    return true;
}
// Utility methods

/** True when the view id is the CSV "takers" report. */
private boolean isCSVTakers ( String viewID ) { return viewID.equals( EvalEvaluationService.CSV_TAKERS_REPORT ); }
/** True when the view id is the CSV results report. */
private boolean isCSV ( String viewID ) { return viewID.equals( EvalEvaluationService.CSV_RESULTS_REPORT ); }
/** True for either PDF results report (aggregate or per-evaluatee). */
private boolean isPDF ( String viewID ) { return (viewID.equals( EvalEvaluationService.PDF_RESULTS_REPORT ) ||
        (viewID.equals( EvalEvaluationService.PDF_RESULTS_REPORT_INDIVIDUAL ))); }
/** Unwraps the response's output stream, converting IOException to a runtime error. */
private OutputStream getOutputStream(HttpServletResponse response){
    try {
        return response.getOutputStream();
    } catch (IOException ioe) {
        throw UniversalRuntimeException.accumulate(ioe,
                "Unable to get response stream for Evaluation Results Export");
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tom_roush.pdfbox.pdmodel.interactive.digitalsignature.visible;
import android.graphics.Bitmap;
import android.util.Log;
import java.io.IOException;
import java.io.OutputStream;
import java.util.List;
import com.tom_roush.harmony.awt.geom.AffineTransform;
import com.tom_roush.pdfbox.cos.COSArray;
import com.tom_roush.pdfbox.cos.COSDictionary;
import com.tom_roush.pdfbox.cos.COSName;
import com.tom_roush.pdfbox.pdmodel.PDDocument;
import com.tom_roush.pdfbox.pdmodel.PDPage;
import com.tom_roush.pdfbox.pdmodel.PDResources;
import com.tom_roush.pdfbox.pdmodel.common.PDRectangle;
import com.tom_roush.pdfbox.pdmodel.common.PDStream;
import com.tom_roush.pdfbox.pdmodel.graphics.form.PDFormXObject;
import com.tom_roush.pdfbox.pdmodel.graphics.image.LosslessFactory;
import com.tom_roush.pdfbox.pdmodel.graphics.image.PDImageXObject;
import com.tom_roush.pdfbox.pdmodel.interactive.annotation.PDAnnotationWidget;
import com.tom_roush.pdfbox.pdmodel.interactive.annotation.PDAppearanceDictionary;
import com.tom_roush.pdfbox.pdmodel.interactive.annotation.PDAppearanceStream;
import com.tom_roush.pdfbox.pdmodel.interactive.digitalsignature.PDSignature;
import com.tom_roush.pdfbox.pdmodel.interactive.form.PDAcroForm;
import com.tom_roush.pdfbox.pdmodel.interactive.form.PDField;
import com.tom_roush.pdfbox.pdmodel.interactive.form.PDSignatureField;
/**
* Implementation of {@link PDFTemplateBuilder}. This builds the signature PDF but doesn't keep the
* elements, these are kept in its PDF template structure.
*
* @author Vakhtang Koroghlishvili
*/
public class PDVisibleSigBuilder implements PDFTemplateBuilder
{
    // Accumulates every PDF object built by the methods below; the caller is
    // expected to invoke the create*/inject* methods in a specific order.
    private final PDFTemplateStructure pdfStructure;

    /**
     * Constructor, creates PDF template structure.
     */
    public PDVisibleSigBuilder()
    {
        pdfStructure = new PDFTemplateStructure();
        Log.i("PdfBox-Android", "PDF Structure has been created");
    }

    /** Creates a page sized from the designer's page dimensions and stores it. */
    @Override
    public void createPage(PDVisibleSignDesigner properties)
    {
        PDPage page = new PDPage(new PDRectangle(properties.getPageWidth(),
            properties.getPageHeight()));
        pdfStructure.setPage(page);
        Log.i("PdfBox-Android", "PDF page has been created");
    }

    /**
     * Creates a PDDocument and adds the page parameter to it and keeps this as a template in the
     * PDF template Structure.
     *
     * @param page page to add to the new template document
     * @throws IOException declared by the interface; not thrown here
     */
    @Override
    public void createTemplate(PDPage page) throws IOException
    {
        PDDocument template = new PDDocument();
        template.addPage(page);
        pdfStructure.setTemplate(template);
    }

    /** Creates the AcroForm, attaches it to the template's catalog and stores it. */
    @Override
    public void createAcroForm(PDDocument template)
    {
        PDAcroForm theAcroForm = new PDAcroForm(template);
        template.getDocumentCatalog().setAcroForm(theAcroForm);
        pdfStructure.setAcroForm(theAcroForm);
        Log.i("PdfBox-Android", "AcroForm has been created");
    }

    /** Returns the structure holding everything built so far. */
    @Override
    public PDFTemplateStructure getStructure()
    {
        return pdfStructure;
    }

    /** Creates an (unnamed) signature field on the given AcroForm and stores it. */
    @Override
    public void createSignatureField(PDAcroForm acroForm) throws IOException
    {
        PDSignatureField sf = new PDSignatureField(acroForm);
        pdfStructure.setSignatureField(sf);
        Log.i("PdfBox-Android", "Signature field has been created");
    }

    /**
     * Creates the PDSignature value, wires the field's first widget to the
     * page, and optionally sets the signer name (skipped when empty).
     */
    @Override
    public void createSignature(PDSignatureField pdSignatureField, PDPage page, String signerName)
        throws IOException
    {
        PDSignature pdSignature = new PDSignature();
        PDAnnotationWidget widget = pdSignatureField.getWidgets().get(0);
        pdSignatureField.setValue(pdSignature);
        widget.setPage(page);
        page.getAnnotations().add(widget);
        if (!signerName.isEmpty())
        {
            pdSignature.setName(signerName);
        }
        pdfStructure.setPdSignature(pdSignature);
        Log.i("PdfBox-Android", "PDSignature has been created");
    }

    /**
     * Flags the AcroForm dictionary for signatures (SigFlags), adds the
     * signature field to the form's field list and stores both in the structure.
     */
    @Override
    public void createAcroFormDictionary(PDAcroForm acroForm, PDSignatureField signatureField)
        throws IOException
    {
        @SuppressWarnings("unchecked")
        List<PDField> acroFormFields = acroForm.getFields();
        COSDictionary acroFormDict = acroForm.getCOSObject();
        acroForm.setSignaturesExist(true);
        acroForm.setAppendOnly(true);
        acroFormDict.setDirect(true);
        acroFormFields.add(signatureField);
        // default appearance string; "sylfaen" is the font resource name used here
        acroForm.setDefaultAppearance("/sylfaen 0 Tf 0 g");
        pdfStructure.setAcroFormFields(acroFormFields);
        pdfStructure.setAcroFormDictionary(acroFormDict);
        Log.i("PdfBox-Android", "AcroForm dictionary has been created");
    }

    /**
     * Computes the widget rectangle from the designer's x/y (top-left based,
     * hence the templateHeight - y conversion to PDF's bottom-left origin).
     */
    @Override
    public void createSignatureRectangle(PDSignatureField signatureField,
        PDVisibleSignDesigner properties) throws IOException
    {
        PDRectangle rect = new PDRectangle();
        rect.setUpperRightX(properties.getxAxis() + properties.getWidth());
        rect.setUpperRightY(properties.getTemplateHeight() - properties.getyAxis());
        rect.setLowerLeftY(properties.getTemplateHeight() - properties.getyAxis() -
            properties.getHeight());
        rect.setLowerLeftX(properties.getxAxis());
        signatureField.getWidgets().get(0).setRectangle(rect);
        pdfStructure.setSignatureRectangle(rect);
        Log.i("PdfBox-Android", "Signature rectangle has been created");
    }

    /**
     * {@inheritDoc }
     *
     * @deprecated use {@link #createAffineTransform(com.tom_roush.harmony.awt.geom.AffineTransform) }
     */
    @Override
    @Deprecated
    public void createAffineTransform(byte[] params)
    {
        // interprets the 6 bytes as the a,b,c,d,e,f matrix components
        AffineTransform transform = new AffineTransform(params[0], params[1], params[2],
            params[3], params[4], params[5]);
        pdfStructure.setAffineTransform(transform);
        Log.i("PdfBox-Android", "Matrix has been added");
    }

    /** Stores the given transform as the image placement matrix. */
    @Override
    public void createAffineTransform(AffineTransform affineTransform)
    {
        pdfStructure.setAffineTransform(affineTransform);
        Log.i("PdfBox-Android", "Matrix has been added");
    }

    /** Builds the standard ProcSet array (PDF/Text/ImageB/ImageC/ImageI). */
    @Override
    public void createProcSetArray()
    {
        COSArray procSetArr = new COSArray();
        procSetArr.add(COSName.getPDFName("PDF"));
        procSetArr.add(COSName.getPDFName("Text"));
        procSetArr.add(COSName.getPDFName("ImageB"));
        procSetArr.add(COSName.getPDFName("ImageC"));
        procSetArr.add(COSName.getPDFName("ImageI"));
        pdfStructure.setProcSet(procSetArr);
        Log.i("PdfBox-Android", "ProcSet array has been created");
    }

    /** Converts the bitmap to a lossless image XObject inside the template. */
    @Override
    public void createSignatureImage(PDDocument template, Bitmap image) throws IOException
    {
        pdfStructure.setImage(LosslessFactory.createFromImage(template, image));
        Log.i("PdfBox-Android", "Visible Signature Image has been created");
    }

    /**
     * {@inheritDoc }
     *
     * @deprecated use {@link #createFormatterRectangle(int[]) createFormatterRectangle(int[])}
     */
    @Override
    @Deprecated
    public void createFormatterRectangle(byte[] params)
    {
        // min/max normalization so the rectangle is valid regardless of corner order
        PDRectangle formatterRectangle = new PDRectangle();
        formatterRectangle.setLowerLeftX(Math.min(params[0],params[2]));
        formatterRectangle.setLowerLeftY(Math.min(params[1],params[3]));
        formatterRectangle.setUpperRightX(Math.max(params[0],params[2]));
        formatterRectangle.setUpperRightY(Math.max(params[1],params[3]));
        pdfStructure.setFormatterRectangle(formatterRectangle);
        Log.i("PdfBox-Android", "Formatter rectangle has been created");
    }

    /** int[] variant of the formatter rectangle; same corner normalization. */
    @Override
    public void createFormatterRectangle(int[] params)
    {
        PDRectangle formatterRectangle = new PDRectangle();
        formatterRectangle.setLowerLeftX(Math.min(params[0],params[2]));
        formatterRectangle.setLowerLeftY(Math.min(params[1],params[3]));
        formatterRectangle.setUpperRightX(Math.max(params[0],params[2]));
        formatterRectangle.setUpperRightY(Math.max(params[1],params[3]));
        pdfStructure.setFormatterRectangle(formatterRectangle);
        Log.i("PdfBox-Android", "Formatter rectangle has been created");
    }

    /** Creates the stream that backs the outer (holder) form XObject. */
    @Override
    public void createHolderFormStream(PDDocument template)
    {
        PDStream holderForm = new PDStream(template);
        pdfStructure.setHolderFormStream(holderForm);
        Log.i("PdfBox-Android", "Holder form stream has been created");
    }

    /** Creates an empty resource dictionary for the holder form. */
    @Override
    public void createHolderFormResources()
    {
        PDResources holderFormResources = new PDResources();
        pdfStructure.setHolderFormResources(holderFormResources);
        Log.i("PdfBox-Android", "Holder form resources have been created");
    }

    /** Assembles the holder form XObject from its stream, resources and bbox. */
    @Override
    public void createHolderForm(PDResources holderFormResources, PDStream holderFormStream,
        PDRectangle bbox)
    {
        PDFormXObject holderForm = new PDFormXObject(holderFormStream);
        holderForm.setResources(holderFormResources);
        holderForm.setBBox(bbox);
        holderForm.setFormType(1);
        pdfStructure.setHolderForm(holderForm);
        Log.i("PdfBox-Android", "Holder form has been created");
    }

    /**
     * Creates the appearance dictionary whose normal appearance is the holder
     * form, and attaches it to the field's first widget.
     * (Parameter name "holderForml" is kept as-is from the original API.)
     */
    @Override
    public void createAppearanceDictionary(PDFormXObject holderForml,
        PDSignatureField signatureField) throws IOException
    {
        PDAppearanceDictionary appearance = new PDAppearanceDictionary();
        appearance.getCOSObject().setDirect(true);
        PDAppearanceStream appearanceStream = new PDAppearanceStream(holderForml.getCOSObject());
        appearance.setNormalAppearance(appearanceStream);
        signatureField.getWidgets().get(0).setAppearance(appearance);
        pdfStructure.setAppearanceDictionary(appearance);
        Log.i("PdfBox-Android", "PDF appearance dictionary has been created");
    }

    /** Creates the stream backing the inner form (nested inside the holder form). */
    @Override
    public void createInnerFormStream(PDDocument template)
    {
        PDStream innerFormStream = new PDStream(template);
        // NOTE: setter name "setInnterFormStream" is a typo in the structure API, kept as-is
        pdfStructure.setInnterFormStream(innerFormStream);
        Log.i("PdfBox-Android", "Stream of another form (inner form - it will be inside holder form) " +
            "has been created");
    }

    /** Creates an empty resource dictionary for the inner form. */
    @Override
    public void createInnerFormResource()
    {
        PDResources innerFormResources = new PDResources();
        pdfStructure.setInnerFormResources(innerFormResources);
        Log.i("PdfBox-Android", "Resources of another form (inner form - it will be inside holder form)" +
            "have been created");
    }

    /** Assembles the inner form XObject from its stream, resources and bbox. */
    @Override
    public void createInnerForm(PDResources innerFormResources,
        PDStream innerFormStream,
        PDRectangle bbox)
    {
        PDFormXObject innerForm = new PDFormXObject(innerFormStream);
        innerForm.setResources(innerFormResources);
        innerForm.setBBox(bbox);
        innerForm.setFormType(1);
        pdfStructure.setInnerForm(innerForm);
        Log.i("PdfBox-Android", "Another form (inner form - it will be inside holder form) has been created");
    }

    /** Registers the inner form under /FRM in the holder form's resources. */
    @Override
    public void insertInnerFormToHolderResources(PDFormXObject innerForm,
        PDResources holderFormResources)
    {
        holderFormResources.put(COSName.FRM, innerForm);
        pdfStructure.setInnerFormName(COSName.FRM);
        Log.i("PdfBox-Android", "Now inserted inner form inside holder form");
    }

    /** Creates the stream backing the image form. */
    @Override
    public void createImageFormStream(PDDocument template)
    {
        PDStream imageFormStream = new PDStream(template);
        pdfStructure.setImageFormStream(imageFormStream);
        Log.i("PdfBox-Android", "Created image form stream");
    }

    /** Creates an empty resource dictionary for the image form. */
    @Override
    public void createImageFormResources()
    {
        PDResources imageFormResources = new PDResources();
        pdfStructure.setImageFormResources(imageFormResources);
        Log.i("PdfBox-Android", "Created image form resources");
    }

    /**
     * Builds the image form XObject ("n2" layer), registers it in the inner
     * form's resources and adds the signature image under the name "img".
     */
    @Override
    public void createImageForm(PDResources imageFormResources, PDResources innerFormResource,
        PDStream imageFormStream, PDRectangle bbox, AffineTransform at,
        PDImageXObject img) throws IOException
    {
        PDFormXObject imageForm = new PDFormXObject(imageFormStream);
        imageForm.setBBox(bbox);
        imageForm.setMatrix(at);
        imageForm.setResources(imageFormResources);
        imageForm.setFormType(1);
        imageFormResources.getCOSObject().setDirect(true);
        COSName imageFormName = COSName.getPDFName("n2");
        innerFormResource.put(imageFormName, imageForm);
        COSName imageName = imageFormResources.add(img, "img");
        pdfStructure.setImageForm(imageForm);
        pdfStructure.setImageFormName(imageFormName);
        pdfStructure.setImageName(imageName);
        Log.i("PdfBox-Android", "Created image form");
    }

    /** Creates an empty "n0" background layer form inside the inner form resources. */
    @Override
    public void createBackgroundLayerForm(PDResources innerFormResource, PDRectangle bbox)
        throws IOException
    {
        // create blank n0 background layer form
        PDFormXObject n0Form = new PDFormXObject(pdfStructure.getTemplate().getDocument().createCOSStream());
        n0Form.setBBox(bbox);
        n0Form.setResources(new PDResources());
        n0Form.setFormType(1);
        innerFormResource.put(COSName.getPDFName("n0"), n0Form);
        Log.i("PdfBox-Android", "Created background layer form");
    }

    /** Sets the same ProcSet array on the page and every resource dictionary. */
    @Override
    public void injectProcSetArray(PDFormXObject innerForm, PDPage page,
        PDResources innerFormResources, PDResources imageFormResources,
        PDResources holderFormResources, COSArray procSet)
    {
        innerForm.getResources().getCOSObject().setItem(COSName.PROC_SET, procSet);
        page.getCOSObject().setItem(COSName.PROC_SET, procSet);
        innerFormResources.getCOSObject().setItem(COSName.PROC_SET, procSet);
        imageFormResources.getCOSObject().setItem(COSName.PROC_SET, procSet);
        holderFormResources.getCOSObject().setItem(COSName.PROC_SET, procSet);
        Log.i("PdfBox-Android", "Inserted ProcSet to PDF");
    }

    /**
     * Writes the content streams that chain holder form -> inner form ->
     * (n0 background, n2 image) layers; the image is scaled to the formatter
     * rectangle's width/height via the cm matrix.
     */
    @Override
    public void injectAppearanceStreams(PDStream holderFormStream, PDStream innerFormStream,
        PDStream imageFormStream, COSName imageFormName,
        COSName imageName, COSName innerFormName,
        PDVisibleSignDesigner properties) throws IOException
    {
        // Use width and height of BBox as values for transformation matrix.
        int width = (int) this.getStructure().getFormatterRectangle().getWidth();
        int height = (int) this.getStructure().getFormatterRectangle().getHeight();
        String imgFormContent = "q " + width + " 0 0 " + height + " 0 0 cm /" + imageName.getName() + " Do Q\n";
        String holderFormContent = "q 1 0 0 1 0 0 cm /" + innerFormName.getName() + " Do Q\n";
        String innerFormContent = "q 1 0 0 1 0 0 cm /n0 Do Q q 1 0 0 1 0 0 cm /" + imageFormName.getName() + " Do Q\n";
        appendRawCommands(pdfStructure.getHolderFormStream().createOutputStream(), holderFormContent);
        appendRawCommands(pdfStructure.getInnerFormStream().createOutputStream(), innerFormContent);
        appendRawCommands(pdfStructure.getImageFormStream().createOutputStream(), imgFormContent);
        Log.i("PdfBox-Android", "Injected appearance stream to pdf");
    }

    /** Writes the command string as UTF-8 and closes the stream. */
    public void appendRawCommands(OutputStream os, String commands) throws IOException
    {
        os.write(commands.getBytes("UTF-8"));
        os.close();
    }

    /** Stores the template's low-level COS document as the visual signature. */
    @Override
    public void createVisualSignature(PDDocument template)
    {
        pdfStructure.setVisualSignature(template.getDocument());
        Log.i("PdfBox-Android", "Visible signature has been created");
    }

    /** Marks the widget dictionary for update and attaches the holder form resources as /DR. */
    @Override
    public void createWidgetDictionary(PDSignatureField signatureField,
        PDResources holderFormResources) throws IOException
    {
        COSDictionary widgetDict = signatureField.getWidgets().get(0).getCOSObject();
        widgetDict.setNeedToBeUpdated(true);
        widgetDict.setItem(COSName.DR, holderFormResources.getCOSObject());
        pdfStructure.setWidgetDictionary(widgetDict);
        Log.i("PdfBox-Android", "WidgetDictionary has been created");
    }

    /** Closes both the supplied template and the one held in the structure. */
    @Override
    public void closeTemplate(PDDocument template) throws IOException
    {
        template.close();
        pdfStructure.getTemplate().close();
    }
}
| |
/**
* VariableRecordExp.java
* ---------------------------------
* Copyright (c) 2016
* RESOLVE Software Research Group
* School of Computing
* Clemson University
* All rights reserved.
* ---------------------------------
* This file is subject to the terms and conditions defined in
* file 'LICENSE.txt', which is part of this source code package.
*/
package edu.clemson.cs.r2jt.absyn;
import edu.clemson.cs.r2jt.collections.List;
import edu.clemson.cs.r2jt.data.Location;
import edu.clemson.cs.r2jt.data.PosSymbol;
import edu.clemson.cs.r2jt.collections.Iterator;
/**
 * AST node for a record-valued variable expression: an optionally qualified
 * name plus the list of field variable expressions it contains.
 */
public class VariableRecordExp extends VariableExp {

    // ===========================================================
    // Variables
    // ===========================================================

    /** The location member. */
    private Location location;

    /** The qualifier member. */
    private PosSymbol qualifier;

    /** The name member. */
    private PosSymbol name;

    /** The fields member. */
    private List<VariableExp> fields;

    // ===========================================================
    // Constructors
    // ===========================================================

    public VariableRecordExp() {}

    public VariableRecordExp(Location location, PosSymbol qualifier,
            PosSymbol name, List<VariableExp> fields) {
        this.location = location;
        this.qualifier = qualifier;
        this.name = name;
        this.fields = fields;
    }

    /**
     * Returns a copy of this expression with each field substituted via the
     * given map; location, qualifier and name are carried over unchanged.
     */
    public Exp substituteChildren(java.util.Map<Exp, Exp> substitutions) {
        List<VariableExp> newFields = new List<VariableExp>();
        for (VariableExp v : fields) {
            newFields.add((VariableExp) substitute(v, substitutions));
        }
        return new VariableRecordExp(location, qualifier, name, newFields);
    }

    // ===========================================================
    // Accessor Methods
    // ===========================================================

    // -----------------------------------------------------------
    // Get Methods
    // -----------------------------------------------------------

    /** Returns the value of the location variable. */
    public Location getLocation() {
        return location;
    }

    /** Returns the value of the qualifier variable. */
    public PosSymbol getQualifier() {
        return qualifier;
    }

    /** Returns the value of the name variable. */
    public PosSymbol getName() {
        return name;
    }

    /** Returns the value of the fields variable. */
    public List<VariableExp> getFields() {
        return fields;
    }

    // -----------------------------------------------------------
    // Set Methods
    // -----------------------------------------------------------

    /** Sets the location variable to the specified value. */
    public void setLocation(Location location) {
        this.location = location;
    }

    /** Sets the qualifier variable to the specified value. */
    public void setQualifier(PosSymbol qualifier) {
        this.qualifier = qualifier;
    }

    /** Sets the name variable to the specified value. */
    public void setName(PosSymbol name) {
        this.name = name;
    }

    /** Sets the fields variable to the specified value. */
    public void setFields(List<VariableExp> fields) {
        this.fields = fields;
    }

    // ===========================================================
    // Public Methods
    // ===========================================================

    /** Accepts a ResolveConceptualVisitor. */
    public void accept(ResolveConceptualVisitor v) {
        v.visitVariableRecordExp(this);
    }

    /** Returns a formatted text string of this class. */
    public String asString(int indent, int increment) {
        // StringBuilder instead of the synchronized StringBuffer - single-threaded use
        StringBuilder sb = new StringBuilder();
        printSpace(indent, sb);
        sb.append("VariableRecordExp\n");
        if (qualifier != null) {
            sb.append(qualifier.asString(indent + increment, increment));
        }
        if (name != null) {
            sb.append(name.asString(indent + increment, increment));
        }
        if (fields != null) {
            sb.append(fields.asString(indent + increment, increment));
        }
        return sb.toString();
    }

    /**
     * Returns a formatted text string of this class.
     * NOTE: intentionally returns the empty string (the original formatting
     * body was disabled); kept for interface compatibility.
     */
    public String toString(int indent) {
        return "";
    }

    /** Returns true if the variable is found in any sub expression
        of this one. **/
    public boolean containsVar(String varName, boolean IsOldExp) {
        Iterator<VariableExp> i = fields.iterator();
        while (i.hasNext()) {
            VariableExp temp = i.next();
            if (temp != null) {
                if (temp.containsVar(varName, IsOldExp)) {
                    return true;
                }
            }
        }
        return false;
    }

    /** Returns the field expressions, upcast to Exp. */
    public List<Exp> getSubExpressions() {
        List<Exp> list = new List<Exp>();
        Iterator<VariableExp> fieldsIt = fields.iterator();
        while (fieldsIt.hasNext()) {
            list.add((Exp) (fieldsIt.next()));
        }
        return list;
    }

    /** Replaces the field at the given index. */
    public void setSubExpression(int index, Exp e) {
        fields.set(index, (VariableExp) e);
    }

    /**
     * Returns a copy carrying over math type information.
     * NOTE(review): this shares the fields list with the original (shallow
     * copy) -- mutations via setSubExpression affect both; verify intended.
     */
    public Exp copy() {
        VariableRecordExp result =
                new VariableRecordExp(location, qualifier, name, fields);
        result.setMathType(myMathType);
        result.setMathTypeValue(myMathTypeValue);
        return result;
    }
}
| |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package docking.widgets.fieldpanel.field;
import java.awt.*;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import javax.swing.JComponent;
import org.apache.commons.lang3.StringUtils;
import docking.widgets.fieldpanel.internal.FieldBackgroundColorManager;
import docking.widgets.fieldpanel.internal.PaintContext;
import docking.widgets.fieldpanel.support.*;
import generic.json.Json;
/**
* A {@link TextField} that takes in other TextFields.
*
* <P>This class allows clients to create custom text layout behavior by combining individual
* TextFields that dictate layout behavior. As an example, consider this rendering:
* <pre>
* 1) This is some text...
* 2) This
* is
* more
* text
* </pre>
* In this example, 1) is a row of text inside of a {@link ClippingTextField}. Row 2) is a
* multi-line text rendering specified in a single {@link FlowLayoutTextField}, using a
* narrow width to trigger the field to place each element on its own line.
*/
public class CompositeVerticalLayoutTextField implements TextField {
// the view rows, which may be a clipped version of the client fields
private List<FieldRow> fieldRows;
private int startX;
private int width;
private int preferredWidth;
private HighlightFactory hlFactory;
private int height;
private int heightAbove;
private int numRows;
private int numDataRows;
private boolean isPrimary;
private String fullText;
// all text, including any clipped text; lines.size() == fields.size()
private List<String> lines;
// used in the getText() method to separate rows without adding newlines
private String rowSeparator;
private boolean isClipped;
/**
 * Constructs the composite field with the default row separator (a single space).
 *
 * @param fields the sub-fields, one per visual row group
 * @param startX x position of the field
 * @param width field display width
 * @param maxLines max number of sub-fields to lay out before clipping
 * @param hlFactory highlight factory
 */
public CompositeVerticalLayoutTextField(List<TextField> fields, int startX, int width,
        int maxLines, HighlightFactory hlFactory) {
    this(fields, startX, width, maxLines, hlFactory, " ");
}
/**
 * Full constructor. Initialization order matters: heightAbove must be set
 * before layoutRows() (row y-offsets start at -heightAbove), and the rows must
 * be laid out before the row/width/height calculations run.
 *
 * @param rowSeparator string used in getText() to separate rows without newlines
 */
protected CompositeVerticalLayoutTextField(List<TextField> fields, int startX, int width,
        int maxLines, HighlightFactory hlFactory, String rowSeparator) {
    this.startX = startX;
    this.width = width;
    this.hlFactory = hlFactory;
    this.rowSeparator = rowSeparator;
    lines = generateLines(fields);
    fullText = generateText(fields, rowSeparator);
    // baseline offset comes from the first sub-field
    heightAbove = (fields.get(0)).getHeightAbove();
    fieldRows = layoutRows(fields, maxLines);
    calculateRows(fields);
    calculatePreferredWidth();
    calculateHeight();
}
// Collects each sub-field's text (with its internal line separators), one
// entry per field; retained even for fields later clipped from the view.
private List<String> generateLines(List<TextField> fields) {
    return fields.stream()
            .map(TextField::getTextWithLineSeparators)
            .collect(Collectors.toList());
}
// Concatenates every sub-field's text, appending the delimiter after each
// element -- including the last one, matching the original trailing-separator
// behavior.
private String generateText(List<TextField> fields, String delimiter) {
    return fields.stream()
            .map(field -> field.getText() + delimiter)
            .collect(Collectors.joining());
}
/**
 * Lays out up to maxLines sub-fields as FieldRows, tracking each row's
 * starting screen row and y offset. When there are more fields than maxLines,
 * the last visible slot is replaced by a clipped single-element field and the
 * composite is marked clipped.
 */
private List<FieldRow> layoutRows(List<TextField> fields, int maxLines) {
    List<FieldRow> newSubFields = new ArrayList<>();
    int startY = -heightAbove;
    int ySoFar = startY;
    int currentRow = 0;
    boolean tooManyLines = fields.size() > maxLines;
    for (int i = 0; i < fields.size() && i < maxLines; i++) {
        TextField field = fields.get(i);
        if (tooManyLines && (i == maxLines - 1)) {
            // last visible slot: show only the field's first element, clipped
            FieldElement element = field.getFieldElement(0, 0);
            TextField newField = createClippedField(element);
            newSubFields.add(new FieldRow(newField, currentRow, ySoFar));
            isClipped = true;
        }
        else {
            newSubFields.add(new FieldRow(field, currentRow, ySoFar));
            // a sub-field may itself be clipped horizontally
            isClipped |= field.isClipped();
        }
        ySoFar += field.getHeight();
        currentRow += field.getNumRows();
    }
    isClipped |= tooManyLines;
    return newSubFields;
}
// Wraps the element together with a 500-wide strut in a composite and hands
// it to a ClippingTextField at this field's x/width.
private ClippingTextField createClippedField(FieldElement element) {
    FieldElement padded = new CompositeFieldElement(
        new FieldElement[] { element, new StrutFieldElement(500) });
    return new ClippingTextField(startX, width, padded, hlFactory);
}
// Accumulates the total height from the laid-out rows.
private void calculateHeight() {
    height += fieldRows.stream()
            .mapToInt(row -> row.field.getHeight())
            .sum();
}

// Preferred width is the widest row (never below 0, matching the original
// zero-initialized max).
private void calculatePreferredWidth() {
    preferredWidth = fieldRows.stream()
            .mapToInt(row -> row.field.getPreferredWidth())
            .reduce(0, Math::max);
}

// Screen rows come from the laid-out (possibly clipped) rows; data rows come
// from the original, un-clipped field list.
private void calculateRows(List<TextField> fields) {
    numRows = fieldRows.stream()
            .mapToInt(row -> row.field.getNumRows())
            .sum();
    numDataRows = fields.stream()
            .mapToInt(TextField::getNumDataRows)
            .sum();
}
@Override
public String toString() {
    return getText();
}

/** Display width of the field. */
@Override
public int getWidth() {
    return width;
}

/** Widest preferred width among the sub-rows. */
@Override
public int getPreferredWidth() {
    return preferredWidth;
}

/** Total height of all laid-out rows. */
@Override
public int getHeight() {
    return height;
}

@Override
public int getStartX() {
    return startX;
}

/** Data rows of the original (un-clipped) fields. */
@Override
public int getNumDataRows() {
    return numDataRows;
}

/** Screen rows of the laid-out (possibly clipped) rows. */
@Override
public int getNumRows() {
    return numRows;
}

/** Height above the first row's baseline. */
@Override
public int getHeightAbove() {
    return heightAbove;
}

/** Remaining height below the first row's baseline. */
@Override
public int getHeightBelow() {
    return height - heightAbove;
}

@Override
public boolean isPrimary() {
    return isPrimary;
}

@Override
public void rowHeightChanged(int newHeightAbove, int newHeightBelow) {
    // don't care
}

/** True when any sub-row is clipped or rows were dropped for maxLines. */
@Override
public boolean isClipped() {
    return isClipped;
}

@Override
public void setPrimary(boolean state) {
    isPrimary = state;
}

/** All text joined by the row separator (no newlines). */
@Override
public String getText() {
    return fullText;
}

/** All text, one line per sub-field, including any clipped text. */
@Override
public String getTextWithLineSeparators() {
    return StringUtils.join(lines, '\n');
}
/**
 * Paints each sub-row in turn, translating the graphics down by each row's
 * height and skipping rows outside the clip region; the cursor location is
 * remapped into the row that contains it. The graphics translation is restored
 * before returning.
 */
@Override
public void paint(JComponent c, Graphics g, PaintContext context, Rectangle clip,
        FieldBackgroundColorManager colorManager, RowColLocation cursorLocation,
        int rowHeight) {

    // the graphics have been translated such that the first line of text's base line is
    // at y=0 (So if we are not clipped, we will drawing from a negative value that is the
    // font's height above the baseline (-heightAbove) to rowHeight (-heightAbove)
    int myStartY = -heightAbove;
    int myEndY = myStartY + rowHeight;
    int clipStartY = clip.y;
    int clipEndY = clip.y + clip.height;

    Color fieldBackgroundColor = colorManager.getBackgroundColor();
    if (fieldBackgroundColor != null) {
        g.setColor(fieldBackgroundColor);

        // restrict background rectangle to clipping rectangle
        int startY = Math.max(myStartY, clipStartY);
        int endY = Math.min(myEndY, clipEndY);
        int clippedHeight = endY - startY;
        g.fillRect(startX, startY, width, clippedHeight);
    }

    // resolve which row holds the cursor, if any
    FieldRow cursorRow = null;
    if (cursorLocation != null) {
        cursorRow = getFieldRow(cursorLocation.row());
    }

    int startY = myStartY;
    int translatedY = 0;
    for (int i = 0; i < fieldRows.size(); i++) {

        // if past clipping region we are done
        if (startY > clipEndY) {
            break;
        }

        FieldRow fieldRow = fieldRows.get(i);
        TextField field = fieldRow.field;
        int subFieldHeight = fieldRow.field.getHeight();
        int endY = startY + subFieldHeight;

        // if any part of the line is in the clip region, draw it
        if (endY >= clipStartY) {
            RowColLocation cursor = null;
            if (fieldRow == cursorRow) {
                // convert the composite row number to the sub-field's own row numbering
                int relativeRow = fieldRow.getRelativeRow(cursorLocation.row());
                cursor = cursorLocation.withRow(relativeRow);
            }
            field.paint(c, g, context, clip, colorManager, cursor, rowHeight);
        }

        // translate for next row of text
        startY += subFieldHeight;
        g.translate(0, subFieldHeight);
        translatedY += subFieldHeight;
    }

    // restore the graphics to where it was when we started.
    g.translate(0, -translatedY);
}
@Override
public boolean contains(int x, int y) {
    // Hit test against this field's bounds. The y coordinate is relative to
    // the first row's baseline, hence the -heightAbove offset for the top.
    boolean withinX = (x >= startX) && (x < startX + width);
    boolean withinY = (y >= -heightAbove) && (y < height - heightAbove);
    return withinX && withinY;
}
/**
 * Returns the string placed between sub-field rows when this field's text
 * is flattened to a single string (its length is accounted for in
 * screenLocationToTextOffset/textOffsetToScreenLocation).
 */
public String getRowSeparator() {
    return rowSeparator;
}
/**
 * Finds the sub-field that renders the given composite screen row. If the
 * row is past the end, the last sub-field is returned.
 */
private FieldRow getFieldRow(int screenRow) {
    int rowsConsumed = 0;
    for (FieldRow candidate : fieldRows) {
        rowsConsumed += candidate.field.getNumRows();
        // candidate covers screen rows [rowsConsumed - numRows, rowsConsumed)
        if (rowsConsumed > screenRow) {
            return candidate;
        }
    }
    return fieldRows.get(fieldRows.size() - 1);
}
/**
 * Like getFieldRow(), but walks data (model) rows rather than screen rows;
 * falls back to the last sub-field when dataRow is out of range.
 */
private FieldRow getFieldRowFromDataRow(int dataRow) {
    int dataRowsConsumed = 0;
    for (FieldRow candidate : fieldRows) {
        dataRowsConsumed += candidate.field.getNumDataRows();
        if (dataRowsConsumed > dataRow) {
            return candidate;
        }
    }
    return fieldRows.get(fieldRows.size() - 1);
}
// Returns the sub-fields from the first one up to and including the one that
// contains screen row maxRow (all of them when maxRow is past the end).
private List<FieldRow> getAllRows(int maxRow) {
    int currentRow = 0;
    List<FieldRow> list = new ArrayList<>();
    for (FieldRow row : fieldRows) {
        // currentRow is the first screen row of `row`; stop once we have
        // already passed maxRow
        if (currentRow > maxRow) {
            break;
        }
        list.add(row);
        currentRow += row.field.getNumRows();
    }
    return list;
}
// for testing -- exposes the sub-fields covering screen rows 0..maxRowInclusive
protected List<TextField> getAllRowsUpTo(int maxRowInclusive) {
    List<TextField> fields = new ArrayList<>();
    for (FieldRow fieldRow : getAllRows(maxRowInclusive)) {
        fields.add(fieldRow.field);
    }
    return fields;
}
@Override
public FieldElement getFieldElement(int screenRow, int screenColumn) {
    // Delegate to the sub-field containing screenRow, translating the
    // composite screen row into that sub-field's local row numbering.
    FieldRow target = getFieldRow(screenRow);
    return target.field.getFieldElement(target.getRelativeRow(screenRow), screenColumn);
}

@Override
public int getNumCols(int row) {
    // Column count of the sub-field row that renders `row`.
    FieldRow target = getFieldRow(row);
    return target.field.getNumCols(target.getRelativeRow(row));
}

@Override
public int getX(int row, int col) {
    // X coordinate comes straight from the owning sub-field.
    FieldRow target = getFieldRow(row);
    return target.field.getX(target.getRelativeRow(row), col);
}
@Override
public int getY(int row) {
    // Returns the y coordinate of the top of the sub-field containing screen
    // row `row`. Heights are added one iteration late (via lastHeight), so
    // when the displayRowOffset check runs, ySoFar is the top of the current
    // fieldRow; the containing sub-field's own height is never added.
    int startY = -heightAbove;
    int ySoFar = startY;
    List<FieldRow> rows = getAllRows(row);
    int lastHeight = 0;
    for (FieldRow fieldRow : rows) {
        ySoFar += lastHeight;
        // triggers only when `row` is this sub-field's first screen row;
        // otherwise the loop falls through with ySoFar at the last top
        if (fieldRow.displayRowOffset >= row) {
            return ySoFar;
        }
        lastHeight = fieldRow.field.getHeight();
    }
    return ySoFar;
}

@Override
public int getRow(int y) {
    // Maps a y coordinate to a composite screen row. Our start y value is
    // the baseline minus the height above the baseline; anything above that
    // clamps to row 0, anything below the last sub-field clamps to the last row.
    int startY = -heightAbove;
    if (y < startY) {
        return 0;
    }
    int ySoFar = startY;
    for (FieldRow fieldRow : fieldRows) {
        int fieldHeight = fieldRow.field.getHeight();
        int bottom = fieldHeight + ySoFar;
        if (bottom > y) {
            // y falls inside this sub-field: ask it for its local row, then
            // convert back to the composite's screen row numbering
            int relativeY = y - ySoFar;
            int relativeRow = fieldRow.field.getRow(relativeY);
            int displayRow = fieldRow.fromRelativeRow(relativeRow);
            return displayRow;
        }
        ySoFar += fieldHeight;
    }
    return getNumRows() - 1;
}
@Override
public int getCol(int row, int x) {
    // Delegate to the sub-field containing `row`, using its local row number.
    FieldRow target = getFieldRow(row);
    return target.field.getCol(target.getRelativeRow(row), x);
}

@Override
public boolean isValid(int row, int col) {
    // Out-of-range rows are never valid; otherwise the owning sub-field decides.
    boolean rowInRange = (row >= 0) && (row < getNumRows());
    if (!rowInRange) {
        return false;
    }
    FieldRow target = getFieldRow(row);
    return target.field.isValid(target.getRelativeRow(row), col);
}
@Override
public Rectangle getCursorBounds(int row, int col) {
    // Bounds of the cursor at (row, col) in this composite's coordinate
    // space, or null when the row is out of range.
    if ((row < 0) || (row >= getNumRows())) {
        return null;
    }
    List<FieldRow> rows = getAllRows(row);
    FieldRow cursorRow = rows.get(rows.size() - 1); // the sub-field containing `row`
    int relativeRow = cursorRow.getRelativeRow(row);
    Rectangle r = cursorRow.field.getCursorBounds(relativeRow, col);
    // shift the sub-field-relative bounds down by the heights of all
    // preceding sub-fields
    for (int i = 0; i < rows.size() - 1; i++) {
        FieldRow previousRow = rows.get(i);
        r.y += previousRow.field.getHeight();
    }
    return r;
}
@Override
public int getScrollableUnitIncrement(int topOfScreen, int direction, int max) {
    // Distance to scroll so a row boundary lines up with the top of the
    // screen; `max` is returned when topOfScreen lies outside this field.
    if ((topOfScreen < -heightAbove) || (topOfScreen > height - heightAbove)) {
        return max;
    }
    int row = getRow(topOfScreen);
    int y = getY(row);
    int rowOffset = topOfScreen - y; // how far into the row the screen top sits
    FieldRow fieldRow = getFieldRow(row);
    int rowHeight = fieldRow.field.getHeight();
    if (direction > 0) { // if scrolling down
        return rowHeight - rowOffset; // remainder of the current row
    }
    else if (rowOffset == 0) {
        return -rowHeight; // already aligned; move up a full row
    }
    else {
        return -rowOffset; // snap back up to the top of the current row
    }
}
@Override
public RowColLocation screenToDataLocation(int screenRow, int screenColumn) {
    // Clamp to a valid screen position before delegating to the sub-field.
    screenRow = Math.min(screenRow, numRows - 1);
    screenRow = Math.max(screenRow, 0);
    FieldRow fieldRow = getFieldRow(screenRow);
    screenColumn = Math.min(screenColumn, fieldRow.field.getText().length());
    screenColumn = Math.max(screenColumn, 0);
    int relativeRow = fieldRow.getRelativeRow(screenRow);
    return fieldRow.field.screenToDataLocation(relativeRow, screenColumn);
}

@Override
public RowColLocation dataToScreenLocation(int dataRow, int dataColumn) {
    FieldRow fieldRow = getFieldRowFromDataRow(dataRow);
    RowColLocation location = fieldRow.field.dataToScreenLocation(dataRow, dataColumn);
    // NOTE(review): despite its name, `relativeRow` here is the COMPOSITE
    // screen row produced by fromRelativeRow(), not a sub-field-local row
    int relativeRow = fieldRow.fromRelativeRow(location.row());
    return location.withRow(relativeRow);
}
@Override
public int screenLocationToTextOffset(int row, int col) {
    // Converts a screen (row, col) into an offset into the flattened text,
    // where sub-fields are joined by rowSeparator.
    if (row >= numRows) {
        return getText().length();
    }
    int extraSpace = rowSeparator.length();
    int len = 0;
    List<FieldRow> rows = getAllRows(row);
    int n = rows.size() - 1;
    // full text (plus separator) of every sub-field before the one
    // containing `row`
    for (int i = 0; i < n; i++) {
        FieldRow fieldRow = rows.get(i);
        len += fieldRow.field.getText().length() + extraSpace;
    }
    FieldRow lastRow = rows.get(n);
    int relativeRow = lastRow.getRelativeRow(row);
    len += lastRow.field.screenLocationToTextOffset(relativeRow, col);
    return len;
}

@Override
public RowColLocation textOffsetToScreenLocation(int textOffset) {
    // Inverse of screenLocationToTextOffset(): walk the sub-fields until the
    // accumulated text length (including separators) passes textOffset.
    int extraSpace = rowSeparator.length();
    int n = fieldRows.size();
    int textOffsetSoFar = 0;
    for (int i = 0; i < n; i++) {
        if (textOffsetSoFar > textOffset) {
            break;
        }
        FieldRow fieldRow = fieldRows.get(i);
        int length = fieldRow.field.getText().length() + extraSpace;
        int end = textOffsetSoFar + length;
        if (end > textOffset) {
            // offset falls inside this sub-field; translate its local
            // location back to composite screen coordinates
            int relativeOffset = textOffset - textOffsetSoFar;
            RowColLocation location = fieldRow.field.textOffsetToScreenLocation(relativeOffset);
            int screenRow = fieldRow.fromRelativeRow(location.row());
            return location.withRow(screenRow);
        }
        textOffsetSoFar += length;
    }
    // past the end of the text -- clamp to the end of the last row
    FieldRow lastRow = fieldRows.get(fieldRows.size() - 1);
    int length = lastRow.field.getText().length();
    return new DefaultRowColLocation(numRows - 1, length);
}
/**
 * Pairs a sub-field with its position (first screen row and y offset)
 * inside this composite field.
 */
private class FieldRow {
    private TextField field;
    private int displayRowOffset; // first composite screen row of this sub-field
    private int yOffset;          // y of this sub-field's top within the composite

    FieldRow(TextField field, int rowOffset, int yOffset) {
        this.field = field;
        this.displayRowOffset = rowOffset;
        // BUG FIX: the yOffset parameter was previously never assigned,
        // leaving the field at its default of 0 so getY() always returned 0
        this.yOffset = yOffset;
    }

    // used to turn given row into 0 for this composite field
    int getRelativeRow(int displayRow) {
        return displayRow - displayRowOffset;
    }

    int fromRelativeRow(int relativeRow) {
        return relativeRow + displayRowOffset;
    }

    int getY() {
        return yOffset;
    }

    @Override
    public String toString() {
        return Json.toString(this);
    }
}
}
| |
package com.gdxjam.utils;
import com.badlogic.ashley.core.Entity;
import com.badlogic.ashley.core.PooledEngine;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.g2d.ParticleEffectPool.PooledEffect;
import com.badlogic.gdx.graphics.g2d.TextureRegion;
import com.badlogic.gdx.math.MathUtils;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.physics.box2d.Body;
import com.badlogic.gdx.physics.box2d.BodyDef;
import com.badlogic.gdx.physics.box2d.BodyDef.BodyType;
import com.badlogic.gdx.physics.box2d.CircleShape;
import com.badlogic.gdx.physics.box2d.EdgeShape;
import com.badlogic.gdx.physics.box2d.Filter;
import com.badlogic.gdx.physics.box2d.FixtureDef;
import com.badlogic.gdx.physics.box2d.PolygonShape;
import com.gdxjam.Assets;
import com.gdxjam.behaviors.control.CorvetteControlBehavior;
import com.gdxjam.behaviors.control.CruiserControlBehavior;
import com.gdxjam.behaviors.control.FighterControlBehavior;
import com.gdxjam.components.Components;
import com.gdxjam.components.ControlComponent;
import com.gdxjam.components.DecayComponent;
import com.gdxjam.components.FSMComponent;
import com.gdxjam.components.FactionComponent;
import com.gdxjam.components.FactionComponent.Faction;
import com.gdxjam.components.GunportComponent;
import com.gdxjam.components.HealthComponent;
import com.gdxjam.components.IdentifyingComponent;
//import com.gdxjam.components.IdentifyingComponent;
import com.gdxjam.components.ParalaxComponent;
import com.gdxjam.components.ParticleComponent;
import com.gdxjam.components.PhysicsComponent;
import com.gdxjam.components.ProjectileComponent;
import com.gdxjam.components.ResourceComponent;
import com.gdxjam.components.SpriteComponent;
import com.gdxjam.components.SteerableComponent;
import com.gdxjam.components.SteeringBehaviorComponent;
import com.gdxjam.components.TargetComponent;
import com.gdxjam.systems.ParticleSystem;
import com.gdxjam.systems.ParticleSystem.ParticleType;
import com.gdxjam.systems.PhysicsSystem;
/** @author Torin Wiebelt (Twiebs) Creates Ashley entities using a builder */
public class EntityFactory {
private static final String TAG = "[" + EntityFactory.class.getSimpleName() + "]";
private static PooledEngine engine;
private static PhysicsSystem physicsSystem;
private static EntityBuilder builder = new EntityBuilder();
private static PhysicsBuilder physicsBuilder = new PhysicsBuilder();
/**
 * Creates a basic faction ship at the given position: dynamic body with a
 * circular collider, damping, steering, 100 health, faction sprite and an
 * empty target slot. The entity is NOT added to the engine.
 */
public static Entity createShip(Faction faction, Vector2 position) {
    Entity entity = builder.createEntity(EntityCategory.UNIT, position).physicsBody(BodyType.DynamicBody).circleCollider(Constants.unitRadius, 1.0f)
        .damping(1.5f, 1.0f).steerable(Constants.unitRadius).steeringBehavior().health(100).faction(faction).target()
        .sprite(Assets.spacecraft.ships.get(faction.ordinal()), Constants.unitRadius * 2, Constants.unitRadius * 2).getWithoutAdding();
    // (removed an unused PhysicsComponent local that was fetched but never read)
    // let the ship face independently of its velocity vector
    Components.STEERABLE.get(entity).setIndependentFacing(true);
    return entity;
}
/**
 * Creates a faction ship configured as a controllable unit (faction-specific
 * collider, control behavior, uuid) and adds it to the engine.
 */
public static Entity createPlayer(Faction faction, Vector2 position, long count) {
    Entity ship = EntityFactory.createShip(faction, position);
    // re-enter the builder with the already-created entity so further
    // builder calls decorate the same ship
    builder.entity = ship;
    builder.setCollider(faction);
    builder.control(faction).uuid(count).addToEngine();
    return ship;
}
/**
 * Creates a gun entity: a dynamic body with a circular collider of the unit
 * radius at half density. Not added to the engine.
 */
public static Entity createGun(Vector2 position) {
    return builder.createEntity(EntityCategory.GUN, position)
        .physicsBody(BodyType.DynamicBody)
        .circleCollider(Constants.unitRadius, 0.5f)
        .getWithoutAdding();
}
/**
 * Creates a projectile travelling at the given velocity and adds it to the
 * engine. It collides with units, resources and motherships, decays after
 * Constants.projectileDecayTime and carries `damage` for its hit.
 */
public static Entity createProjectile(Vector2 position, Vector2 velocity, float radius, Faction faction, int damage) {
    Entity entity = builder.createEntity(EntityCategory.PROJECTILE, position).physicsBody(BodyType.DynamicBody).circleSensor(radius)
        .filter(EntityCategory.PROJECTILE, 0, EntityCategory.UNIT | EntityCategory.RESOURCE | EntityCategory.MOTHERSHIP).faction(faction)
        .sprite(Assets.projectile.projectiles.get(faction.ordinal()), radius * 2, radius * 2).getWithoutAdding();
    ProjectileComponent projectileComp = engine.createComponent(ProjectileComponent.class).init(damage);
    entity.add(projectileComp);
    entity.add(engine.createComponent(DecayComponent.class).init(Constants.projectileDecayTime));
    PhysicsComponent physicsComp = Components.PHYSICS.get(entity);
    // bullet bodies get continuous collision detection so fast projectiles
    // do not tunnel through thin fixtures
    physicsComp.getBody().setBullet(true);
    physicsComp.getBody().setLinearVelocity(velocity);
    // NOTE(review): Vector2.angle() yields degrees while Box2D setTransform
    // expects radians -- confirm this orientation is intended
    physicsComp.getBody().setTransform(position, velocity.angle());
    engine.addEntity(entity);
    return entity;
}
/**
 * Creates a static asteroid resource and adds it to the engine. The
 * harvestable amount scales linearly with the asteroid radius.
 */
public static Entity createAsteroid(Vector2 position, float radius) {
    return builder.createEntity(EntityCategory.RESOURCE, position)
        .physicsBody(BodyType.StaticBody)
        .circleCollider(radius, 50.0f)
        .filter(EntityCategory.RESOURCE, 0, EntityCategory.PROJECTILE | EntityCategory.SQUAD | EntityCategory.UNIT)
        .resource((int) (Constants.baseAsteroidResourceAmt * radius))
        .steerable(radius)
        .faction(Faction.NONE)
        .sprite(Assets.space.asteroids.random(), radius * 2, radius * 2)
        .addToEngine();
}
/** Spawns a pooled particle effect of the given type at the position. */
public static Entity createParticle(Vector2 position, ParticleType type) {
    return builder.createEntity(EntityCategory.GRAPHICS, position).particle(type).addToEngine();
}
/**
 * Creates a static edge fixture between the two points, used as a world
 * boundary wall, and adds it to the engine.
 */
public static Entity createBoundry(Vector2 start, Vector2 end) {
    Entity entity = builder.createEntity(EntityCategory.WALL, new Vector2(0, 0)).physicsBody(BodyType.StaticBody).getWithoutAdding();
    FixtureDef def = new FixtureDef();
    EdgeShape edge = new EdgeShape();
    edge.set(start, end);
    def.shape = edge;
    def.density = 1.0f;
    def.filter.categoryBits = EntityCategory.WALL;
    // BUG FIX: createFixture(edge, 1.0f) was called before, silently
    // discarding the configured FixtureDef and its WALL category filter
    Components.PHYSICS.get(entity).getBody().createFixture(def);
    // Box2D copies the shape into the fixture, so it can be disposed now
    edge.dispose();
    engine.addEntity(entity);
    return entity;
}
/** Creates a parallax-layered background sprite and adds it to the engine. */
public static Entity createBackgroundArt(Vector2 position, float width, float height, TextureRegion region, int layer) {
    Entity entity = builder.createEntity(EntityCategory.GRAPHICS, position).sprite(region, width, height).getWithoutAdding();
    ParalaxComponent paralaxComp = engine.createComponent(ParalaxComponent.class).init(position.x, position.y, width, height, layer);
    entity.add(paralaxComp);
    engine.addEntity(entity);
    return entity;
}
/**
 * Called when the GameManager first initializes a new engine.
 *
 * @param engine
 *            The engine that the factory will use to create its entities.
 *            The PhysicsSystem is looked up once here, so it must already
 *            be registered with the engine.
 */
public static void setEngine(PooledEngine engine) {
    EntityFactory.engine = engine;
    physicsSystem = engine.getSystem(PhysicsSystem.class);
}
/**
* Creates an entity from the engine when first instantiated Exit builder
* by calling addToEngine() or getWithoutAdding()
*/
public static class EntityBuilder {
private static final BodyType DEFAULT_BODY = BodyType.DynamicBody;
public Vector2 position;
public Entity entity;
/**
 * Starts a new build: obtains a fresh pooled entity, stores its category
 * bits in the entity flags, and remembers the spawn position used by the
 * subsequent builder calls.
 */
public EntityBuilder createEntity(int categoryBits, Vector2 position) {
    entity = engine.createEntity();
    entity.flags = categoryBits;
    this.position = position;
    return this;
}
/** Adds the faction's control behavior with the default radius of 30. */
public EntityBuilder control(Faction faction) {
    return control(30, faction);
}
/**
 * Adds the faction-specific AI control behavior (fighter for FACTION0,
 * cruiser for FACTION1, corvette for FACTION2); any other faction gets no
 * control component.
 */
public EntityBuilder control(float radius, Faction faction) {
    switch (faction) {
    case FACTION0:
        entity.add(engine.createComponent(ControlComponent.class).init(new FighterControlBehavior(entity, engine, radius)));
        break;
    case FACTION1:
        entity.add(engine.createComponent(ControlComponent.class).init(new CruiserControlBehavior(entity, engine, radius)));
        break;
    case FACTION2:
        entity.add(engine.createComponent(ControlComponent.class).init(new CorvetteControlBehavior(entity, engine, radius)));
        break;
    default:
        break;
    }
    return this;
}
/** Attaches a pooled particle effect of the given type at the build position. */
public EntityBuilder particle(ParticleType type) {
    PooledEffect effect = engine.getSystem(ParticleSystem.class).createEffect(position, type);
    entity.add(engine.createComponent(ParticleComponent.class).init(effect));
    return this;
}

/** Switches to the PhysicsBuilder, seeded with this builder's entity and position. */
public PhysicsBuilder buildPhysics(BodyType type) {
    return physicsBuilder.reset(type, position, entity);
}
/**
 * Creates a Box2D body of the given type at the build position and attaches
 * it via a PhysicsComponent. The entity is stored as the body's user data
 * so collision callbacks can recover it.
 */
public EntityBuilder physicsBody(BodyType type) {
    BodyDef def = new BodyDef();
    def.type = type;
    def.position.set(position);
    Body body = physicsSystem.createBody(def);
    body.setUserData(entity);
    PhysicsComponent physics = engine.createComponent(PhysicsComponent.class).init(body);
    entity.add(physics);
    return this;
}
/** Sets angular and linear damping; requires a physics component. */
public EntityBuilder damping(float angular, float linear) {
    if (!Components.PHYSICS.has(entity)) {
        Gdx.app.error(TAG, "entity is missing physics component!");
        return this;
    }
    Body body = Components.PHYSICS.get(entity).getBody();
    body.setAngularDamping(angular);
    body.setLinearDamping(linear);
    return this;
}
/** Attaches a state machine component initialized for this entity. */
public EntityBuilder stateMachine() {
    entity.add(engine.createComponent(FSMComponent.class).init(entity));
    return this;
}

/** Attaches an (initially empty) steering behavior component. */
public EntityBuilder steeringBehavior() {
    entity.add(engine.createComponent(SteeringBehaviorComponent.class));
    return this;
}

/** Attaches a resource component holding the given harvestable amount. */
public EntityBuilder resource(int amount) {
    entity.add(engine.createComponent(ResourceComponent.class).init(amount));
    return this;
}
/** Overrides the entity's category flags. */
public EntityBuilder category(int categoryBits) {
    entity.flags = categoryBits;
    return this;
}

/**
 * Sets the Box2D collision filter of the entity's FIRST fixture and mirrors
 * the category bits into the entity flags.
 */
public EntityBuilder filter(int categoryBits, int groupIndex, int maskBits) {
    entity.flags = categoryBits;
    Filter filter = new Filter();
    filter.categoryBits = (short) categoryBits;
    filter.groupIndex = (short) groupIndex;
    filter.maskBits = (short) maskBits;
    // TODO make EntityBuilder filter better -- only fixture 0 is updated, so
    // entities with several fixtures keep default filters on the rest
    Components.PHYSICS.get(entity).getBody().getFixtureList().get(0).setFilterData(filter);
    return this;
}

/** Attaches an empty target component. */
public EntityBuilder target() {
    entity.add(engine.createComponent(TargetComponent.class));
    return this;
}

/** Attaches a gunport anchored at the given local origin. */
public EntityBuilder gunport(Vector2 origin) {
    entity.add(engine.createComponent(GunportComponent.class).init(origin));
    return this;
}

/** Attaches the entity's faction. */
public EntityBuilder faction(Faction faction) {
    entity.add(engine.createComponent(FactionComponent.class).init(faction));
    return this;
}
/**
 * Attaches a steerable component wrapping the entity's physics body;
 * physicsBody() must have been called first.
 */
public EntityBuilder steerable(float radius) {
    PhysicsComponent physics = Components.PHYSICS.get(entity);
    if (physics == null) {
        Gdx.app.error(TAG, "cannot create a steerable without physics!");
    } else {
        entity.add(engine.createComponent(SteerableComponent.class).init(physics.getBody(), radius));
    }
    return this;
}
/**
 * Adds the faction-specific collider shape: a circle for FACTION0, a box
 * for FACTION1/FACTION2; other factions get no collider.
 */
public EntityBuilder setCollider(Faction faction) {
    if (faction == Faction.FACTION0) {
        return circleCollider(1, 1);
    }
    if (faction == Faction.FACTION1 || faction == Faction.FACTION2) {
        return boxCollider(1, 1, 1);
    }
    // BUG FIX: previously returned null here, which would NPE any chained
    // builder call; a no-op return keeps the fluent chain intact
    return this;
}
/**
 * Adds two circular fixtures to the entity's body (lazily creating a
 * default dynamic body if none exists): a sensor of the given radius, plus
 * a second, offset circle of radius - 2 kept from the original code.
 */
public EntityBuilder circleCollider(float radius, float density) {
    PhysicsComponent physics = Components.PHYSICS.get(entity);
    if (physics == null) {
        physicsBody(DEFAULT_BODY);
        // BUG FIX: the component must be re-fetched after creating the body;
        // previously the local stayed null and getBody() below threw NPE
        physics = Components.PHYSICS.get(entity);
    }
    CircleShape shape = new CircleShape();
    shape.setRadius(radius);
    FixtureDef def = new FixtureDef();
    def.shape = shape;
    def.density = density;
    def.isSensor = true;
    physics.getBody().createFixture(def);
    shape.dispose(); // Box2D copies shapes into fixtures
    // TODO confirm: this second offset fixture looks like a leftover and its
    // radius goes negative for radius < 2 (a dead FixtureDef was removed)
    CircleShape offsetShape = new CircleShape();
    offsetShape.setRadius(radius - 2);
    offsetShape.setPosition(new Vector2(2, 0));
    physics.getBody().createFixture(offsetShape, density);
    offsetShape.dispose();
    return this;
}
// Working on this not done yet but had to go - Nate
/**
 * Adds two polygon fixtures built from the given points (lazily creating a
 * default dynamic body if none exists): a sensor plus a plain duplicate
 * kept from the original code.
 */
public EntityBuilder polygonCollider(Vector2[] points, float density) {
    PhysicsComponent physics = Components.PHYSICS.get(entity);
    if (physics == null) {
        physicsBody(DEFAULT_BODY);
        // BUG FIX: re-fetch after lazily creating the body; the local was
        // previously left null and the calls below threw NPE
        physics = Components.PHYSICS.get(entity);
    }
    PolygonShape shape = new PolygonShape();
    shape.set(points);
    FixtureDef def = new FixtureDef();
    def.shape = shape;
    def.density = density;
    def.isSensor = true;
    physics.getBody().createFixture(def);
    shape.dispose(); // Box2D copies shapes into fixtures
    // TODO confirm: the duplicated second fixture mirrors the original code
    // and is likely unintended (a dead FixtureDef was removed)
    PolygonShape secondShape = new PolygonShape();
    secondShape.set(points);
    physics.getBody().createFixture(secondShape, density);
    secondShape.dispose();
    return this;
}
/**
 * Adds two box fixtures (lazily creating a default dynamic body if none
 * exists): a sensor plus a plain duplicate kept from the original code.
 * NOTE(review): setAsBox(hx, hy) receives (height, width) -- the parameter
 * names look swapped relative to Box2D's half-width/half-height order.
 */
public EntityBuilder boxCollider(float height, float width, float density) {
    PhysicsComponent physics = Components.PHYSICS.get(entity);
    if (physics == null) {
        physicsBody(DEFAULT_BODY);
        // BUG FIX: re-fetch after lazily creating the body; the local was
        // previously left null and the calls below threw NPE
        physics = Components.PHYSICS.get(entity);
    }
    PolygonShape shape = new PolygonShape();
    shape.setAsBox(height, width);
    FixtureDef def = new FixtureDef();
    def.shape = shape;
    def.density = density;
    def.isSensor = true;
    physics.getBody().createFixture(def);
    shape.dispose(); // Box2D copies shapes into fixtures
    // TODO confirm: the duplicated second fixture mirrors the original code
    // and is likely unintended (a dead FixtureDef was removed)
    PolygonShape secondShape = new PolygonShape();
    secondShape.setAsBox(height, width);
    physics.getBody().createFixture(secondShape, density);
    secondShape.dispose();
    return this;
}
/** Adds a single circular sensor fixture of the given radius. */
public EntityBuilder circleSensor(float radius) {
    PhysicsComponent physics = Components.PHYSICS.get(entity);
    if (physics == null) {
        physicsBody(DEFAULT_BODY);
        // BUG FIX: re-fetch after lazily creating the body; the local was
        // previously left null and getBody() below threw NPE
        physics = Components.PHYSICS.get(entity);
    }
    CircleShape shape = new CircleShape();
    shape.setRadius(radius);
    FixtureDef fixtureDef = new FixtureDef();
    fixtureDef.isSensor = true;
    fixtureDef.shape = shape;
    physics.getBody().createFixture(fixtureDef);
    shape.dispose(); // Box2D copies the shape into the fixture
    return this;
}
/**
 * Adds a fan-shaped sensor fixture sweeping `arc` degrees at distance
 * `range`, built as an 8-vertex polygon: vertex 0 at the body origin and
 * vertices 1-7 sampled along the arc.
 */
public EntityBuilder rangeSensor(float range, float arc) {
    Body body;
    if (Components.PHYSICS.has(entity)) {
        body = Components.PHYSICS.get(entity).getBody();
    } else {
        Gdx.app.error(TAG, "can not add range sensor : entity does not have a physics component!");
        return this;
    }
    Vector2 vertices[] = new Vector2[8];
    for (int i = 0; i <= 7; i++) {
        vertices[i] = new Vector2(0, 0);
    }
    // vertices 1..7 sample the arc in 6 equal steps; the -90 degree term
    // rotates the fan relative to the body's local x axis -- TODO confirm
    // the intended facing direction
    for (int i = 0; i < 7; i++) {
        float angle = (i / 6.0f * arc * MathUtils.degRad) - (90 * MathUtils.degRad);
        vertices[i + 1].set(range * MathUtils.cos(angle), range * MathUtils.sin(angle));
    }
    PolygonShape poly = new PolygonShape();
    poly.set(vertices);
    FixtureDef sensorDef = new FixtureDef();
    sensorDef.shape = poly;
    sensorDef.isSensor = true;
    body.createFixture(sensorDef);
    poly.dispose();
    return this;
}
/** Attaches a health component with both current and max health set to `value`. */
public EntityBuilder health(int value) {
    HealthComponent healthComp = engine.createComponent(HealthComponent.class);
    healthComp.max = value;
    healthComp.value = value;
    entity.add(healthComp);
    return this;
}
/** Attaches a sprite of the given size at the build position. */
public EntityBuilder sprite(TextureRegion region, float width, float height) {
    SpriteComponent spriteComp = engine.createComponent(SpriteComponent.class).init(region, position.x, position.y, width, height);
    entity.add(spriteComp);
    return this;
}

/** Attaches an identifying component carrying the given uuid. */
public EntityBuilder uuid(long uuid) {
    entity.add(engine.createComponent(IdentifyingComponent.class).init(uuid));
    return this;
}
/** Finishes the build and registers the entity with the engine. */
public Entity addToEngine() {
    engine.addEntity(entity);
    return entity;
}

/** Finishes the build WITHOUT registering the entity with the engine. */
public Entity getWithoutAdding() {
    return entity;
}
}
/**
 * Secondary builder for Box2D bodies, entered via EntityBuilder.buildPhysics().
 */
public static class PhysicsBuilder {
    private Body body;

    /** Creates a new body of the given type at `position`, tagged with `entity` as user data. */
    public PhysicsBuilder reset(BodyType type, Vector2 position, Entity entity) {
        BodyDef def = new BodyDef();
        def.type = type;
        def.position.set(position);
        body = physicsSystem.createBody(def);
        body.setUserData(entity);
        return this;
    }

    /**
     * NOTE(review): despite the name this does not return the Body; it hands
     * control back to the shared EntityBuilder instance.
     */
    public EntityBuilder getBody() {
        return builder;
    }
}
}
| |
/**
* Copyright (c) 2015-2016 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.web.component.objectdetails;
import java.util.List;
import com.evolveum.midpoint.gui.api.util.WebComponentUtil;
import com.evolveum.midpoint.prism.PrismContainerDefinition;
import com.evolveum.midpoint.web.component.prism.ContainerStatus;
import com.evolveum.midpoint.web.component.util.VisibleEnableBehaviour;
import org.apache.commons.lang.Validate;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.extensions.markup.html.tabs.ITab;
import org.apache.wicket.markup.html.panel.Panel;
import com.evolveum.midpoint.gui.api.model.LoadableModel;
import com.evolveum.midpoint.prism.PrismObject;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
import com.evolveum.midpoint.web.component.AjaxButton;
import com.evolveum.midpoint.web.component.AjaxSubmitButton;
import com.evolveum.midpoint.web.component.TabbedPanel;
import com.evolveum.midpoint.web.component.form.Form;
import com.evolveum.midpoint.web.component.prism.ObjectWrapper;
import com.evolveum.midpoint.web.page.admin.PageAdminObjectDetails;
import com.evolveum.midpoint.web.page.admin.users.component.ExecuteChangeOptionsDto;
import com.evolveum.midpoint.web.page.admin.users.component.ExecuteChangeOptionsPanel;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectType;
/**
* @author semancik
*
*/
public abstract class AbstractObjectMainPanel<O extends ObjectType> extends Panel {
public static final String PARAMETER_SELECTED_TAB = "tab";
private static final String ID_MAIN_FORM = "mainForm";
private static final String ID_TAB_PANEL = "tabPanel";
private static final String ID_EXECUTE_OPTIONS = "executeOptions";
private static final String ID_BACK = "back";
private static final String ID_SAVE = "save";
private static final String ID_PREVIEW_CHANGES = "previewChanges";
private static final Trace LOGGER = TraceManager.getTrace(AbstractObjectMainPanel.class);
private Form mainForm;
private LoadableModel<ObjectWrapper<O>> objectModel;
private LoadableModel<ExecuteChangeOptionsDto> executeOptionsModel = new LoadableModel<ExecuteChangeOptionsDto>(false) {
@Override
protected ExecuteChangeOptionsDto load() {
return ExecuteChangeOptionsDto.createFromSystemConfiguration();
}
};
/**
 * @param objectModel model holding the wrapper of the edited object; must not be null
 * @param parentPage the details page this panel is placed on
 */
public AbstractObjectMainPanel(String id, LoadableModel<ObjectWrapper<O>> objectModel, PageAdminObjectDetails<O> parentPage) {
    super(id, objectModel);
    Validate.notNull(objectModel, "Null object model");
    this.objectModel = objectModel;
    initLayout(parentPage);
}
@Override
protected void onConfigure() {
    super.onConfigure();
    // re-apply the tab selection from the "tab" page parameter on each
    // render configuration so deep links to a specific tab keep working
    TabbedPanel tabbedPanel = (TabbedPanel) get(ID_MAIN_FORM + ":" + ID_TAB_PANEL);
    WebComponentUtil.setSelectedTabFromPageParameters(tabbedPanel, getPage().getPageParameters(),
            PARAMETER_SELECTED_TAB);
}
/** Model of the wrapped object being edited. */
public LoadableModel<ObjectWrapper<O>> getObjectModel() {
    return objectModel;
}

/** Convenience accessor for the current object wrapper. */
public ObjectWrapper<O> getObjectWrapper() {
    return objectModel.getObject();
}

/** Convenience accessor for the underlying prism object. */
public PrismObject<O> getObject() {
    return objectModel.getObject().getObject();
}

/** The main wicket form holding the tabs, options and buttons. */
public Form getMainForm() {
    return mainForm;
}
// Builds the whole panel: main form, tab panel, execute-options panel and
// the button bar.
private void initLayout(PageAdminObjectDetails<O> parentPage) {
    mainForm = new Form<>(ID_MAIN_FORM, true);
    add(mainForm);
    initLayoutTabs(parentPage);
    initLayoutOptions();
    initLayoutButtons(parentPage);
}

protected void initLayoutTabs(final PageAdminObjectDetails<O> parentPage) {
    // tabs come from the subclass; the selection is kept in the "tab" page
    // parameter so it survives navigation
    List<ITab> tabs = createTabs(parentPage);
    TabbedPanel<ITab> tabPanel = WebComponentUtil.createTabPanel(ID_TAB_PANEL, parentPage, tabs, null,
            PARAMETER_SELECTED_TAB);
    mainForm.add(tabPanel);
}

/** Subclasses supply the set of tabs shown for their object type. */
protected abstract List<ITab> createTabs(PageAdminObjectDetails<O> parentPage);
// Adds the execute-options panel; its visibility follows
// getOptionsPanelVisibility() (hidden for read-only/unmodifiable objects).
protected void initLayoutOptions() {
    ExecuteChangeOptionsPanel optionsPanel = new ExecuteChangeOptionsPanel(ID_EXECUTE_OPTIONS,
            executeOptionsModel, true, false);
    optionsPanel.add(new VisibleEnableBehaviour() {
        private static final long serialVersionUID = 1L;

        @Override
        public boolean isVisible() {
            return getOptionsPanelVisibility();
        }
    });
    mainForm.add(optionsPanel);
}
// Adds the preview, save and back buttons to the main form.
protected void initLayoutButtons(PageAdminObjectDetails<O> parentPage) {
    initLayoutPreviewButton(parentPage);
    initLayoutSaveButton(parentPage);
    initLayoutBackButton(parentPage);
}
/**
 * Adds the Save submit button: visible only for editable objects, and
 * possibly disabled for users without modify rights (see isEnabled below).
 */
protected void initLayoutSaveButton(final PageAdminObjectDetails<O> parentPage) {
    AjaxSubmitButton saveButton = new AjaxSubmitButton(ID_SAVE, parentPage.createStringResource("pageAdminFocus.button.save")) {

        @Override
        protected void onSubmit(AjaxRequestTarget target,
                org.apache.wicket.markup.html.form.Form<?> form) {
            getDetailsPage().savePerformed(target);
        }

        @Override
        protected void onError(AjaxRequestTarget target,
                org.apache.wicket.markup.html.form.Form<?> form) {
            // on validation error just repaint the feedback panel
            target.add(parentPage.getFeedbackPanel());
        }
    };
    saveButton.add(new VisibleEnableBehaviour() {
        private static final long serialVersionUID = 1L;

        @Override
        public boolean isVisible() {
            return !getObjectWrapper().isReadonly();
        }

        @Override
        public boolean isEnabled() {
            // in case user isn't allowed to modify focus data but has
            // e.g. #assign authorization, Save button is disabled on page load.
            // Save button becomes enabled just if some changes are made
            // on the Assignments tab (in the use case with #assign authorization)
            PrismContainerDefinition def = getObjectWrapper().getDefinition();
            if (ContainerStatus.MODIFYING.equals(getObjectWrapper().getStatus())
                    && !def.canModify()) {
                return areSavePreviewButtonsEnabled();
            }
            return true;
        }
    });
    // pressing Enter in the form triggers Save
    mainForm.setDefaultButton(saveButton);
    mainForm.add(saveButton);
}
// TEMPORARY
/**
 * Adds the "preview changes" submit button; mirrors the Save button's
 * visibility and enabled rules.
 */
protected void initLayoutPreviewButton(final PageAdminObjectDetails<O> parentPage) {
    AjaxSubmitButton previewButton = new AjaxSubmitButton(ID_PREVIEW_CHANGES, parentPage.createStringResource("pageAdminFocus.button.previewChanges")) {

        @Override
        protected void onSubmit(AjaxRequestTarget target,
                org.apache.wicket.markup.html.form.Form<?> form) {
            getDetailsPage().previewPerformed(target);
        }

        @Override
        protected void onError(AjaxRequestTarget target,
                org.apache.wicket.markup.html.form.Form<?> form) {
            // on validation error just repaint the feedback panel
            target.add(parentPage.getFeedbackPanel());
        }
    };
    previewButton.add(new VisibleEnableBehaviour() {
        private static final long serialVersionUID = 1L;

        @Override
        public boolean isVisible() {
            return AbstractObjectMainPanel.this.isPreviewButtonVisible();
        }

        @Override
        public boolean isEnabled() {
            // same rule as Save: stays disabled for users without modify
            // rights until areSavePreviewButtonsEnabled() says otherwise
            PrismContainerDefinition def = getObjectWrapper().getDefinition();
            if (ContainerStatus.MODIFYING.equals(getObjectWrapper().getStatus())
                    && !def.canModify()) {
                return areSavePreviewButtonsEnabled();
            }
            return true;
        }
    });
    mainForm.add(previewButton);
}
/** Preview is only offered when the object is editable. */
protected boolean isPreviewButtonVisible() {
    return !getObjectWrapper().isReadonly();
}
// Adds the Back button, which navigates back without submitting the form.
protected void initLayoutBackButton(PageAdminObjectDetails<O> parentPage) {
    AjaxButton back = new AjaxButton(ID_BACK, parentPage.createStringResource("pageAdminFocus.button.back")) {

        @Override
        public void onClick(AjaxRequestTarget target) {
            backPerformed(target);
        }
    };
    mainForm.add(back);
}
/** Current execute-change options backing the options panel. */
public ExecuteChangeOptionsDto getExecuteChangeOptionsDto() {
    return executeOptionsModel.getObject();
}

private void backPerformed(AjaxRequestTarget target) {
    getDetailsPage().redirectBack();
}

/** The owning details page; this panel is only placed on such pages. */
protected PageAdminObjectDetails<O> getDetailsPage() {
    return (PageAdminObjectDetails<O>)getPage();
}
/**
 * The execute-options panel is shown only when the object is editable: it
 * is hidden for read-only wrappers and for existing objects whose
 * definition forbids modification.
 */
protected boolean getOptionsPanelVisibility() {
    if (getObjectWrapper().isReadonly()) {
        return false;
    }
    PrismContainerDefinition def = getObjectWrapper().getDefinition();
    boolean modifyingUnmodifiable = ContainerStatus.MODIFYING.equals(getObjectWrapper().getStatus())
            && !def.canModify();
    return !modifyingUnmodifiable;
}
/** Repaints the save and preview buttons, e.g. after their enabled state changes. */
public void reloadSavePreviewButtons(AjaxRequestTarget target) {
    target.add(AbstractObjectMainPanel.this.get(ID_MAIN_FORM).get(ID_PREVIEW_CHANGES));
    target.add(AbstractObjectMainPanel.this.get(ID_MAIN_FORM).get(ID_SAVE));
}

/**
 * Whether save/preview stay enabled for a user who cannot modify the object
 * itself (see the enabled logic in initLayoutSaveButton); presumably meant
 * to be overridden by subclasses -- defaults to false here.
 */
protected boolean areSavePreviewButtonsEnabled() {
    return false;
}
}
| |
package com.gcw.sapienza.places.utils;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.media.ThumbnailUtils;
import android.os.Environment;
import android.provider.MediaStore;
import android.util.Log;
import com.gcw.sapienza.places.PlacesApplication;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
/**
* Helps in the creation of thumbnail images
* Created by paolo on 18/02/15.
*/
public class BitmapUtils {
private static final String TAG = "BitmapUtils";
private static final String THUMB_FORMAT_EXTENSION = ".jpg";
private static final int THUMBNAIL_SIZE = 500;
//download is very low with 4MP picture 2024*2024
//try with 3.2 MP so 1800*1800
private static final int PIC_MAX_SIZE = 1800;
// Utility class; prevent instantiation.
private BitmapUtils() {
}
/**
 * Ensures an image file is at most PIC_MAX_SIZE on either side. Returns a
 * new temporary scaled file when the image was too large, otherwise the
 * original file unchanged.
 */
public static File scaleImageToMaxSupportedSize(File imageFile) {
    Bitmap src = BitmapFactory.decodeFile(imageFile.getAbsolutePath());
    if (src == null) {
        // BUG FIX: decodeFile returns null for unreadable/corrupt files;
        // previously this path threw an NPE. Fall back to the original file.
        Log.w(TAG, "could not decode " + imageFile.getAbsolutePath());
        return imageFile;
    }
    File result = null;
    Log.d(TAG, "W " + src.getWidth() + " H " + src.getHeight());
    if (src.getHeight() > PIC_MAX_SIZE || src.getWidth() > PIC_MAX_SIZE) {
        Bitmap scaled = BitmapUtils.createThumbnailForImage(src, PIC_MAX_SIZE, PIC_MAX_SIZE);
        Log.d(TAG, scaled.toString());
        result = generateTemporaryPictureFileFromFile(imageFile);
        writeBitmapToFile(scaled, result);
        scaled.recycle();
    }
    src.recycle();
    return result == null ? imageFile : result;
}
/**
 * Scales an image to fit within maxWidth x maxHeight, keeping the aspect
 * ratio by clamping the longer side to its maximum.
 *
 * @param original original image
 * @param maxWidth maximum width
 * @param maxHeight maximum height
 * @return a Bitmap representing the thumbnail
 */
public static Bitmap createThumbnailForImage(Bitmap original, int maxWidth, int maxHeight) {
    int width = original.getWidth();
    int height = original.getHeight();
    Log.v("Pictures", "Width and height are " + width + "--" + height);
    if (width == height) {
        // square: stretch to the full target box
        width = maxWidth;
        height = maxHeight;
    } else if (width > height) {
        // landscape: clamp width, derive height from the scale factor
        float scale = (float) width / maxWidth;
        width = maxWidth;
        height = (int) (height / scale);
    } else {
        // portrait: clamp height, derive width from the scale factor
        float scale = (float) height / maxHeight;
        height = maxHeight;
        width = (int) (width / scale);
    }
    Log.v("Pictures", "after scaling Width and height are " + width + "--" + height);
    return Bitmap.createScaledBitmap(original, width, height, true);
}
/**
 * Scales an image maintaining aspect ratio, fitting it within the default
 * THUMBNAIL_SIZE x THUMBNAIL_SIZE box.
 *
 * @param original image
 * @return a Bitmap containing the thumbnail
 */
@SuppressWarnings("UnusedDeclaration")
public static Bitmap createThumbnailForImageRespectingProportions(Bitmap original) {
    return BitmapUtils.createThumbnailForImage(original, THUMBNAIL_SIZE, THUMBNAIL_SIZE);
}
/**
* Scales an image maintaining aspect ratio
*
* @param original original image file
* @param maxWidth maximum width
* @param maxHeight maximum height
* @return a File representing the thumbnail
*/
public static File createThumbnailForImageRespectingProportions(File original, int maxWidth, int maxHeight) {
Bitmap src = BitmapFactory.decodeFile(original.getAbsolutePath());
Bitmap thumbnail = BitmapUtils.createThumbnailForImage(src, maxWidth, maxHeight);
File thumbFile = BitmapUtils.generateThumbnailFileForFile(original, false);
boolean result = writeBitmapToFile(thumbnail, thumbFile);
src.recycle();
thumbnail.recycle();
return result ? thumbFile : null;
}
/**
* Scales an image maintaining aspect ratio.
*
* @param original image
* @return a File containing the thumbnail
*/
public static File createThumbnailForImageRespectingProportions(File original) {
return BitmapUtils.createThumbnailForImageRespectingProportions(original, THUMBNAIL_SIZE, THUMBNAIL_SIZE);
}
/**
* Generates a thumbnail from a video
*
* @param video file containing the video
* @return File containing the thumbnail image
*/
public static File createTumbnailForVideo(File video) {
Bitmap thumbnail = ThumbnailUtils.createVideoThumbnail(video.getAbsolutePath(), MediaStore.Images.Thumbnails.MINI_KIND);
File thumbFile = BitmapUtils.generateThumbnailFileForFile(video, true);
boolean result = writeBitmapToFile(thumbnail, thumbFile);
thumbnail.recycle();
return result ? thumbFile : null;
}
/**
* Creates a thumbnail file starting from an image/video file
*
* @param original_file original file path
* @param video true if the file is a video. false if the file is a picture
* @return a file
*/
private static File generateThumbnailFileForFile(File original_file, boolean video) {
// Create an image file name
try {
String imageFileName = "thumb" + original_file.getName();
Log.d(TAG, original_file.getName());
File storageDir = Environment.getExternalStoragePublicDirectory(
video ? Environment.DIRECTORY_MOVIES : Environment.DIRECTORY_PICTURES);
return File.createTempFile(
imageFileName, /* prefix */
THUMB_FORMAT_EXTENSION, /* suffix */
storageDir /* directory */
);
} catch (IOException e) {
Log.d(TAG, "Error", e);
}
return null;
}
/**
* Generates a temporary file used to store the scaled image
*
* @param original original image file
* @return a temporary file
*/
private static File generateTemporaryPictureFileFromFile(File original) {
try {
File f = File.createTempFile("image",
".jpg",
PlacesApplication.getPlacesAppContext().getCacheDir());
f.deleteOnExit();
return f;
} catch (IOException e) {
Log.d(TAG, "Error Creating file", e);
//if file creation failed try following method
return generateThumbnailFileForFile(original, false);
}
}
/**
* Writes an image to file
*
* @param src the image to write
* @param file the destination file
* @return true in case of success, false otherwise
*/
private static boolean writeBitmapToFile(Bitmap src, File file) {
FileOutputStream out = null;
try {
out = new FileOutputStream(file);
src.compress(Bitmap.CompressFormat.JPEG, 70, out); // bmp is your Bitmap instance
return true;
} catch (Exception e) {
Log.d(TAG, "Error writing image", e);
e.printStackTrace();
} finally {
try {
if (out != null) {
out.close();
}
} catch (IOException e) {
Log.d(TAG, "Error writing image", e);
}
}
return false;
}
}
| |
/*
* Copyright 2013 S. Webber
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.projog.core.term;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.projog.TermFactory.atom;
import static org.projog.TermFactory.decimalFraction;
import static org.projog.TermFactory.integerNumber;
import static org.projog.TermFactory.list;
import static org.projog.TermFactory.structure;
import static org.projog.TermFactory.variable;
import java.util.Set;
import org.junit.Test;
import org.projog.TestUtils;
import org.projog.core.ProjogException;
import org.projog.core.kb.KnowledgeBase;
import org.projog.core.math.ArithmeticOperators;
/** Unit tests for the static helper methods of {@code TermUtils}. */
public class TermUtilsTest {
    @Test
    public void testEmptyArray() {
        assertEquals(0, TermUtils.EMPTY_ARRAY.length);
    }

    @Test
    public void testCopy() {
        // setup input terms
        Atom a = atom("a");
        Variable x = variable("X");
        Variable y = variable("Y");
        Variable z = variable("Z");
        assertTrue(x.unify(a));
        Structure p = structure("p", x, y);
        Term[] input = {a, p, x, y, z};
        // perform copy
        Term[] output = TermUtils.copy(input);
        // check result: atoms and bound variables are shared, free variables are copied
        assertEquals(input.length, output.length);
        assertSame(a, output[0]);
        Term t = output[1];
        assertSame(TermType.STRUCTURE, t.getType());
        assertSame(p.getName(), t.getName());
        assertEquals(2, t.getNumberOfArguments());
        assertSame(a, t.getArgument(0));
        Term copyOfY = t.getArgument(1);
        assertVariable(copyOfY, "Y");
        assertSame(a, output[2]);
        // the same free variable must map to the same copy everywhere it appears
        assertSame(copyOfY, output[3]);
        assertVariable(output[4], "Z");
    }

    /** Asserts that {@code t} is an unbound variable with the given id. */
    private void assertVariable(Term t, String id) {
        assertSame(TermType.VARIABLE, t.getType());
        assertSame(t, t.getTerm());
        assertEquals(id, ((Variable) t).getId());
    }

    @Test
    public void testBacktrack() {
        // setup input terms
        Atom a = atom("a");
        Atom b = atom("b");
        Atom c = atom("c");
        Variable x = variable("X");
        Variable y = variable("Y");
        Variable z = variable("Z");
        assertTrue(x.unify(a));
        assertTrue(y.unify(b));
        assertTrue(z.unify(c));
        // snapshot of the element references, to verify the array itself is untouched
        Term[] original = {x, a, b, y, c, z};
        Term[] input = {x, a, b, y, c, z};
        // perform the backtrack
        TermUtils.backtrack(input);
        // assert variables have backtracked
        assertSame(x, x.getTerm());
        assertSame(y, y.getTerm());
        assertSame(z, z.getTerm());
        // assert array was not manipulated
        for (int i = 0; i < input.length; i++) {
            assertSame(original[i], input[i]);
        }
    }

    @Test
    public void testUnifySuccess() {
        // setup input terms
        Variable x = variable("X");
        Variable y = variable("Y");
        Variable z = variable("Z");
        Atom a = atom("a");
        Atom b = atom("b");
        Atom c = atom("c");
        Term[] input1 = {x, b, z};
        Term[] input2 = {a, y, c};
        // attempt unification
        assertTrue(TermUtils.unify(input1, input2));
        // assert all variables unified to atoms
        assertSame(a, x.getTerm());
        assertSame(b, y.getTerm());
        assertSame(c, z.getTerm());
    }

    @Test
    public void testUnifyFailure() {
        // setup input terms (final pair b/a cannot unify, forcing failure)
        Variable x = variable("X");
        Variable y = variable("Y");
        Variable z = variable("Z");
        Atom a = atom("a");
        Atom b = atom("b");
        Atom c = atom("c");
        Term[] input1 = {x, b, z, b};
        Term[] input2 = {a, y, c, a};
        // attempt unification
        assertFalse(TermUtils.unify(input1, input2));
        // assert all variables in input1 were backed tracked
        assertSame(x, x.getTerm());
        assertSame(z, z.getTerm());
        // as javadocs states, terms passed in second argument to unify may not be backtracked
        assertSame(b, y.getTerm());
    }

    @Test
    public void testGetAllVariablesInTerm() {
        Variable q = variable("Q");
        Variable r = variable("R");
        Variable s = variable("S");
        Variable t = variable("T");
        Variable v = variable("V");
        Variable w = variable("W");
        Variable x = variable("X");
        Variable y = variable("Y");
        Variable z = variable("Z");
        Variable anon = new Variable();
        Variable[] variables = {q, r, s, t, v, w, x, y, z, anon};
        // nest the variables at varying depths, with y appearing more than once
        Structure input = structure("p1", x, v, anon, EmptyList.EMPTY_LIST, y, q, integerNumber(1), structure("p2", y, decimalFraction(1.5), w), list(s, y, integerNumber(7), r, t),
                    z);
        Set<Variable> result = TermUtils.getAllVariablesInTerm(input);
        assertEquals(variables.length, result.size());
        for (Variable variable : variables) {
            assertTrue(result.contains(variable));
        }
    }

    @Test
    public void testIntegerNumberCastToNumeric() {
        IntegerNumber i = integerNumber();
        assertSame(i, TermUtils.castToNumeric(i));
    }

    @Test
    public void testDecimalFractionCastToNumeric() {
        DecimalFraction d = decimalFraction();
        assertSame(d, TermUtils.castToNumeric(d));
    }

    @Test
    public void testAtomCastToNumeric() {
        try {
            // an atom is not numeric even when its name looks like a number
            Atom a = atom("1");
            TermUtils.castToNumeric(a);
            fail();
        } catch (ProjogException e) {
            assertEquals("Expected Numeric but got: ATOM with value: 1", e.getMessage());
        }
    }

    @Test
    public void testVariableCastToNumeric() {
        Variable v = variable();
        try {
            // unbound variable cannot be cast
            TermUtils.castToNumeric(v);
            fail();
        } catch (ProjogException e) {
            assertEquals("Expected Numeric but got: VARIABLE with value: X", e.getMessage());
        }
        // once bound to a numeric, the cast succeeds and returns the bound term
        IntegerNumber i = integerNumber();
        v.unify(i);
        assertSame(i, TermUtils.castToNumeric(v));
    }

    @Test
    public void testStructureCastToNumeric() {
        // test that, even if it represents an arithmetic expression,
        // a structure causes an exception when passed to castToNumeric
        Structure arithmeticExpression = structure("*", integerNumber(3), integerNumber(7));
        try {
            TermUtils.castToNumeric(arithmeticExpression);
            fail();
        } catch (ProjogException e) {
            assertEquals("Expected Numeric but got: STRUCTURE with value: *(3, 7)", e.getMessage());
        }
    }

    @Test
    public void testIntegerNumberToLong() {
        KnowledgeBase kb = TestUtils.createKnowledgeBase();
        ArithmeticOperators operators = kb.getArithmeticOperators();
        assertEquals(Integer.MAX_VALUE, TermUtils.toLong(operators, integerNumber(Integer.MAX_VALUE)));
        assertEquals(1, TermUtils.toLong(operators, integerNumber(1)));
        assertEquals(0, TermUtils.toLong(operators, integerNumber(0)));
        assertEquals(Integer.MIN_VALUE, TermUtils.toLong(operators, integerNumber(Integer.MIN_VALUE)));
    }

    @Test
    public void testArithmeticFunctionToLong() {
        KnowledgeBase kb = TestUtils.createKnowledgeBase();
        ArithmeticOperators operators = kb.getArithmeticOperators();
        // toLong should evaluate arithmetic expressions, not just plain integers
        Structure arithmeticExpression = structure("*", integerNumber(3), integerNumber(7));
        assertEquals(21, TermUtils.toLong(operators, arithmeticExpression));
    }

    @Test
    public void testToLongExceptions() {
        KnowledgeBase kb = TestUtils.createKnowledgeBase();
        assertTestToLongException(kb, atom("test"), "Cannot find arithmetic operator: test/0");
        assertTestToLongException(kb, structure("p", integerNumber(1), integerNumber(1)), "Cannot find arithmetic operator: p/2");
        assertTestToLongException(kb, decimalFraction(0), "Expected integer but got: FRACTION with value: 0.0");
        assertTestToLongException(kb, structure("+", decimalFraction(1.0), decimalFraction(1.0)), "Expected integer but got: FRACTION with value: 2.0");
    }

    /** Asserts that {@code TermUtils.toLong} rejects {@code t} with the given message. */
    private void assertTestToLongException(KnowledgeBase kb, Term t, String expectedExceptionMessage) {
        ArithmeticOperators operators = kb.getArithmeticOperators();
        try {
            TermUtils.toLong(operators, t);
            fail();
        } catch (ProjogException e) {
            assertEquals(expectedExceptionMessage, e.getMessage());
        }
    }

    @Test
    public void testGetAtomName() {
        Atom a = atom("testAtomName");
        assertEquals("testAtomName", TermUtils.getAtomName(a));
    }

    @Test
    public void testGetAtomNameException() {
        Structure p = structure("testAtomName", atom());
        try {
            TermUtils.getAtomName(p);
            fail();
        } catch (ProjogException e) {
            assertEquals("Expected an atom but got: STRUCTURE with value: testAtomName(test)", e.getMessage());
        }
    }

    @Test
    public void assertType() {
        TermUtils.assertType(atom("testAtomName"), TermType.ATOM);
        try {
            TermUtils.assertType(atom("testAtomName"), TermType.LIST);
            fail();
        } catch (ProjogException e) {
            assertEquals("Expected LIST but got: ATOM with value: testAtomName", e.getMessage());
        }
    }

    @Test
    public void testToInt() {
        assertToInt(0);
        assertToInt(1);
        assertToInt(-1);
        assertToInt(Integer.MAX_VALUE);
        assertToInt(Integer.MIN_VALUE);
    }

    /** Asserts that {@code n} survives a round-trip through {@code TermUtils.toInt}. */
    private void assertToInt(long n) {
        assertEquals(n, TermUtils.toInt(integerNumber(n)));
    }

    @Test
    public void testToIntException() {
        // values just outside, and far outside, the int range must be rejected
        assertToIntException(Integer.MAX_VALUE + 1L);
        assertToIntException(Integer.MIN_VALUE - 1L);
        assertToIntException(Long.MAX_VALUE);
        assertToIntException(Long.MIN_VALUE);
    }

    private void assertToIntException(long n) {
        try {
            TermUtils.toInt(integerNumber(n));
            fail();
        } catch (ProjogException e) {
            assertEquals("Value cannot be cast to an int without losing precision: " + n, e.getMessage());
        }
    }
}
| |
/*
* @(#)ConcurrentLinkedQueue.java 1.12 06/04/21
*
* Copyright 2006 Sun Microsystems, Inc. All rights reserved.
* SUN PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*/
package java.util.concurrent;
import java.util.*;
import java.util.concurrent.atomic.*;
/**
* An unbounded thread-safe {@linkplain Queue queue} based on linked nodes.
* This queue orders elements FIFO (first-in-first-out).
* The <em>head</em> of the queue is that element that has been on the
* queue the longest time.
* The <em>tail</em> of the queue is that element that has been on the
* queue the shortest time. New elements
* are inserted at the tail of the queue, and the queue retrieval
* operations obtain elements at the head of the queue.
* A <tt>ConcurrentLinkedQueue</tt> is an appropriate choice when
* many threads will share access to a common collection.
* This queue does not permit <tt>null</tt> elements.
*
* <p>This implementation employs an efficient "wait-free"
* algorithm based on one described in <a
* href="http://www.cs.rochester.edu/u/michael/PODC96.html"> Simple,
* Fast, and Practical Non-Blocking and Blocking Concurrent Queue
* Algorithms</a> by Maged M. Michael and Michael L. Scott.
*
* <p>Beware that, unlike in most collections, the <tt>size</tt> method
* is <em>NOT</em> a constant-time operation. Because of the
* asynchronous nature of these queues, determining the current number
* of elements requires a traversal of the elements.
*
* <p>This class and its iterator implement all of the
* <em>optional</em> methods of the {@link Collection} and {@link
* Iterator} interfaces.
*
* <p>Memory consistency effects: As with other concurrent
* collections, actions in a thread prior to placing an object into a
* {@code ConcurrentLinkedQueue}
* <a href="package-summary.html#MemoryVisibility"><i>happen-before</i></a>
* actions subsequent to the access or removal of that element from
* the {@code ConcurrentLinkedQueue} in another thread.
*
* <p>This class is a member of the
* <a href="{@docRoot}/../technotes/guides/collections/index.html">
* Java Collections Framework</a>.
*
* @since 1.5
* @author Doug Lea
* @param <E> the type of elements held in this collection
*
*/
public class ConcurrentLinkedQueue<E> extends AbstractQueue<E>
        implements Queue<E>, java.io.Serializable {
    private static final long serialVersionUID = 196745693267521676L;

    /*
     * This is a straight adaptation of Michael & Scott algorithm.
     * For explanation, read the paper. The only (minor) algorithmic
     * difference is that this version supports lazy deletion of
     * internal nodes (method remove(Object)) -- remove CAS'es item
     * fields to null. The normal queue operations unlink but then
     * pass over nodes with null item fields. Similarly, iteration
     * methods ignore those with nulls.
     *
     * Also note that like most non-blocking algorithms in this
     * package, this implementation relies on the fact that in garbage
     * collected systems, there is no possibility of ABA problems due
     * to recycled nodes, so there is no need to use "counted
     * pointers" or related techniques seen in versions used in
     * non-GC'ed settings.
     */

    /**
     * Singly-linked list node. A null {@code item} field marks a node whose
     * element has been lazily deleted; traversals skip such nodes.
     */
    private static class Node<E> {
        private volatile E item;
        private volatile Node<E> next;

        // Field updaters provide CAS on the volatile fields without
        // per-node AtomicReference allocation.
        private static final
            AtomicReferenceFieldUpdater<Node, Node>
            nextUpdater =
            AtomicReferenceFieldUpdater.newUpdater
            (Node.class, Node.class, "next");
        private static final
            AtomicReferenceFieldUpdater<Node, Object>
            itemUpdater =
            AtomicReferenceFieldUpdater.newUpdater
            (Node.class, Object.class, "item");

        Node(E x) { item = x; }

        Node(E x, Node<E> n) { item = x; next = n; }

        E getItem() {
            return item;
        }

        // CAS the item field; used by remove(Object) for lazy deletion.
        boolean casItem(E cmp, E val) {
            return itemUpdater.compareAndSet(this, cmp, val);
        }

        void setItem(E val) {
            itemUpdater.set(this, val);
        }

        Node<E> getNext() {
            return next;
        }

        boolean casNext(Node<E> cmp, Node<E> val) {
            return nextUpdater.compareAndSet(this, cmp, val);
        }

        void setNext(Node<E> val) {
            nextUpdater.set(this, val);
        }
    }

    // CAS support for the queue's own head and tail pointers.
    private static final
        AtomicReferenceFieldUpdater<ConcurrentLinkedQueue, Node>
        tailUpdater =
        AtomicReferenceFieldUpdater.newUpdater
        (ConcurrentLinkedQueue.class, Node.class, "tail");
    private static final
        AtomicReferenceFieldUpdater<ConcurrentLinkedQueue, Node>
        headUpdater =
        AtomicReferenceFieldUpdater.newUpdater
        (ConcurrentLinkedQueue.class, Node.class, "head");

    private boolean casTail(Node<E> cmp, Node<E> val) {
        return tailUpdater.compareAndSet(this, cmp, val);
    }

    private boolean casHead(Node<E> cmp, Node<E> val) {
        return headUpdater.compareAndSet(this, cmp, val);
    }

    /**
     * Pointer to header node, initialized to a dummy node. The first
     * actual node is at head.getNext().
     */
    private transient volatile Node<E> head = new Node<E>(null, null);

    /** Pointer to last node on list **/
    private transient volatile Node<E> tail = head;

    /**
     * Creates a <tt>ConcurrentLinkedQueue</tt> that is initially empty.
     */
    public ConcurrentLinkedQueue() {}

    /**
     * Creates a <tt>ConcurrentLinkedQueue</tt>
     * initially containing the elements of the given collection,
     * added in traversal order of the collection's iterator.
     * @param c the collection of elements to initially contain
     * @throws NullPointerException if the specified collection or any
     *         of its elements are null
     */
    public ConcurrentLinkedQueue(Collection<? extends E> c) {
        for (Iterator<? extends E> it = c.iterator(); it.hasNext();)
            add(it.next());
    }

    // Have to override just to update the javadoc

    /**
     * Inserts the specified element at the tail of this queue.
     *
     * @return <tt>true</tt> (as specified by {@link Collection#add})
     * @throws NullPointerException if the specified element is null
     */
    public boolean add(E e) {
        return offer(e);
    }

    /**
     * Inserts the specified element at the tail of this queue.
     *
     * @return <tt>true</tt> (as specified by {@link Queue#offer})
     * @throws NullPointerException if the specified element is null
     */
    public boolean offer(E e) {
        if (e == null) throw new NullPointerException();
        Node<E> n = new Node<E>(e, null);
        for (;;) {
            Node<E> t = tail;
            Node<E> s = t.getNext();
            // Re-read of tail confirms the (t, s) snapshot is consistent.
            if (t == tail) {
                if (s == null) {
                    // t is the true last node: try to link the new node after it.
                    if (t.casNext(s, n)) {
                        // Linking succeeded; advancing tail is best-effort
                        // (another thread may do it for us if this CAS fails).
                        casTail(t, n);
                        return true;
                    }
                } else {
                    // tail is lagging behind; help advance it and retry.
                    casTail(t, s);
                }
            }
        }
    }

    public E poll() {
        for (;;) {
            Node<E> h = head;
            Node<E> t = tail;
            Node<E> first = h.getNext();
            // Re-read of head confirms the snapshot is consistent.
            if (h == head) {
                if (h == t) {
                    if (first == null)
                        // Queue is empty.
                        return null;
                    else
                        // tail lagging behind head; help advance it.
                        casTail(t, first);
                } else if (casHead(h, first)) {
                    E item = first.getItem();
                    if (item != null) {
                        first.setItem(null);
                        return item;
                    }
                    // else skip over deleted item, continue loop,
                }
            }
        }
    }

    public E peek() { // same as poll except don't remove item
        for (;;) {
            Node<E> h = head;
            Node<E> t = tail;
            Node<E> first = h.getNext();
            if (h == head) {
                if (h == t) {
                    if (first == null)
                        return null;
                    else
                        casTail(t, first);
                } else {
                    E item = first.getItem();
                    if (item != null)
                        return item;
                    else // remove deleted node and continue
                        casHead(h, first);
                }
            }
        }
    }

    /**
     * Returns the first actual (non-header) node on list. This is yet
     * another variant of poll/peek; here returning out the first
     * node, not element (so we cannot collapse with peek() without
     * introducing race.)
     */
    Node<E> first() {
        for (;;) {
            Node<E> h = head;
            Node<E> t = tail;
            Node<E> first = h.getNext();
            if (h == head) {
                if (h == t) {
                    if (first == null)
                        return null;
                    else
                        casTail(t, first);
                } else {
                    if (first.getItem() != null)
                        return first;
                    else // remove deleted node and continue
                        casHead(h, first);
                }
            }
        }
    }

    /**
     * Returns <tt>true</tt> if this queue contains no elements.
     *
     * @return <tt>true</tt> if this queue contains no elements
     */
    public boolean isEmpty() {
        return first() == null;
    }

    /**
     * Returns the number of elements in this queue. If this queue
     * contains more than <tt>Integer.MAX_VALUE</tt> elements, returns
     * <tt>Integer.MAX_VALUE</tt>.
     *
     * <p>Beware that, unlike in most collections, this method is
     * <em>NOT</em> a constant-time operation. Because of the
     * asynchronous nature of these queues, determining the current
     * number of elements requires an O(n) traversal.
     *
     * @return the number of elements in this queue
     */
    public int size() {
        int count = 0;
        for (Node<E> p = first(); p != null; p = p.getNext()) {
            // null items are lazily-deleted nodes; don't count them.
            if (p.getItem() != null) {
                // Collections.size() spec says to max out
                if (++count == Integer.MAX_VALUE)
                    break;
            }
        }
        return count;
    }

    /**
     * Returns <tt>true</tt> if this queue contains the specified element.
     * More formally, returns <tt>true</tt> if and only if this queue contains
     * at least one element <tt>e</tt> such that <tt>o.equals(e)</tt>.
     *
     * @param o object to be checked for containment in this queue
     * @return <tt>true</tt> if this queue contains the specified element
     */
    public boolean contains(Object o) {
        if (o == null) return false;
        for (Node<E> p = first(); p != null; p = p.getNext()) {
            E item = p.getItem();
            if (item != null &&
                o.equals(item))
                return true;
        }
        return false;
    }

    /**
     * Removes a single instance of the specified element from this queue,
     * if it is present. More formally, removes an element <tt>e</tt> such
     * that <tt>o.equals(e)</tt>, if this queue contains one or more such
     * elements.
     * Returns <tt>true</tt> if this queue contained the specified element
     * (or equivalently, if this queue changed as a result of the call).
     *
     * @param o element to be removed from this queue, if present
     * @return <tt>true</tt> if this queue changed as a result of the call
     */
    public boolean remove(Object o) {
        if (o == null) return false;
        for (Node<E> p = first(); p != null; p = p.getNext()) {
            E item = p.getItem();
            // Lazy deletion: CAS the item to null; the node stays linked
            // and is skipped/unlinked by later traversals.
            if (item != null &&
                o.equals(item) &&
                p.casItem(item, null))
                return true;
        }
        return false;
    }

    /**
     * Returns an array containing all of the elements in this queue, in
     * proper sequence.
     *
     * <p>The returned array will be "safe" in that no references to it are
     * maintained by this queue. (In other words, this method must allocate
     * a new array). The caller is thus free to modify the returned array.
     *
     * <p>This method acts as bridge between array-based and collection-based
     * APIs.
     *
     * @return an array containing all of the elements in this queue
     */
    public Object[] toArray() {
        // Use ArrayList to deal with resizing.
        ArrayList<E> al = new ArrayList<E>();
        for (Node<E> p = first(); p != null; p = p.getNext()) {
            E item = p.getItem();
            if (item != null)
                al.add(item);
        }
        return al.toArray();
    }

    /**
     * Returns an array containing all of the elements in this queue, in
     * proper sequence; the runtime type of the returned array is that of
     * the specified array. If the queue fits in the specified array, it
     * is returned therein. Otherwise, a new array is allocated with the
     * runtime type of the specified array and the size of this queue.
     *
     * <p>If this queue fits in the specified array with room to spare
     * (i.e., the array has more elements than this queue), the element in
     * the array immediately following the end of the queue is set to
     * <tt>null</tt>.
     *
     * <p>Like the {@link #toArray()} method, this method acts as bridge between
     * array-based and collection-based APIs. Further, this method allows
     * precise control over the runtime type of the output array, and may,
     * under certain circumstances, be used to save allocation costs.
     *
     * <p>Suppose <tt>x</tt> is a queue known to contain only strings.
     * The following code can be used to dump the queue into a newly
     * allocated array of <tt>String</tt>:
     *
     * <pre>
     *     String[] y = x.toArray(new String[0]);</pre>
     *
     * Note that <tt>toArray(new Object[0])</tt> is identical in function to
     * <tt>toArray()</tt>.
     *
     * @param a the array into which the elements of the queue are to
     *          be stored, if it is big enough; otherwise, a new array of the
     *          same runtime type is allocated for this purpose
     * @return an array containing all of the elements in this queue
     * @throws ArrayStoreException if the runtime type of the specified array
     *         is not a supertype of the runtime type of every element in
     *         this queue
     * @throws NullPointerException if the specified array is null
     */
    public <T> T[] toArray(T[] a) {
        // try to use sent-in array
        int k = 0;
        Node<E> p;
        for (p = first(); p != null && k < a.length; p = p.getNext()) {
            E item = p.getItem();
            if (item != null)
                a[k++] = (T)item;
        }
        if (p == null) {
            if (k < a.length)
                a[k] = null;
            return a;
        }
        // If won't fit, use ArrayList version
        ArrayList<E> al = new ArrayList<E>();
        for (Node<E> q = first(); q != null; q = q.getNext()) {
            E item = q.getItem();
            if (item != null)
                al.add(item);
        }
        return (T[])al.toArray(a);
    }

    /**
     * Returns an iterator over the elements in this queue in proper sequence.
     * The returned iterator is a "weakly consistent" iterator that
     * will never throw {@link ConcurrentModificationException},
     * and guarantees to traverse elements as they existed upon
     * construction of the iterator, and may (but is not guaranteed to)
     * reflect any modifications subsequent to construction.
     *
     * @return an iterator over the elements in this queue in proper sequence
     */
    public Iterator<E> iterator() {
        return new Itr();
    }

    private class Itr implements Iterator<E> {
        /**
         * Next node to return item for.
         */
        private Node<E> nextNode;

        /**
         * nextItem holds on to item fields because once we claim
         * that an element exists in hasNext(), we must return it in
         * the following next() call even if it was in the process of
         * being removed when hasNext() was called.
         */
        private E nextItem;

        /**
         * Node of the last returned item, to support remove.
         */
        private Node<E> lastRet;

        Itr() {
            // Prime nextNode/nextItem so hasNext() is accurate from the start.
            advance();
        }

        /**
         * Moves to next valid node and returns item to return for
         * next(), or null if no such.
         */
        private E advance() {
            lastRet = nextNode;
            E x = nextItem;

            Node<E> p = (nextNode == null)? first() : nextNode.getNext();
            for (;;) {
                if (p == null) {
                    nextNode = null;
                    nextItem = null;
                    return x;
                }
                E item = p.getItem();
                if (item != null) {
                    nextNode = p;
                    nextItem = item;
                    return x;
                } else // skip over nulls
                    p = p.getNext();
            }
        }

        public boolean hasNext() {
            return nextNode != null;
        }

        public E next() {
            if (nextNode == null) throw new NoSuchElementException();
            return advance();
        }

        public void remove() {
            Node<E> l = lastRet;
            if (l == null) throw new IllegalStateException();
            // rely on a future traversal to relink.
            l.setItem(null);
            lastRet = null;
        }
    }

    /**
     * Save the state to a stream (that is, serialize it).
     *
     * @serialData All of the elements (each an <tt>E</tt>) in
     * the proper order, followed by a null
     * @param s the stream
     */
    private void writeObject(java.io.ObjectOutputStream s)
        throws java.io.IOException {

        // Write out any hidden stuff
        s.defaultWriteObject();

        // Write out all elements in the proper order.
        for (Node<E> p = first(); p != null; p = p.getNext()) {
            Object item = p.getItem();
            if (item != null)
                s.writeObject(item);
        }

        // Use trailing null as sentinel
        s.writeObject(null);
    }

    /**
     * Reconstitute the Queue instance from a stream (that is,
     * deserialize it).
     * @param s the stream
     */
    private void readObject(java.io.ObjectInputStream s)
        throws java.io.IOException, ClassNotFoundException {
        // Read in capacity, and any hidden stuff
        s.defaultReadObject();
        // head/tail are transient; rebuild the dummy header before refilling.
        head = new Node<E>(null, null);
        tail = head;
        // Read in all elements and place in queue
        for (;;) {
            E item = (E)s.readObject();
            if (item == null)
                break;
            else
                offer(item);
        }
    }
}
| |
package org.gwtbootstrap3.client.ui.base.button;
/*
* #%L
* GwtBootstrap3
* %%
* Copyright (C) 2013 GwtBootstrap3
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.util.List;
import org.gwtbootstrap3.client.shared.js.JQuery;
import org.gwtbootstrap3.client.ui.base.ComplexWidget;
import org.gwtbootstrap3.client.ui.base.HasActive;
import org.gwtbootstrap3.client.ui.base.HasDataTarget;
import org.gwtbootstrap3.client.ui.base.HasSize;
import org.gwtbootstrap3.client.ui.base.HasType;
import org.gwtbootstrap3.client.ui.base.helper.StyleHelper;
import org.gwtbootstrap3.client.ui.base.mixin.ActiveMixin;
import org.gwtbootstrap3.client.ui.base.mixin.DataTargetMixin;
import org.gwtbootstrap3.client.ui.base.mixin.EnabledMixin;
import org.gwtbootstrap3.client.ui.base.mixin.FocusableMixin;
import org.gwtbootstrap3.client.ui.constants.Attributes;
import org.gwtbootstrap3.client.ui.constants.ButtonDismiss;
import org.gwtbootstrap3.client.ui.constants.ButtonSize;
import org.gwtbootstrap3.client.ui.constants.ButtonType;
import org.gwtbootstrap3.client.ui.constants.Styles;
import com.google.gwt.dom.client.Document;
import com.google.gwt.dom.client.Element;
import com.google.gwt.dom.client.NativeEvent;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.dom.client.DomEvent;
import com.google.gwt.event.dom.client.HasAllMouseHandlers;
import com.google.gwt.event.dom.client.HasClickHandlers;
import com.google.gwt.event.dom.client.MouseDownEvent;
import com.google.gwt.event.dom.client.MouseDownHandler;
import com.google.gwt.event.dom.client.MouseMoveEvent;
import com.google.gwt.event.dom.client.MouseMoveHandler;
import com.google.gwt.event.dom.client.MouseOutEvent;
import com.google.gwt.event.dom.client.MouseOutHandler;
import com.google.gwt.event.dom.client.MouseOverEvent;
import com.google.gwt.event.dom.client.MouseOverHandler;
import com.google.gwt.event.dom.client.MouseUpEvent;
import com.google.gwt.event.dom.client.MouseUpHandler;
import com.google.gwt.event.dom.client.MouseWheelEvent;
import com.google.gwt.event.dom.client.MouseWheelHandler;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.user.client.ui.Focusable;
import com.google.gwt.user.client.ui.HasEnabled;
import com.google.gwt.user.client.ui.Widget;
/**
 * Abstract base class for different kinds of buttons.
 *
 * <p>Combines the GWT widget contract ({@code Focusable}, {@code HasEnabled},
 * mouse/click handlers) with Bootstrap-specific styling ({@code HasType},
 * {@code HasSize}) and data-attribute behavior ({@code HasDataTarget}).
 * Concrete subclasses decide the underlying DOM element via {@link #createElement()}.
 *
 * @author Sven Jacobs
 * @author Joshua Godi
 */
public abstract class AbstractButton extends ComplexWidget implements HasEnabled, HasActive, HasType<ButtonType>,
        HasSize<ButtonSize>, HasDataTarget, HasClickHandlers, Focusable, HasAllMouseHandlers {

    /**
     * Handle to the Bootstrap "button" jQuery plugin state machine
     * (loading / reset / custom text states). Obtained via {@link AbstractButton#state()};
     * the private constructor prevents client instantiation.
     */
    public class ButtonStateHandler {

        private ButtonStateHandler() {
        }

        /** Switches the button to its "loading" text state (see {@code setDataLoadingText}). */
        public void loading() {
            button(getElement(), "loading");
        }

        /** Resets the button back to its original text state. */
        public void reset() {
            button(getElement(), "reset");
        }

        /**
         * Resets button to specified text state.
         *
         * @param state Text state
         */
        public void reset(final String state) {
            button(getElement(), state);
        }
    }

    private final ButtonStateHandler buttonStateHandler = new ButtonStateHandler();
    // Mixins implement the corresponding interfaces; the methods below are pure delegation.
    private final DataTargetMixin<AbstractButton> targetMixin = new DataTargetMixin<AbstractButton>(this);
    private final ActiveMixin<AbstractButton> activeMixin = new ActiveMixin<AbstractButton>(this);
    private final FocusableMixin<AbstractButton> focusableMixin = new FocusableMixin<AbstractButton>(this);
    private final EnabledMixin<AbstractButton> enabledMixin = new EnabledMixin<AbstractButton>(this);

    /**
     * Creates button with DEFAULT type.
     */
    protected AbstractButton() {
        this(ButtonType.DEFAULT);
    }

    /**
     * Creates a button of the given Bootstrap type; subclass-provided element,
     * base "btn" style, then the type-specific style.
     *
     * @param type Type of button
     */
    protected AbstractButton(final ButtonType type) {
        setElement(createElement());
        setStyleName(Styles.BTN);
        setType(type);
    }

    @Override
    public boolean isActive() {
        return activeMixin.isActive();
    }

    @Override
    public void setActive(final boolean active) {
        activeMixin.setActive(active);
    }

    @Override
    public void setEnabled(final boolean enabled) {
        enabledMixin.setEnabled(enabled);
    }

    @Override
    public boolean isEnabled() {
        return enabledMixin.isEnabled();
    }

    @Override
    public HandlerRegistration addClickHandler(final ClickHandler handler) {
        return addDomHandler(handler, ClickEvent.getType());
    }

    /**
     * Sets type of button.
     *
     * @param type Type of button
     */
    @Override
    public void setType(final ButtonType type) {
        // Unique: removes any previously applied ButtonType style before adding this one.
        StyleHelper.addUniqueEnumStyleName(this, ButtonType.class, type);
    }

    @Override
    public ButtonType getType() {
        // Derived from the element's current style names rather than stored in a field.
        return ButtonType.fromStyleName(getStyleName());
    }

    /**
     * Sets size of button.
     *
     * @param size Size of button
     */
    @Override
    public void setSize(final ButtonSize size) {
        StyleHelper.addUniqueEnumStyleName(this, ButtonSize.class, size);
    }

    @Override
    public ButtonSize getSize() {
        return ButtonSize.fromStyleName(getStyleName());
    }

    @Override
    public void setDataTargetWidgets(final List<Widget> widgets) {
        targetMixin.setDataTargetWidgets(widgets);
    }

    @Override
    public void setDataTargetWidget(final Widget widget) {
        targetMixin.setDataTargetWidget(widget);
    }

    @Override
    public void setDataTarget(final String dataTarget) {
        targetMixin.setDataTarget(dataTarget);
    }

    @Override
    public String getDataTarget() {
        return targetMixin.getDataTarget();
    }

    @Override
    public int getTabIndex() {
        return focusableMixin.getTabIndex();
    }

    @Override
    public void setAccessKey(final char key) {
        focusableMixin.setAccessKey(key);
    }

    @Override
    public void setFocus(final boolean focused) {
        focusableMixin.setFocus(focused);
    }

    @Override
    public void setTabIndex(final int index) {
        focusableMixin.setTabIndex(index);
    }

    @Override
    public HandlerRegistration addMouseDownHandler(final MouseDownHandler handler) {
        return addDomHandler(handler, MouseDownEvent.getType());
    }

    @Override
    public HandlerRegistration addMouseMoveHandler(final MouseMoveHandler handler) {
        return addDomHandler(handler, MouseMoveEvent.getType());
    }

    @Override
    public HandlerRegistration addMouseOutHandler(final MouseOutHandler handler) {
        return addDomHandler(handler, MouseOutEvent.getType());
    }

    @Override
    public HandlerRegistration addMouseOverHandler(final MouseOverHandler handler) {
        return addDomHandler(handler, MouseOverEvent.getType());
    }

    @Override
    public HandlerRegistration addMouseUpHandler(final MouseUpHandler handler) {
        return addDomHandler(handler, MouseUpEvent.getType());
    }

    @Override
    public HandlerRegistration addMouseWheelHandler(final MouseWheelHandler handler) {
        return addDomHandler(handler, MouseWheelEvent.getType());
    }

    /**
     * Makes button a block level element that spawns full width of parent.
     *
     * @param block True for block level element
     */
    public void setBlock(final boolean block) {
        if (block) {
            addStyleName(Styles.BTN_BLOCK);
        } else {
            removeStyleName(Styles.BTN_BLOCK);
        }
    }

    /**
     * Sets dismiss type of button.
     * <p/>
     * If button is inside a
     * {@link org.gwtbootstrap3.client.ui.Modal} and dismiss type is
     * {@code MODAL} the button will act as the dismiss (close) button for this
     * Modal. Same goes for {@link org.gwtbootstrap3.client.ui.Alert}
     * and dismiss type {@code ALERT}.
     *
     * @param dismiss Dismiss type of button; {@code null} removes the attribute
     * @see org.gwtbootstrap3.client.ui.Modal
     * @see org.gwtbootstrap3.client.ui.Alert
     */
    public void setDataDismiss(final ButtonDismiss dismiss) {
        if (dismiss != null) {
            getElement().setAttribute(Attributes.DATA_DISMISS, dismiss.getDismiss());
        } else {
            getElement().removeAttribute(Attributes.DATA_DISMISS);
        }
    }

    /**
     * Sets the text the Bootstrap button plugin shows while in the "loading" state
     * (see {@link ButtonStateHandler#loading()}). {@code null} removes the attribute.
     *
     * @param loadingText text shown while loading, or {@code null}
     */
    public void setDataLoadingText(final String loadingText) {
        if (loadingText != null) {
            getElement().setAttribute(Attributes.DATA_LOADING_TEXT, loadingText);
        } else {
            getElement().removeAttribute(Attributes.DATA_LOADING_TEXT);
        }
    }

    /** Toggles the button's pushed state via the Bootstrap button plugin. */
    public void toggle() {
        button(getElement(), "toggle");
    }

    /**
     * @return handler for manipulating this button's loading/reset text states
     */
    public ButtonStateHandler state() {
        return buttonStateHandler;
    }

    /** Programmatically fires a native click event on this button (no coordinates, no modifier keys). */
    public void click() {
        final NativeEvent event = Document.get().createClickEvent(0, 0, 0, 0, 0, false, false, false, false);
        DomEvent.fireNativeEvent(event, this);
    }

    /**
     * Creates the concrete DOM element for this button (e.g. {@code <button>} or {@code <a>}).
     *
     * @return the element backing this widget
     */
    protected abstract Element createElement();

    // @formatter:off
    // Bridge to the Bootstrap "button" jQuery plugin.
    private void button(final Element e, final String arg) {
        JQuery.jQuery(e).button(arg);
    }
}
| |
/*
* Copyright 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.internal.artifacts.ivyservice.resolveengine.graph.conflicts;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.gradle.api.Action;
import org.gradle.api.Describable;
import org.gradle.api.artifacts.ModuleIdentifier;
import org.gradle.api.artifacts.ModuleVersionIdentifier;
import org.gradle.api.artifacts.component.ComponentIdentifier;
import org.gradle.api.capabilities.Capability;
import org.gradle.api.internal.artifacts.ivyservice.resolveengine.graph.builder.ComponentState;
import org.gradle.api.internal.artifacts.ivyservice.resolveengine.graph.builder.NodeState;
import org.gradle.api.internal.artifacts.ivyservice.resolveengine.result.ComponentSelectionDescriptorInternal;
import org.gradle.api.internal.artifacts.ivyservice.resolveengine.result.ComponentSelectionReasons;
import org.gradle.internal.component.external.model.CapabilityInternal;
import java.util.ArrayDeque;
import java.util.Collection;
import java.util.Deque;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Default implementation of {@code CapabilitiesConflictHandler}: tracks which nodes
 * provide which capability (ignoring version) and queues conflicts whenever more than
 * one selected node provides the same capability. Conflicts are resolved FIFO by the
 * registered {@code Resolver}s.
 */
public class DefaultCapabilitiesConflictHandler implements CapabilitiesConflictHandler {

    private final List<Resolver> resolvers = Lists.newArrayListWithExpectedSize(3);
    // Maps a capability id (group:name, version excluded) to every node known to provide it.
    private final Map<String, Set<NodeState>> capabilityWithoutVersionToNodes = Maps.newHashMap();
    // Pending conflicts, consumed FIFO by resolveNextConflict().
    private final Deque<CapabilityConflict> conflicts = new ArrayDeque<>();

    /**
     * Registers a node as a provider of the candidate's capability and, when more than one
     * currently-selected node now provides it, records a conflict for later resolution.
     *
     * @param candidate the node/capability pair entering the graph
     * @return a conflict exposing the participating modules, or a "no conflict" marker
     */
    @Override
    public PotentialConflict registerCandidate(CapabilitiesConflictHandler.Candidate candidate) {
        CapabilityInternal capability = (CapabilityInternal) candidate.getCapability();
        String group = capability.getGroup();
        String name = capability.getName();
        final Set<NodeState> nodes = findNodesFor(capability);
        Collection<NodeState> implicitCapabilityProviders = candidate.getImplicitCapabilityProviders();
        nodes.addAll(implicitCapabilityProviders);
        NodeState node = candidate.getNode();
        // Only a newly added node can create a new conflict; a re-registration cannot.
        if (nodes.add(node) && nodes.size() > 1) {
            // The registered nodes may contain nodes which are no longer selected.
            // We don't remove them from the list in the first place because it proved to be
            // slower than filtering as needed.
            ModuleIdentifier rootId = null;
            final List<NodeState> candidatesForConflict = Lists.newArrayListWithCapacity(nodes.size());
            for (NodeState ns : nodes) {
                if (ns.isSelected()) {
                    candidatesForConflict.add(ns);
                    if (ns.isRoot()) {
                        rootId = ns.getComponent().getId().getModule();
                    }
                }
            }
            if (rootId != null && candidatesForConflict.size() > 1) {
                // This is a special case for backwards compatibility: it is possible to have
                // a cycle where the root component depends on a library which transitively
                // depends on a different version of the root module. In this case, we effectively
                // allow 2 modules to have the same capability, so we filter the nodes coming
                // from transitive dependencies
                ModuleIdentifier rootModuleId = rootId;
                candidatesForConflict.removeIf(n -> !n.isRoot() && n.getComponent().getId().getModule().equals(rootModuleId));
            }
            if (candidatesForConflict.size() > 1) {
                PotentialConflict conflict = new PotentialConflict() {
                    @Override
                    public void withParticipatingModules(Action<ModuleIdentifier> action) {
                        // Loop variable renamed from "node": it previously shadowed the
                        // enclosing method's "node" local.
                        for (NodeState participant : candidatesForConflict) {
                            action.execute(participant.getComponent().getId().getModule());
                        }
                    }

                    @Override
                    public boolean conflictExists() {
                        return true;
                    }
                };
                conflicts.add(new CapabilityConflict(group, name, candidatesForConflict));
                return conflict;
            }
        }
        return PotentialConflictFactory.noConflict();
    }

    /** Returns (creating on demand, insertion-ordered) the provider set for the capability id. */
    private Set<NodeState> findNodesFor(CapabilityInternal capability) {
        String capabilityId = capability.getCapabilityId();
        return capabilityWithoutVersionToNodes.computeIfAbsent(capabilityId, k -> Sets.newLinkedHashSet());
    }

    @Override
    public boolean hasConflicts() {
        return !conflicts.isEmpty();
    }

    /**
     * Pops the oldest conflict and asks each registered resolver in turn; the first resolver
     * producing a selection wins, the selection is annotated with a conflict-resolution reason,
     * and the given action is notified. If no resolver produces a result the conflict is dropped.
     *
     * @param resolutionAction callback invoked with the resolution result, if any
     */
    @Override
    public void resolveNextConflict(Action<ConflictResolutionResult> resolutionAction) {
        CapabilityConflict conflict = conflicts.poll();
        Details details = new Details(conflict);
        for (Resolver resolver : resolvers) {
            resolver.resolve(details);
            if (details.hasResult()) {
                resolutionAction.execute(details);
                ComponentSelectionDescriptorInternal conflictResolution = ComponentSelectionReasons.CONFLICT_RESOLUTION;
                if (details.reason != null) {
                    conflictResolution = conflictResolution.withDescription(details.reason);
                }
                details.getSelected().addCause(conflictResolution);
                return;
            }
        }
    }

    @Override
    public void registerResolver(Resolver conflictResolver) {
        resolvers.add(conflictResolver);
    }

    @Override
    public boolean hasSeenCapability(Capability capability) {
        return capabilityWithoutVersionToNodes.containsKey(((CapabilityInternal) capability).getCapabilityId());
    }

    @Override
    public boolean hasKnownConflictFor(ModuleVersionIdentifier id) {
        if (conflicts.isEmpty()) {
            return false;
        }
        return conflicts.stream().flatMap(capability -> capability.nodes.stream()).anyMatch(node -> node.getComponent().getId().equals(id));
    }

    /** Factory for the candidate handed to {@link #registerCandidate}. */
    public static CapabilitiesConflictHandler.Candidate candidate(NodeState node, Capability capability, Collection<NodeState> implicitCapabilityProviders) {
        return new Candidate(node, capability, implicitCapabilityProviders);
    }

    /** Immutable value holder pairing a node with a capability it provides. */
    private static class Candidate implements CapabilitiesConflictHandler.Candidate {
        private final NodeState node;
        private final Capability capability;
        private final Collection<NodeState> implicitCapabilityProviders;

        public Candidate(NodeState node, Capability capability, Collection<NodeState> implicitCapabilityProviders) {
            this.node = node;
            this.capability = capability;
            this.implicitCapabilityProviders = implicitCapabilityProviders;
        }

        @Override
        public NodeState getNode() {
            return node;
        }

        @Override
        public Capability getCapability() {
            return capability;
        }

        @Override
        public Collection<NodeState> getImplicitCapabilityProviders() {
            return implicitCapabilityProviders;
        }
    }

    /**
     * Mutable resolution state for one conflict: resolvers evict/select/reject candidates
     * through the {@code CandidateDetails} views handed out by {@link #getCandidates}.
     */
    private static class Details implements ResolutionDetails {
        private final CapabilityConflict conflict;
        private final Set<NodeState> evicted = Sets.newHashSet();
        private NodeState selected;
        private Describable reason;

        private Details(CapabilityConflict conflict) {
            this.conflict = conflict;
        }

        @Override
        public Collection<? extends Capability> getCapabilityVersions() {
            return conflict.descriptors;
        }

        /**
         * Returns a view of the not-yet-evicted nodes that provide exactly this
         * capability version, each wrapped so a resolver can act on it.
         */
        @Override
        public Collection<? extends CandidateDetails> getCandidates(Capability capability) {
            ImmutableList.Builder<CandidateDetails> candidates = new ImmutableList.Builder<>();
            String group = capability.getGroup();
            String name = capability.getName();
            String version = capability.getVersion();
            for (final NodeState node : conflict.nodes) {
                if (!evicted.contains(node)) {
                    Capability componentCapability = node.findCapability(group, name);
                    if (componentCapability != null && componentCapability.getVersion().equals(version)) {
                        candidates.add(new CandidateDetails() {
                            @Override
                            public ComponentIdentifier getId() {
                                return node.getComponent().getComponentId();
                            }

                            @Override
                            public String getVariantName() {
                                return node.getResolvedConfigurationId().getConfiguration();
                            }

                            @Override
                            public void evict() {
                                node.evict();
                                evicted.add(node);
                            }

                            @Override
                            public void select() {
                                selected = node;
                            }

                            @Override
                            public void reject() {
                                ComponentState component = node.getComponent();
                                component.rejectForCapabilityConflict(capability, conflictedNodes(node, conflict.nodes));
                                component.selectAndRestartModule();
                            }

                            @Override
                            public void byReason(Describable description) {
                                reason = description;
                            }
                        });
                    }
                }
            }
            return candidates.build();
        }

        /** All conflict participants except the given node. */
        private Collection<NodeState> conflictedNodes(NodeState node, Collection<NodeState> nodes) {
            List<NodeState> conflictedNodes = Lists.newArrayList(nodes);
            conflictedNodes.remove(node);
            return conflictedNodes;
        }

        @Override
        public void withParticipatingModules(Action<? super ModuleIdentifier> action) {
            // Deduplicate: several nodes may belong to the same module.
            Set<ModuleIdentifier> seen = Sets.newHashSet();
            for (NodeState node : conflict.nodes) {
                ModuleIdentifier module = node.getComponent().getId().getModule();
                if (seen.add(module)) {
                    action.execute(module);
                }
            }
        }

        @Override
        public boolean hasResult() {
            return selected != null;
        }

        @Override
        public ComponentState getSelected() {
            return selected.getComponent();
        }
    }

    /** One conflict: the providing nodes plus the distinct capability versions they declare. */
    private static class CapabilityConflict {
        private final Collection<NodeState> nodes;
        private final Set<Capability> descriptors;

        private CapabilityConflict(String group, String name, Collection<NodeState> nodes) {
            this.nodes = nodes;
            final ImmutableSet.Builder<Capability> builder = new ImmutableSet.Builder<>();
            for (final NodeState node : nodes) {
                Capability capability = node.findCapability(group, name);
                if (capability != null) {
                    builder.add(capability);
                }
            }
            this.descriptors = builder.build();
        }
    }

    // NOTE: the dead private helper sameComponentAppearsMultipleTimes(CapabilityConflict) was
    // removed; it was never called and, being private, could not be used outside this class.
}
| |
package by.istin.android.xcore.test.vk;
import android.content.ContentValues;
import android.provider.BaseColumns;
import by.istin.android.xcore.annotations.dbDouble;
import by.istin.android.xcore.annotations.dbInteger;
import by.istin.android.xcore.annotations.dbLong;
import by.istin.android.xcore.annotations.dbString;
import by.istin.android.xcore.db.impl.DBHelper;
import by.istin.android.xcore.db.entity.IBeforeUpdate;
import by.istin.android.xcore.db.IDBConnection;
import by.istin.android.xcore.source.DataSourceRequest;
import by.istin.android.xcore.utils.HashUtils;
import com.google.gson.annotations.SerializedName;
/**
 * Database entity describing a VK message attachment (document, audio, photo, video or geo).
 *
 * <p>Each {@code @db*}-annotated constant is a column name; {@code @SerializedName} maps the
 * column to a nested JSON path (e.g. {@code "doc:did"}) in the VK API response. Only the
 * field group matching {@link #TYPE} is expected to be populated for a given row.
 */
public class Attachment implements BaseColumns, IBeforeUpdate {
    // Primary key, generated in onBeforeUpdate() from a hash of identifying fields.
    @dbLong
    public static final String ID = _ID;
    // Attachment kind discriminator (doc/audio/photo/video/geo).
    @dbString
    public static final String TYPE = "type";
    // Owning message and dialog.
    @dbLong
    public static final String MESSAGE_ID = "message_id";
    @dbLong
    public static final String DIALOG_ID = "dialog_id";
    /* ===== DOC ===== */
    @dbLong
    @SerializedName(value="doc:did")
    public static final String DOC_ID = "did";
    @dbLong
    @SerializedName(value="doc:owner_id")
    public static final String DOC_OWNER_ID = "doc_owner_id";
    @dbLong
    @SerializedName(value="doc:size")
    public static final String DOC_SIZE = "doc_size";
    @dbString
    @SerializedName(value="doc:title")
    public static final String DOC_TITLE = "doc_title";
    @dbString
    @SerializedName(value="doc:ext")
    public static final String DOC_EXT = "doc_ext";
    @dbString
    @SerializedName(value="doc:url")
    public static final String DOC_URL = "doc_url";
    @dbString
    @SerializedName(value="doc:access_key")
    public static final String DOC_ACCESS_KEY = "doc_access_key";
    /* ===== AUDIO ===== */
    @dbLong
    @SerializedName(value="audio:aid")
    public static final String AUDIO_ID = "aid";
    @dbLong
    @SerializedName(value="audio:owner_id")
    public static final String AUDIO_OWNER_ID = "audio_owner_id";
    @dbInteger
    @SerializedName(value="audio:duration")
    public static final String AUDIO_DURATION = "audio_duration";
    @dbString
    @SerializedName(value="audio:artist")
    public static final String AUDIO_ARTIST = "audio_artist";
    @dbString
    @SerializedName(value="audio:title")
    public static final String AUDIO_TITLE = "audio_title";
    @dbString
    @SerializedName(value="audio:url")
    public static final String AUDIO_URL = "audio_url";
    @dbString
    @SerializedName(value="audio:performer")
    public static final String AUDIO_PERFORMER = "audio_performer";
    @dbString
    @SerializedName(value="audio:album")
    public static final String AUDIO_ALBUM = "audio_album";
    /* ===== PHOTO ===== */
    @dbLong
    @SerializedName(value="photo:pid")
    public static final String PHOTO_ID = "pid";
    @dbLong
    @SerializedName(value="photo:aid")
    public static final String PHOTO_AID = "photo_aid";
    @dbLong
    @SerializedName(value="photo:owner_id")
    public static final String PHOTO_OWNER_ID = "photo_owner_id";
    @dbLong
    @SerializedName(value="photo:created")
    public static final String PHOTO_CREATED = "photo_created";
    @dbInteger
    @SerializedName(value="photo:width")
    public static final String PHOTO_WIDTH = "photo_width";
    @dbInteger
    @SerializedName(value="photo:height")
    public static final String PHOTO_HEIGHT = "photo_height";
    @dbDouble
    @SerializedName(value="photo:lat")
    public static final String PHOTO_LAT = "photo_lat";
    @dbDouble
    @SerializedName(value="photo:long")
    public static final String PHOTO_LONG = "photo_long";
    // Photo source URLs at increasing resolutions.
    @dbString
    @SerializedName(value="photo:src")
    public static final String PHOTO_SRC = "photo_src";
    @dbString
    @SerializedName(value="photo:src_big")
    public static final String PHOTO_SRC_BIG = "photo_src_big";
    @dbString
    @SerializedName(value="photo:src_small")
    public static final String PHOTO_SRC_SMALL = "photo_src_small";
    @dbString
    @SerializedName(value="photo:src_xbig")
    public static final String PHOTO_SRC_XBIG = "photo_src_xbig";
    @dbString
    @SerializedName(value="photo:src_xxbig")
    public static final String PHOTO_SRC_XXBIG = "photo_src_xxbig";
    @dbString
    @SerializedName(value="photo:src_xxxbig")
    public static final String PHOTO_SRC_XXXBIG = "photo_src_xxxbig";
    @dbString
    @SerializedName(value="photo:text")
    public static final String PHOTO_TEXT = "photo_text";
    @dbString
    @SerializedName(value="photo:access_key")
    public static final String PHOTO_ACCESS_KEY = "photo_access_key";
    /* ===== VIDEO ===== */
    @dbLong
    @SerializedName(value="video:vid")
    public static final String VIDEO_ID = "vid";
    @dbLong
    @SerializedName(value="video:owner_id")
    public static final String VIDEO_OWNER_ID = "video_owner_id";
    @dbLong
    @SerializedName(value="video:date")
    public static final String VIDEO_DATE = "video_date";
    @dbLong
    @SerializedName(value="video:views")
    public static final String VIDEO_VIEWS = "video_views";
    @dbLong
    @SerializedName(value="video:duration")
    public static final String VIDEO_DURATION = "video_duration";
    @dbString
    @SerializedName(value="video:title")
    public static final String VIDEO_TITLE = "video_title";
    @dbString
    @SerializedName(value="video:description")
    public static final String VIDEO_DESCRIPTION = "video_description";
    // Video thumbnail URLs at increasing resolutions.
    @dbString
    @SerializedName(value="video:image")
    public static final String VIDEO_IMAGE = "video_image";
    @dbString
    @SerializedName(value="video:image_big")
    public static final String VIDEO_IMAGE_BIG = "video_image_big";
    @dbString
    @SerializedName(value="video:image_small")
    public static final String VIDEO_IMAGE_SMALL = "video_image_small";
    @dbString
    @SerializedName(value="video:image_xbig")
    public static final String VIDEO_IMAGE_XBIG = "video_image_xbig";
    @dbString
    @SerializedName(value="video:access_key")
    public static final String VIDEO_ACCESS_KEY = "video_access_key";
    /* ===== GEO ===== */
    @dbString
    @SerializedName(value="geo:type")
    public static final String GEO_TYPE = "geo_type";
    @dbString
    @SerializedName(value="geo:coordinates")
    public static final String GEO_COORDINATES = "geo_coordinates";
    @dbString
    @SerializedName(value="geo:place:title")
    public static final String GEO_PLACE_TITLE = "geo_place_title";
    @dbString
    @SerializedName(value="geo:place:country")
    public static final String GEO_PLACE_COUNTRY = "geo_place_country";
    @dbString
    @SerializedName(value="geo:place:city")
    public static final String GEO_PLACE_CITY = "geo_place_city";

    /**
     * Computes a synthetic primary key before the row is written: concatenates the
     * identifying fields and stores a hash of the resulting string into {@code _ID}.
     *
     * <p>Absent fields concatenate as the literal string "null" (boxed Long/String + String
     * concatenation), which still yields a stable, distinct hash input.
     * NOTE(review): collisions across attachment types are only avoided as far as
     * HashUtils.generateId distinguishes these inputs — confirm against its implementation.
     */
    @Override
    public void onBeforeUpdate(DBHelper dbHelper, IDBConnection db, DataSourceRequest dataSourceRequest, ContentValues contentValues) {
        String hashValue = contentValues.getAsString(TYPE)
                + contentValues.getAsLong(DOC_ID)
                + contentValues.getAsLong(AUDIO_ID)
                + contentValues.getAsLong(VIDEO_ID)
                + contentValues.getAsLong(PHOTO_ID)
                + contentValues.getAsLong(MESSAGE_ID)
                + contentValues.getAsLong(DIALOG_ID)
                + contentValues.getAsString(GEO_COORDINATES);
        contentValues.put(_ID, HashUtils.generateId(hashValue));
    }
}
| |
package net.axstudio.axparty.guessword;
import java.util.Locale;
import java.util.Vector;
import net.axstudio.axparty.guessword.Rule.PlayerType;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;
import android.support.v4.view.ViewPager;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.ListView;
/**
 * Game-setup wizard: a two-page {@link ViewPager} where the user first picks the number
 * of players (which fixes the role rule) and then the word length (word library entry),
 * both persisted to the app's default game settings, before launching {@code GameActivity}.
 */
public class StartGameActivity extends FragmentActivity
{
    /**
     * The {@link android.support.v4.view.PagerAdapter} that will provide
     * fragments for each of the sections. We use a
     * {@link android.support.v4.app.FragmentPagerAdapter} derivative, which
     * will keep every loaded fragment in memory. If this becomes too memory
     * intensive, it may be best to switch to a
     * {@link android.support.v4.app.FragmentStatePagerAdapter}.
     */
    SectionsPagerAdapter mSectionsPagerAdapter;

    /**
     * The {@link ViewPager} that will host the section contents.
     */
    ViewPager mViewPager;

    // Current selections; defaults are overridden from SharedPreferences when the pages build.
    public int mNumPlayers = 7;
    public int mNumWordChars = 2;

    @Override
    protected void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_start_game);
        // Create the adapter that will return a fragment for each of the three
        // primary sections of the app.
        mSectionsPagerAdapter = new SectionsPagerAdapter(this,
                getSupportFragmentManager());
        // Set up the ViewPager with the sections adapter.
        mViewPager = (ViewPager) findViewById(R.id.pager);
        mViewPager.setAdapter(mSectionsPagerAdapter);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu)
    {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.start_game, menu);
        return true;
    }

    // SharedPreferences keys for the two persisted selections.
    static final String NUM_PLAYERS_KEY = "num_players";
    static final String NUM_WORD_CHARS_KEY = "num_word_chars";

    /**
     * List-row wrapper giving a {@link Rule} a localized, formatted display string
     * (total players plus per-role counts) for use in an ArrayAdapter.
     */
    class RuleAdapter
    {
        Context mContext;
        Rule mRule;

        RuleAdapter(Context context, Rule rule)
        {
            mContext = context;
            mRule = rule;
        }

        // ArrayAdapter renders items via toString().
        public String toString()
        {
            return String.format(Locale.getDefault(),
                    mContext.getString(R.string.rule_name),
                    mRule.getTotalPlayers(),
                    mRule.getNumPlayersByType(PlayerType.MAJOR),
                    mRule.getNumPlayersByType(PlayerType.MINOR),
                    mRule.getNumPlayersByType(PlayerType.IDIOT));
        }
    }

    /**
     * A {@link FragmentPagerAdapter} that returns a fragment corresponding to
     * one of the sections/tabs/pages.
     *
     * <p>Page 0: single-choice list of player-count rules. Page 1: single-choice list
     * of word libraries (word length). Both restore the saved selection on build and
     * persist changes on click.
     */
    public class SectionsPagerAdapter extends FragmentPagerAdapter
    {
        DummySectionFragment[] mFragments;

        public SectionsPagerAdapter(Context context, FragmentManager fm)
        {
            super(fm);
            mFragments = new DummySectionFragment[] {
                    new DummySectionFragment()
                    {
                        // Page 0: pick total player count from the app's default rules.
                        @Override
                        public View createChildView(LayoutInflater inflater,
                                ViewGroup container, Bundle savedInstanceState)
                        {
                            ListView view = new ListView(inflater.getContext());
                            GuessWordApp app = (GuessWordApp) inflater
                                    .getContext().getApplicationContext();
                            Vector<RuleAdapter> data = new Vector<RuleAdapter>();
                            for (Rule r : app.getDefaultRules())
                                data.add(new RuleAdapter(inflater.getContext(),
                                        r));
                            ArrayAdapter<RuleAdapter> rules = new ArrayAdapter<RuleAdapter>(
                                    inflater.getContext(),
                                    android.R.layout.simple_list_item_single_choice,
                                    data);
                            view.setAdapter(rules);
                            view.setChoiceMode(ListView.CHOICE_MODE_SINGLE);
                            // Restore the persisted player count and pre-check the matching rule.
                            mNumPlayers = app.getDefaultGameSetting().getInt(
                                    NUM_PLAYERS_KEY, mNumPlayers);
                            for (int i = 0; i < rules.getCount(); ++i)
                            {
                                if (rules.getItem(i).mRule.getTotalPlayers() == mNumPlayers)
                                {
                                    view.setItemChecked(i, true);
                                    break;
                                }
                            }
                            view.setOnItemClickListener(new OnItemClickListener()
                            {
                                @Override
                                public void onItemClick(AdapterView<?> parent,
                                        View view, int position, long id)
                                {
                                    GuessWordApp app = GuessWordApp.getApp(view
                                            .getContext());
                                    RuleAdapter rule = (RuleAdapter) parent
                                            .getItemAtPosition(position);
                                    mNumPlayers = rule.mRule.getTotalPlayers();
                                    SharedPreferences.Editor editor = app
                                            .getDefaultGameSetting().edit();
                                    editor.putInt(NUM_PLAYERS_KEY, mNumPlayers);
                                    // NOTE(review): commit() writes synchronously on the UI
                                    // thread; apply() would be asynchronous — left as-is.
                                    editor.commit();
                                }
                            });
                            return view;
                        }

                        public CharSequence getPageTitle()
                        {
                            return getApplicationContext().getString(
                                    R.string.title_select_player_num);
                        }
                    }, new DummySectionFragment()
                    {
                        // Page 1: pick the word length (word library entry).
                        @Override
                        public View createChildView(LayoutInflater inflater,
                                ViewGroup container, Bundle savedInstanceState)
                        {
                            ListView view = new ListView(inflater.getContext());
                            GuessWordApp app = (GuessWordApp) inflater
                                    .getContext().getApplicationContext();
                            Vector<WordLibAdapter> data = new Vector<WordLibAdapter>();
                            for (WordLibEntry e : app.getWordLib().getEntries())
                            {
                                data.add(new WordLibAdapter(inflater
                                        .getContext(), e));
                            }
                            ArrayAdapter<WordLibAdapter> adapter = new ArrayAdapter<WordLibAdapter>(
                                    inflater.getContext(),
                                    android.R.layout.simple_list_item_single_choice,// android.R.layout.simple_list_item_multiple_choice,
                                    data);
                            view.setAdapter(adapter);
                            // view.setChoiceMode(ListView.CHOICE_MODE_MULTIPLE);
                            view.setChoiceMode(ListView.CHOICE_MODE_SINGLE);
                            {
                                // final Set<String> wordCharCountSet = app
                                // .getDefaultGameSetting().getStringSet(
                                // NUM_WORD_CHARS_KEY,
                                // new HashSet<String>());
                                // Restore the persisted word length and pre-check the matching entry.
                                mNumWordChars = app.getDefaultGameSetting()
                                        .getInt(NUM_WORD_CHARS_KEY,
                                                mNumWordChars);
                                for (int i = 0; i < adapter.getCount(); ++i)
                                {
                                    if (adapter.getItem(i).mEntry.mNumChars == mNumWordChars)
                                    {
                                        view.setItemChecked(i, true);
                                        break;
                                    }
                                }
                            }
                            view.setOnItemClickListener(new OnItemClickListener()
                            {
                                @Override
                                public void onItemClick(AdapterView<?> parent,
                                        View view, int position, long id)
                                {
                                    // if (((ListView) view)
                                    // .isItemChecked(position))
                                    {
                                        GuessWordApp app = GuessWordApp
                                                .getApp(view.getContext());
                                        // Set<String> wordCharCountSet =
                                        // app
                                        // .getDefaultGameSetting()
                                        // .getStringSet(
                                        // NUM_WORD_CHARS_KEY,
                                        // new HashSet<String>());
                                        WordLibAdapter lib = (WordLibAdapter) parent
                                                .getItemAtPosition(position);
                                        mNumWordChars = lib.mEntry.mNumChars;
                                        // if (!wordCharCountSet
                                        // .contains(lib.mEntry.mNumChars))
                                        {
                                            // wordCharCountSet.add(Integer
                                            // .toString(lib.mEntry.mNumChars));
                                            SharedPreferences.Editor editor = app
                                                    .getDefaultGameSetting()
                                                    .edit();
                                            editor.putInt(NUM_WORD_CHARS_KEY,
                                                    mNumWordChars);
                                            editor.commit();
                                        }
                                    }
                                }
                            });
                            return view;
                        }

                        public CharSequence getPageTitle()
                        {
                            // NOTE(review): reuses title_select_player_num, same as page 0 —
                            // looks like a copy-paste; a word-length title was probably
                            // intended. Confirm before changing the resource.
                            return getApplicationContext().getString(
                                    R.string.title_select_player_num);
                        }
                    },
            };
        }

        @Override
        public Fragment getItem(int position)
        {
            return mFragments[position];
        }

        @Override
        public int getCount()
        {
            return mFragments.length;
        }

        // NOTE(review): returns the fragment's array index rather than the PagerAdapter
        // POSITION_* contract values. It is used internally by DummySectionFragment to
        // locate itself; confirm the ViewPager never relies on it for change detection.
        @Override
        public int getItemPosition(Object object)
        {
            for (int i = 0; i < mFragments.length; ++i)
                if (mFragments[i] == object)
                    return i;
            return POSITION_UNCHANGED;
        }

        @Override
        public CharSequence getPageTitle(int position)
        {
            return mFragments[position].getPageTitle();
        }
    }

    /**
     * A dummy fragment representing a section of the app, but that simply
     * displays dummy text.
     *
     * <p>Subclasses supply the page content via {@link #createChildView}; this base
     * wraps it in a vertical LinearLayout with a bottom button that either advances
     * the pager or (on the last page) starts the game.
     */
    public static class DummySectionFragment extends Fragment
    {
        public DummySectionFragment()
        {
            super();
        }

        public CharSequence getPageTitle()
        {
            // if (getPageTitleID() > 0)
            // return getResources().getString(getPageTitleID());
            return this.toString();
            // return mTitle;
        }

        // Hook for subclasses: the page's main content view.
        protected View createChildView(LayoutInflater inflater,
                ViewGroup container, Bundle savedInstanceState)
        {
            return null;
        }

        @Override
        final public View onCreateView(LayoutInflater inflater,
                ViewGroup container, Bundle savedInstanceState)
        {
            StartGameActivity activity = (StartGameActivity) getActivity();
            LinearLayout layout = new LinearLayout(activity);
            layout.setLayoutParams(new LinearLayout.LayoutParams(
                    ViewGroup.LayoutParams.MATCH_PARENT,
                    ViewGroup.LayoutParams.MATCH_PARENT, 0));
            layout.setOrientation(LinearLayout.VERTICAL);
            {
                // Content view takes all remaining vertical space (weight 1).
                View view = createChildView(inflater, container,
                        savedInstanceState);
                view.setLayoutParams(new LinearLayout.LayoutParams(
                        ViewGroup.LayoutParams.MATCH_PARENT,
                        ViewGroup.LayoutParams.WRAP_CONTENT, 1));
                layout.addView(view);
            }
            {
                // Bottom navigation button: "start game" on the last page, "next" otherwise.
                Button btn = new Button(layout.getContext());
                LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(
                        ViewGroup.LayoutParams.MATCH_PARENT,
                        ViewGroup.LayoutParams.WRAP_CONTENT, 0);
                btn.setLayoutParams(params);
                //btn.setGravity(Gravity.CENTER);
                final int position = activity.mSectionsPagerAdapter.getItemPosition(this);
                if ((position == activity.mSectionsPagerAdapter.getCount() - 1))
                {
                    btn.setText(R.string.start_game);
                    btn.setOnClickListener(new OnClickListener()
                    {
                        @Override
                        public void onClick(View v)
                        {
                            StartGameActivity activity = (StartGameActivity) v
                                    .getContext();
                            activity.startGame();
                        }
                    });
                }
                else
                {
                    btn.setText(R.string.next_step);
                    btn.setOnClickListener(new OnClickListener()
                    {
                        @Override
                        public void onClick(View v)
                        {
                            StartGameActivity activity = (StartGameActivity) v
                                    .getContext();
                            activity.mViewPager
                                    .setCurrentItem(activity.mViewPager
                                            .getCurrentItem() + 1);
                        }
                    });
                }
                layout.addView(btn);
            }
            return layout;
        }
    }

    /**
     * Launches GameActivity with the selected per-role player counts and word length,
     * then finishes this wizard. Falls back to {3, 2, 2} when no rule matches
     * {@code mNumPlayers}.
     */
    public void startGame()
    {
        GuessWordApp app = GuessWordApp.getApp(this);
        int[] numPlayers = { 3, 2, 2 };
        for (Rule r : app.getDefaultRules())
        {
            if (r.getTotalPlayers() == mNumPlayers)
            {
                numPlayers = r.getNumPlayers();
                break;
            }
        }
        {
            Intent intent = new Intent();
            // Bundle bundle = new Bundle();
            // bundle.putSerializable("game",game);
            // intent.putExtra("bundle", bundle);
            intent.putExtra("numPlayers", numPlayers);
            intent.putExtra("numWordChars", mNumWordChars);
            intent.setClass(this, GameActivity.class);
            startActivity(intent);
            finish();
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.io;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import com.google.auto.value.AutoValue;
import java.io.IOException;
import java.io.Serializable;
import java.nio.channels.Channels;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.SeekableByteChannel;
import java.nio.charset.StandardCharsets;
import javax.annotation.Nullable;
import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.apache.beam.sdk.io.fs.EmptyMatchTreatment;
import org.apache.beam.sdk.io.fs.MatchResult;
import org.apache.beam.sdk.options.ValueProvider;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.Reshuffle;
import org.apache.beam.sdk.transforms.Values;
import org.apache.beam.sdk.transforms.Watch;
import org.apache.beam.sdk.transforms.Watch.Growth.TerminationCondition;
import org.apache.beam.sdk.transforms.display.DisplayData;
import org.apache.beam.sdk.transforms.display.HasDisplayData;
import org.apache.beam.sdk.util.StreamUtils;
import org.apache.beam.sdk.values.PBegin;
import org.apache.beam.sdk.values.PCollection;
import org.joda.time.Duration;
import org.joda.time.Instant;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Transforms for working with files. Currently includes matching of filepatterns via {@link #match}
* and {@link #matchAll}, and reading matches via {@link #readMatches}.
*/
public class FileIO {
// Used by MatchAll.MatchFn to report per-filepattern match counts.
private static final Logger LOG = LoggerFactory.getLogger(FileIO.class);
/**
 * Matches a filepattern using {@link FileSystems#match} and emits every matched resource
 * (files and directories alike) as {@link MatchResult.Metadata}.
 *
 * <p>The pattern is matched once, yielding a bounded {@link PCollection}. For continuous
 * watching of the pattern, see {@link MatchAll#continuously(Duration, TerminationCondition)},
 * which yields an unbounded {@link PCollection}.
 *
 * <p>A pattern that matches nothing is handled per {@link EmptyMatchTreatment#DISALLOW} by
 * default; override with {@link Match#withEmptyMatchTreatment}.
 */
public static Match match() {
  // A directly-specified filepattern is expected to match something, hence DISALLOW.
  MatchConfiguration config = MatchConfiguration.create(EmptyMatchTreatment.DISALLOW);
  return new AutoValue_FileIO_Match.Builder().setConfiguration(config).build();
}
/**
 * Like {@link #match}, but applied to every filepattern in the input collection.
 *
 * <p>No deduplication is performed across patterns: a resource matched by several
 * filepatterns is emitted once per matching pattern.
 *
 * <p>A pattern that matches nothing is handled per {@link
 * EmptyMatchTreatment#ALLOW_IF_WILDCARD} by default; override with {@link
 * MatchAll#withEmptyMatchTreatment}.
 */
public static MatchAll matchAll() {
  // Patterns arrive as data here, so an empty wildcard match is tolerated by default.
  MatchConfiguration config = MatchConfiguration.create(EmptyMatchTreatment.ALLOW_IF_WILDCARD);
  return new AutoValue_FileIO_MatchAll.Builder().setConfiguration(config).build();
}
/**
 * Converts each result of {@link #match} or {@link #matchAll} into a {@link ReadableFile}
 * through which the file's contents can be read, optionally decompressed.
 *
 * <p>Defaults: compression is {@link Compression#AUTO} (detected from the filename) and
 * directories are silently skipped ({@link ReadMatches.DirectoryTreatment#SKIP}).
 */
public static ReadMatches readMatches() {
  return new AutoValue_FileIO_ReadMatches.Builder()
      .setDirectoryTreatment(ReadMatches.DirectoryTreatment.SKIP)
      .setCompression(Compression.AUTO)
      .build();
}
/** A utility class for accessing a potentially compressed file. */
public static final class ReadableFile {
  private final MatchResult.Metadata metadata;
  private final Compression compression;

  ReadableFile(MatchResult.Metadata metadata, Compression compression) {
    this.metadata = metadata;
    this.compression = compression;
  }

  /** Returns the {@link MatchResult.Metadata} of the file. */
  public MatchResult.Metadata getMetadata() {
    return metadata;
  }

  /** Returns the decompression method that {@link #open} will apply. */
  public Compression getCompression() {
    return compression;
  }

  /**
   * Opens the file and returns a channel over its contents, decompressing on the fly
   * according to {@link #getCompression}.
   */
  public ReadableByteChannel open() throws IOException {
    ReadableByteChannel raw = FileSystems.open(metadata.resourceId());
    return compression.readDecompressed(raw);
  }

  /**
   * Like {@link #open} but returns a seekable channel; fails if the file is not
   * {@link MatchResult.Metadata#isReadSeekEfficient seekable}.
   */
  public SeekableByteChannel openSeekable() throws IOException {
    checkState(
        getMetadata().isReadSeekEfficient(),
        "The file %s is not seekable",
        metadata.resourceId());
    ReadableByteChannel channel = open();
    return (SeekableByteChannel) channel;
  }

  /** Reads the entire file into a byte array. */
  public byte[] readFullyAsBytes() throws IOException {
    return StreamUtils.getBytes(Channels.newInputStream(open()));
  }

  /** Reads the entire file and decodes it as a UTF-8 {@link String}. */
  public String readFullyAsUTF8String() throws IOException {
    byte[] contents = readFullyAsBytes();
    return new String(contents, StandardCharsets.UTF_8);
  }

  @Override
  public String toString() {
    return String.format("ReadableFile{metadata=%s, compression=%s}", metadata, compression);
  }
}
/**
 * Configuration of filepattern matching: how empty matches are treated and whether the
 * filepattern should be watched continuously for new files.
 */
@AutoValue
public abstract static class MatchConfiguration implements HasDisplayData, Serializable {
  /** Creates a {@link MatchConfiguration} with the given {@link EmptyMatchTreatment}. */
  public static MatchConfiguration create(EmptyMatchTreatment emptyMatchTreatment) {
    Builder builder = new AutoValue_FileIO_MatchConfiguration.Builder();
    return builder.setEmptyMatchTreatment(emptyMatchTreatment).build();
  }

  // AutoValue property declarations - names are part of the generated-code contract.
  abstract EmptyMatchTreatment getEmptyMatchTreatment();
  @Nullable
  abstract Duration getWatchInterval();
  @Nullable
  abstract TerminationCondition<String, ?> getWatchTerminationCondition();
  abstract Builder toBuilder();

  @AutoValue.Builder
  abstract static class Builder {
    abstract Builder setEmptyMatchTreatment(EmptyMatchTreatment treatment);
    abstract Builder setWatchInterval(Duration watchInterval);
    abstract Builder setWatchTerminationCondition(TerminationCondition<String, ?> condition);
    abstract MatchConfiguration build();
  }

  /** Returns a copy using the given {@link EmptyMatchTreatment}. */
  public MatchConfiguration withEmptyMatchTreatment(EmptyMatchTreatment treatment) {
    Builder copy = toBuilder();
    return copy.setEmptyMatchTreatment(treatment).build();
  }

  /**
   * Returns a copy configured to continuously watch for new files at the given interval
   * until the termination condition is reached; the condition's input is the filepattern.
   */
  public MatchConfiguration continuously(
      Duration interval, TerminationCondition<String, ?> condition) {
    Builder copy = toBuilder().setWatchInterval(interval);
    return copy.setWatchTerminationCondition(condition).build();
  }

  @Override
  public void populateDisplayData(DisplayData.Builder builder) {
    builder.add(
        DisplayData.item("emptyMatchTreatment", getEmptyMatchTreatment().toString())
            .withLabel("Treatment of filepatterns that match no files"));
    // Watch interval is optional; only surfaced when continuous watching is configured.
    builder.addIfNotNull(
        DisplayData.item("watchForNewFilesInterval", getWatchInterval())
            .withLabel("Interval to watch for new files"));
  }
}
/** Implementation of {@link #match}. */
@AutoValue
public abstract static class Match extends PTransform<PBegin, PCollection<MatchResult.Metadata>> {
  // AutoValue property declarations - names are part of the generated-code contract.
  @Nullable
  abstract ValueProvider<String> getFilepattern();
  abstract MatchConfiguration getConfiguration();
  abstract Builder toBuilder();

  @AutoValue.Builder
  abstract static class Builder {
    abstract Builder setFilepattern(ValueProvider<String> filepattern);
    abstract Builder setConfiguration(MatchConfiguration configuration);
    abstract Match build();
  }

  /** Matches the given filepattern. */
  public Match filepattern(String filepattern) {
    return filepattern(ValueProvider.StaticValueProvider.of(filepattern));
  }

  /** Like {@link #filepattern(String)} but using a {@link ValueProvider}. */
  public Match filepattern(ValueProvider<String> filepattern) {
    Builder copy = toBuilder();
    return copy.setFilepattern(filepattern).build();
  }

  /** Sets the {@link MatchConfiguration}. */
  public Match withConfiguration(MatchConfiguration configuration) {
    Builder copy = toBuilder();
    return copy.setConfiguration(configuration).build();
  }

  /** See {@link MatchConfiguration#withEmptyMatchTreatment(EmptyMatchTreatment)}. */
  public Match withEmptyMatchTreatment(EmptyMatchTreatment treatment) {
    MatchConfiguration updated = getConfiguration().withEmptyMatchTreatment(treatment);
    return withConfiguration(updated);
  }

  /**
   * See {@link MatchConfiguration#continuously}. The returned {@link PCollection} is unbounded.
   *
   * <p>This works only in runners supporting {@link Experimental.Kind#SPLITTABLE_DO_FN}.
   */
  @Experimental(Experimental.Kind.SPLITTABLE_DO_FN)
  public Match continuously(
      Duration pollInterval, TerminationCondition<String, ?> terminationCondition) {
    MatchConfiguration updated =
        getConfiguration().continuously(pollInterval, terminationCondition);
    return withConfiguration(updated);
  }

  @Override
  public PCollection<MatchResult.Metadata> expand(PBegin input) {
    // Wrap the single filepattern into a collection and delegate to MatchAll.
    PCollection<String> pattern =
        input.apply(
            "Create filepattern", Create.ofProvider(getFilepattern(), StringUtf8Coder.of()));
    return pattern.apply("Via MatchAll", matchAll().withConfiguration(getConfiguration()));
  }
}
/** Implementation of {@link #matchAll}. */
@AutoValue
public abstract static class MatchAll
extends PTransform<PCollection<String>, PCollection<MatchResult.Metadata>> {
abstract MatchConfiguration getConfiguration();
abstract Builder toBuilder();
@AutoValue.Builder
abstract static class Builder {
abstract Builder setConfiguration(MatchConfiguration configuration);
abstract MatchAll build();
}
/** Like {@link Match#withConfiguration}. */
public MatchAll withConfiguration(MatchConfiguration configuration) {
return toBuilder().setConfiguration(configuration).build();
}
/** Like {@link Match#withEmptyMatchTreatment}. */
public MatchAll withEmptyMatchTreatment(EmptyMatchTreatment treatment) {
return withConfiguration(getConfiguration().withEmptyMatchTreatment(treatment));
}
/** Like {@link Match#continuously}. */
@Experimental(Experimental.Kind.SPLITTABLE_DO_FN)
public MatchAll continuously(
Duration pollInterval, TerminationCondition<String, ?> terminationCondition) {
return withConfiguration(getConfiguration().continuously(pollInterval, terminationCondition));
}
@Override
public PCollection<MatchResult.Metadata> expand(PCollection<String> input) {
PCollection<MatchResult.Metadata> res;
// A null watch interval means "match each pattern once"; otherwise each pattern is
// polled repeatedly via Watch until its termination condition fires.
if (getConfiguration().getWatchInterval() == null) {
res = input.apply(
"Match filepatterns",
ParDo.of(new MatchFn(getConfiguration().getEmptyMatchTreatment())));
} else {
res = input
.apply(
"Continuously match filepatterns",
Watch.growthOf(new MatchPollFn())
.withPollInterval(getConfiguration().getWatchInterval())
.withTerminationPerInput(getConfiguration().getWatchTerminationCondition()))
.apply(Values.<MatchResult.Metadata>create());
}
// Redistribute results over random keys so downstream processing is not coupled to
// which input filepattern produced each match.
return res.apply(Reshuffle.<MatchResult.Metadata>viaRandomKey());
}
// One-shot matcher: expands a single filepattern with the configured empty-match policy.
private static class MatchFn extends DoFn<String, MatchResult.Metadata> {
private final EmptyMatchTreatment emptyMatchTreatment;
public MatchFn(EmptyMatchTreatment emptyMatchTreatment) {
this.emptyMatchTreatment = emptyMatchTreatment;
}
@ProcessElement
public void process(ProcessContext c) throws Exception {
String filepattern = c.element();
MatchResult match = FileSystems.match(filepattern, emptyMatchTreatment);
LOG.info("Matched {} files for pattern {}", match.metadata().size(), filepattern);
for (MatchResult.Metadata metadata : match.metadata()) {
c.output(metadata);
}
}
}
// Poll function for the continuous case. Always uses EmptyMatchTreatment.ALLOW because
// a watched pattern may legitimately match nothing on a given poll.
private static class MatchPollFn extends Watch.Growth.PollFn<String, MatchResult.Metadata> {
@Override
public Watch.Growth.PollResult<MatchResult.Metadata> apply(String element, Context c)
throws Exception {
return Watch.Growth.PollResult.incomplete(
Instant.now(), FileSystems.match(element, EmptyMatchTreatment.ALLOW).metadata());
}
}
}
/** Implementation of {@link #readMatches}. */
@AutoValue
public abstract static class ReadMatches
extends PTransform<PCollection<MatchResult.Metadata>, PCollection<ReadableFile>> {
// Policy for directories that appear among the matched resources.
enum DirectoryTreatment {
SKIP,
PROHIBIT
}
abstract Compression getCompression();
abstract DirectoryTreatment getDirectoryTreatment();
abstract Builder toBuilder();
@AutoValue.Builder
abstract static class Builder {
abstract Builder setCompression(Compression compression);
abstract Builder setDirectoryTreatment(DirectoryTreatment directoryTreatment);
abstract ReadMatches build();
}
/** Reads files using the given {@link Compression}. Default is {@link Compression#AUTO}. */
public ReadMatches withCompression(Compression compression) {
checkArgument(compression != null, "compression can not be null");
return toBuilder().setCompression(compression).build();
}
/**
* Controls how to handle directories in the input {@link PCollection}. Default is {@link
* DirectoryTreatment#SKIP}.
*/
public ReadMatches withDirectoryTreatment(DirectoryTreatment directoryTreatment) {
checkArgument(directoryTreatment != null, "directoryTreatment can not be null");
return toBuilder().setDirectoryTreatment(directoryTreatment).build();
}
@Override
public PCollection<ReadableFile> expand(PCollection<MatchResult.Metadata> input) {
return input.apply(ParDo.of(new ToReadableFileFn(this)));
}
@Override
public void populateDisplayData(DisplayData.Builder builder) {
builder.add(DisplayData.item("compression", getCompression().toString()));
builder.add(DisplayData.item("directoryTreatment", getDirectoryTreatment().toString()));
}
// Converts each Metadata into a ReadableFile, applying the directory policy and resolving
// Compression.AUTO from the filename.
private static class ToReadableFileFn extends DoFn<MatchResult.Metadata, ReadableFile> {
private final ReadMatches spec;
private ToReadableFileFn(ReadMatches spec) {
this.spec = spec;
}
@ProcessElement
public void process(ProcessContext c) {
MatchResult.Metadata metadata = c.element();
if (metadata.resourceId().isDirectory()) {
switch (spec.getDirectoryTreatment()) {
case SKIP:
return;
case PROHIBIT:
throw new IllegalArgumentException(
"Trying to read " + metadata.resourceId() + " which is a directory");
default:
throw new UnsupportedOperationException(
"Unknown DirectoryTreatment: " + spec.getDirectoryTreatment());
}
}
Compression compression =
(spec.getCompression() == Compression.AUTO)
? Compression.detect(metadata.resourceId().getFilename())
: spec.getCompression();
c.output(
new ReadableFile(
MatchResult.Metadata.builder()
.setResourceId(metadata.resourceId())
.setSizeBytes(metadata.sizeBytes())
// Seeking only remains possible when the bytes are read back uncompressed.
.setIsReadSeekEfficient(
metadata.isReadSeekEfficient() && compression == Compression.UNCOMPRESSED)
.build(),
compression));
}
}
}
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.ocaml;
import static com.facebook.buck.ocaml.OCamlRuleBuilder.createOCamlLinkTarget;
import static com.facebook.buck.ocaml.OCamlRuleBuilder.createStaticLibraryBuildTarget;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;
import com.facebook.buck.cli.BuckConfig;
import com.facebook.buck.cli.Config;
import com.facebook.buck.cli.FakeBuckConfig;
import com.facebook.buck.cxx.CxxBuckConfig;
import com.facebook.buck.cxx.CxxDescriptionEnhancer;
import com.facebook.buck.cxx.CxxPlatform;
import com.facebook.buck.cxx.CxxSource;
import com.facebook.buck.cxx.CxxSourceRuleFactory;
import com.facebook.buck.cxx.CxxSourceRuleFactoryHelper;
import com.facebook.buck.cxx.DefaultCxxPlatforms;
import com.facebook.buck.cxx.HeaderVisibility;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.testutil.integration.BuckBuildLog;
import com.facebook.buck.testutil.integration.DebuggableTemporaryFolder;
import com.facebook.buck.testutil.integration.ProjectWorkspace;
import com.facebook.buck.testutil.integration.TestDataHelper;
import com.facebook.buck.util.environment.Architecture;
import com.facebook.buck.util.environment.Platform;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import java.io.IOException;
// End-to-end tests that drive real `buck build` invocations against the "ocaml" test
// scenario and assert on the resulting build log (what was built locally vs. rule-key hits).
public class OCamlIntegrationTest {
// Scratch directory for each test's project workspace.
@Rule
public DebuggableTemporaryFolder tmp = new DebuggableTemporaryFolder();
// Skips every test (via JUnit assumeTrue) unless a complete OCaml toolchain is
// available in the local Buck configuration.
@Before
public void checkOCamlIsConfigured() throws IOException {
ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(
this, "ocaml", tmp);
workspace.setUp();
ProjectFilesystem filesystem = new ProjectFilesystem(tmp.getRootPath());
Config rawConfig = Config.createDefaultConfig(
filesystem.getRootPath(),
ImmutableMap.<String, ImmutableMap<String, String>>of());
BuckConfig buckConfig = new BuckConfig(
rawConfig,
filesystem,
Architecture.detect(),
Platform.detect(),
ImmutableMap.copyOf(System.getenv()));
OCamlBuckConfig oCamlBuckConfig = new OCamlBuckConfig(
Platform.detect(),
buckConfig);
// All five tools must be present: native compiler, bytecode compiler, dependency
// tool, yacc and lex.
assumeTrue(oCamlBuckConfig.getOCamlCompiler().isPresent());
assumeTrue(oCamlBuckConfig.getOCamlBytecodeCompiler().isPresent());
assumeTrue(oCamlBuckConfig.getOCamlDepTool().isPresent());
assumeTrue(oCamlBuckConfig.getYaccCompiler().isPresent());
assumeTrue(oCamlBuckConfig.getLexCompiler().isPresent());
}
// Builds //hello_ocaml:hello_ocaml and verifies incremental-rebuild behavior for source
// edits, BUCK-file edits, and a library rename.
@Test
public void testHelloOcamlBuild() throws IOException {
ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(
this, "ocaml", tmp);
workspace.setUp();
BuildTarget target = BuildTargetFactory.newInstance(
workspace.getDestPath(),
"//hello_ocaml:hello_ocaml");
BuildTarget binary = createOCamlLinkTarget(target);
BuildTarget lib = BuildTargetFactory.newInstance(
workspace.getDestPath(),
"//hello_ocaml:ocamllib");
BuildTarget staticLib = createStaticLibraryBuildTarget(lib);
ImmutableSet<BuildTarget> targets = ImmutableSet.of(target, binary, lib, staticLib);
// Initial build: target and static library are built locally.
workspace.runBuckCommand("build", target.toString()).assertSuccess();
BuckBuildLog buildLog = workspace.getBuildLog();
assertTrue(buildLog.getAllTargets().containsAll(targets));
buildLog.assertTargetBuiltLocally(target.toString());
buildLog.assertTargetBuiltLocally(staticLib.toString());
workspace.resetBuildLogFile();
// Check that running a build again results in no builds since everything is up to
// date.
workspace.runBuckCommand("build", target.toString()).assertSuccess();
buildLog = workspace.getBuildLog();
assertEquals(ImmutableSet.of(binary, target), buildLog.getAllTargets());
buildLog.assertTargetHadMatchingRuleKey(binary.toString());
buildLog.assertTargetHadMatchingRuleKey(target.toString());
workspace.resetBuildLogFile();
// Update the source file.
// Editing a binary source rebuilds the target but leaves the library's rule key intact.
workspace.replaceFileContents("hello_ocaml/amodule.ml", "v2", "v3");
workspace.runBuckCommand("build", target.toString()).assertSuccess();
buildLog = workspace.getBuildLog();
assertTrue(buildLog.getAllTargets().containsAll(targets));
buildLog.assertTargetBuiltLocally(target.toString());
buildLog.assertTargetHadMatchingRuleKey(staticLib.toString());
workspace.resetBuildLogFile();
// Update the source file.
// Editing a library source rebuilds both the library and the dependent target.
workspace.replaceFileContents("hello_ocaml/ocamllib/m1.ml", "print me", "print Me");
workspace.runBuckCommand("build", target.toString()).assertSuccess();
buildLog = workspace.getBuildLog();
assertTrue(buildLog.getAllTargets().containsAll(targets));
buildLog.assertTargetBuiltLocally(target.toString());
buildLog.assertTargetBuiltLocally(staticLib.toString());
workspace.resetBuildLogFile();
// Update the source file.
// Adding a source to the library's BUCK entry also triggers rebuilds.
workspace.replaceFileContents("hello_ocaml/BUCK", "#INSERT_POINT", "'ocamllib/dummy.ml',");
workspace.runBuckCommand("build", target.toString()).assertSuccess();
buildLog = workspace.getBuildLog();
assertTrue(buildLog.getAllTargets().containsAll(targets));
buildLog.assertTargetBuiltLocally(target.toString());
buildLog.assertTargetBuiltLocally(staticLib.toString());
workspace.resetBuildLogFile();
BuildTarget lib1 = BuildTargetFactory.newInstance(
workspace.getDestPath(),
"//hello_ocaml:ocamllib1");
BuildTarget staticLib1 = createStaticLibraryBuildTarget(lib1);
ImmutableSet<BuildTarget> targets1 = ImmutableSet.of(target, binary, lib1, staticLib1);
// We rebuild if lib name changes
workspace.replaceFileContents("hello_ocaml/BUCK", "name = 'ocamllib'", "name = 'ocamllib1'");
workspace.replaceFileContents(
"hello_ocaml/BUCK",
":ocamllib",
":ocamllib1");
workspace.runBuckCommand("build", target.toString()).assertSuccess();
buildLog = workspace.getBuildLog();
assertTrue(buildLog.getAllTargets().containsAll(targets1));
buildLog.assertTargetBuiltLocally(target.toString());
buildLog.assertTargetBuiltLocally(staticLib1.toString());
}
// Builds //calc:calc, which uses ocamllex (.mll) and ocamlyacc (.mly) sources, and
// verifies that edits to either generated-source input trigger a rebuild.
@Test
public void testLexAndYaccBuild() throws IOException {
ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(
this,
"ocaml",
tmp);
workspace.setUp();
BuildTarget target = BuildTargetFactory.newInstance(workspace.getDestPath(), "//calc:calc");
BuildTarget binary = createOCamlLinkTarget(target);
ImmutableSet<BuildTarget> targets = ImmutableSet.of(target, binary);
// Initial build: both the target and its link rule build locally.
workspace.runBuckCommand("build", target.toString()).assertSuccess();
BuckBuildLog buildLog = workspace.getBuildLog();
assertEquals(
targets,
buildLog.getAllTargets());
buildLog.assertTargetBuiltLocally(target.toString());
buildLog.assertTargetBuiltLocally(binary.toString());
workspace.resetBuildLogFile();
// No-op rebuild: rule keys must match.
workspace.runBuckCommand("build", target.toString()).assertSuccess();
buildLog = workspace.getBuildLog();
assertEquals(ImmutableSet.of(binary, target), buildLog.getAllTargets());
buildLog.assertTargetHadMatchingRuleKey(binary.toString());
buildLog.assertTargetHadMatchingRuleKey(target.toString());
workspace.resetBuildLogFile();
// Editing the lexer source forces a rebuild.
workspace.replaceFileContents("calc/lexer.mll", "The type token", "the type token");
workspace.runBuckCommand("build", target.toString()).assertSuccess();
buildLog = workspace.getBuildLog();
assertEquals(
targets,
buildLog.getAllTargets());
buildLog.assertTargetBuiltLocally(target.toString());
buildLog.assertTargetBuiltLocally(binary.toString());
workspace.resetBuildLogFile();
// Editing the parser source forces a rebuild.
workspace.replaceFileContents("calc/parser.mly", "the entry point", "The entry point");
workspace.runBuckCommand("build", target.toString()).assertSuccess();
buildLog = workspace.getBuildLog();
assertEquals(
targets,
buildLog.getAllTargets());
buildLog.assertTargetBuiltLocally(target.toString());
buildLog.assertTargetBuiltLocally(binary.toString());
}
// Builds //ctest:ctest, which mixes OCaml and C sources, and verifies that C-source and
// compiler-flag changes invalidate the build while identical content does not.
@Test
public void testCInteropBuild() throws IOException {
ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(
this,
"ocaml",
tmp);
workspace.setUp();
BuildTarget target = BuildTargetFactory.newInstance(workspace.getDestPath(), "//ctest:ctest");
BuildTarget binary = createOCamlLinkTarget(target);
ImmutableSet<BuildTarget> targets = ImmutableSet.of(target, binary);
workspace.runBuckCommand("build", target.toString()).assertSuccess();
BuckBuildLog buildLog = workspace.getBuildLog();
assertTrue(buildLog.getAllTargets().containsAll(targets));
buildLog.assertTargetBuiltLocally(target.toString());
workspace.resetBuildLogFile();
// No-op rebuild: rule keys must match.
workspace.runBuckCommand("build", target.toString()).assertSuccess();
buildLog = workspace.getBuildLog();
assertEquals(ImmutableSet.of(binary, target), buildLog.getAllTargets());
buildLog.assertTargetHadMatchingRuleKey(binary.toString());
buildLog.assertTargetHadMatchingRuleKey(target.toString());
workspace.resetBuildLogFile();
// Editing the C source forces a rebuild.
workspace.replaceFileContents("ctest/ctest.c", "NATIVE PLUS", "Native Plus");
workspace.runBuckCommand("build", target.toString()).assertSuccess();
buildLog = workspace.getBuildLog();
assertTrue(buildLog.getAllTargets().containsAll(targets));
buildLog.assertTargetBuiltLocally(target.toString());
buildLog.assertTargetBuiltLocally(binary.toString());
workspace.resetBuildLogFile();
// Adding compiler flags in the BUCK file forces a rebuild.
workspace.replaceFileContents("ctest/BUCK", "#INSERTION_POINT", "compiler_flags=['-noassert']");
workspace.runBuckCommand("build", target.toString()).assertSuccess();
buildLog = workspace.getBuildLog();
assertTrue(buildLog.getAllTargets().containsAll(targets));
buildLog.assertTargetBuiltLocally(target.toString());
buildLog.assertTargetBuiltLocally(binary.toString());
workspace.resetBuildLogFile();
// Changing the flags back again forces another rebuild.
workspace.replaceFileContents(
"ctest/BUCK",
"compiler_flags=['-noassert']",
"compiler_flags=[]");
workspace.runBuckCommand("build", target.toString()).assertSuccess();
buildLog = workspace.getBuildLog();
assertTrue(buildLog.getAllTargets().containsAll(targets));
buildLog.assertTargetBuiltLocally(target.toString());
buildLog.assertTargetBuiltLocally(binary.toString());
workspace.resetBuildLogFile();
// NOTE(review): this replacement is deliberately a no-op (same string both sides) -
// presumably it verifies that rewriting identical content does not change rule keys,
// as the matching-rule-key assertions below demand. Confirm replaceFileContents
// tolerates a no-op replacement.
workspace.replaceFileContents("ctest/BUCK", "compiler_flags=[]", "compiler_flags=[]");
workspace.runBuckCommand("build", target.toString()).assertSuccess();
buildLog = workspace.getBuildLog();
assertEquals(ImmutableSet.of(binary, target), buildLog.getAllTargets());
buildLog.assertTargetHadMatchingRuleKey(binary.toString());
buildLog.assertTargetHadMatchingRuleKey(target.toString());
}
// Smoke test: building //:plus must simply succeed; success is the assertion.
@Test
public void testSimpleBuildWithLib() throws IOException {
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(this, "ocaml", tmp);
  ws.setUp();
  BuildTarget plus = BuildTargetFactory.newInstance(ws.getDestPath(), "//:plus");
  ws.runBuckCommand("build", plus.toString()).assertSuccess();
}
// Smoke test: a target declared in the root BUCK file (//:main) must build successfully.
@Test
public void testRootBuildTarget() throws IOException {
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(this, "ocaml", tmp);
  ws.setUp();
  BuildTarget main = BuildTargetFactory.newInstance(ws.getDestPath(), "//:main");
  ws.runBuckCommand("build", main.toString()).assertSuccess();
}
// Builds //ocaml_ext_mac:ocaml_ext against a prebuilt library and verifies rule-key
// invalidation when the prebuilt library reference changes.
@Test
public void testPrebuiltLibrary() throws IOException {
  // The fixture only exists for macOS. Use assumeTrue so the test is reported as
  // skipped on other platforms; the previous if-wrapper made it silently "pass"
  // without executing anything.
  assumeTrue(Platform.detect() == Platform.MACOS);
  ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(
      this,
      "ocaml",
      tmp);
  workspace.setUp();
  BuildTarget target = BuildTargetFactory.newInstance(
      workspace.getDestPath(),
      "//ocaml_ext_mac:ocaml_ext");
  BuildTarget binary = createOCamlLinkTarget(target);
  BuildTarget libplus = BuildTargetFactory.newInstance(
      workspace.getDestPath(),
      "//ocaml_ext_mac:plus");
  ImmutableSet<BuildTarget> targets = ImmutableSet.of(target, binary, libplus);
  // Initial build: target and link rule are built locally.
  workspace.runBuckCommand("build", target.toString()).assertSuccess();
  BuckBuildLog buildLog = workspace.getBuildLog();
  assertTrue(buildLog.getAllTargets().containsAll(targets));
  buildLog.assertTargetBuiltLocally(target.toString());
  buildLog.assertTargetBuiltLocally(binary.toString());
  workspace.resetBuildLogFile();
  // No-op rebuild: rule keys must match.
  workspace.runBuckCommand("build", target.toString()).assertSuccess();
  buildLog = workspace.getBuildLog();
  assertTrue(buildLog.getAllTargets().containsAll(targets));
  buildLog.assertTargetHadMatchingRuleKey(target.toString());
  buildLog.assertTargetHadMatchingRuleKey(binary.toString());
  workspace.resetBuildLogFile();
  // Renaming the referenced prebuilt library must invalidate and rebuild.
  workspace.replaceFileContents(
      "ocaml_ext_mac/BUCK",
      "libplus_lib",
      "libplus_lib1");
  workspace.runBuckCommand("build", target.toString()).assertSuccess();
  buildLog = workspace.getBuildLog();
  assertTrue(buildLog.getAllTargets().containsAll(targets));
  buildLog.assertTargetBuiltLocally(target.toString());
  buildLog.assertTargetBuiltLocally(binary.toString());
}
// Builds //clib:clib, an OCaml target depending on a C++ library, and verifies that a
// C++ source edit rebuilds the compile/archive rules but leaves header symlink trees
// (which only depend on header layout) with matching rule keys.
@Test
public void testCppLibraryDependency() throws IOException {
ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(
this,
"ocaml",
tmp);
workspace.setUp();
BuildTarget target = BuildTargetFactory.newInstance(workspace.getDestPath(), "//clib:clib");
BuildTarget binary = createOCamlLinkTarget(target);
BuildTarget libplus = BuildTargetFactory.newInstance(workspace.getDestPath(), "//clib:plus");
BuildTarget libplusStatic = createStaticLibraryBuildTarget(libplus);
BuildTarget cclib = BuildTargetFactory.newInstance(workspace.getDestPath(), "//clib:cc");
// Reconstruct the implicit C++ rule targets (preprocess, compile, archive, symlink
// trees) that Buck derives from //clib:cc so they can be asserted on below.
CxxPlatform cxxPlatform = DefaultCxxPlatforms.build(
new CxxBuckConfig(FakeBuckConfig.builder().build()));
CxxSourceRuleFactory cxxSourceRuleFactory = CxxSourceRuleFactoryHelper.of(
workspace.getDestPath(),
cclib,
cxxPlatform);
BuildTarget cclibbin =
CxxDescriptionEnhancer.createStaticLibraryBuildTarget(
cclib,
cxxPlatform.getFlavor(),
CxxSourceRuleFactory.PicType.PDC);
String sourceName = "cc/cc.cpp";
BuildTarget ppObj =
cxxSourceRuleFactory.createPreprocessBuildTarget(
sourceName,
CxxSource.Type.CXX,
CxxSourceRuleFactory.PicType.PDC);
BuildTarget ccObj =
cxxSourceRuleFactory.createCompileBuildTarget(
sourceName,
CxxSourceRuleFactory.PicType.PDC);
BuildTarget headerSymlinkTreeTarget =
CxxDescriptionEnhancer.createHeaderSymlinkTreeTarget(
cclib,
cxxPlatform.getFlavor(),
HeaderVisibility.PRIVATE);
BuildTarget exportedHeaderSymlinkTreeTarget =
CxxDescriptionEnhancer.createHeaderSymlinkTreeTarget(
cclib,
cxxPlatform.getFlavor(),
HeaderVisibility.PUBLIC);
ImmutableSet<BuildTarget> targets = ImmutableSet.of(
target,
binary,
libplus,
libplusStatic,
cclib,
cclibbin,
ccObj,
ppObj,
headerSymlinkTreeTarget,
exportedHeaderSymlinkTreeTarget);
// Initial build: every derived rule builds locally.
workspace.runBuckCommand("build", target.toString()).assertSuccess();
BuckBuildLog buildLog = workspace.getBuildLog();
assertTrue(buildLog.getAllTargets().containsAll(targets));
buildLog.assertTargetBuiltLocally(target.toString());
buildLog.assertTargetBuiltLocally(binary.toString());
buildLog.assertTargetBuiltLocally(libplus.toString());
buildLog.assertTargetBuiltLocally(libplusStatic.toString());
buildLog.assertTargetBuiltLocally(cclibbin.toString());
buildLog.assertTargetBuiltLocally(ccObj.toString());
buildLog.assertTargetBuiltLocally(ppObj.toString());
buildLog.assertTargetBuiltLocally(headerSymlinkTreeTarget.toString());
buildLog.assertTargetBuiltLocally(exportedHeaderSymlinkTreeTarget.toString());
workspace.resetBuildLogFile();
// No-op rebuild: rule keys must match.
workspace.runBuckCommand("build", target.toString()).assertSuccess();
buildLog = workspace.getBuildLog();
assertEquals(ImmutableSet.of(binary, target), buildLog.getAllTargets());
buildLog.assertTargetHadMatchingRuleKey(binary.toString());
buildLog.assertTargetHadMatchingRuleKey(target.toString());
workspace.resetBuildLogFile();
// Editing the C++ source rebuilds compilation and everything downstream, but the
// header symlink trees are unaffected.
workspace.replaceFileContents("clib/cc/cc.cpp", "Hi there", "hi there");
workspace.runBuckCommand("build", target.toString()).assertSuccess();
buildLog = workspace.getBuildLog();
assertTrue(buildLog.getAllTargets().containsAll(targets));
buildLog.assertTargetBuiltLocally(target.toString());
buildLog.assertTargetBuiltLocally(binary.toString());
buildLog.assertTargetBuiltLocally(libplus.toString());
buildLog.assertTargetBuiltLocally(libplusStatic.toString());
buildLog.assertTargetBuiltLocally(cclibbin.toString());
buildLog.assertTargetBuiltLocally(ccObj.toString());
buildLog.assertTargetBuiltLocally(ppObj.toString());
buildLog.assertTargetHadMatchingRuleKey(headerSymlinkTreeTarget.toString());
buildLog.assertTargetHadMatchingRuleKey(exportedHeaderSymlinkTreeTarget.toString());
}
}
| |
package com.starquestminecraft.bungeecord.util;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.TypeAdapter;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
public class UUIDFetcher {
// 1422748800000L == 2015-02-01T00:00:00Z. NOTE(review): presumably a reference point
// for Mojang's name-history lookups - confirm intended use before relying on it.
public static final long FEBRUARY_2015 = 1422748800000L;
// Gson wired to (de)serialize java.util.UUID through the UUIDTypeAdapter below.
private static final Gson GSON = new GsonBuilder().registerTypeAdapter(UUID.class, new UUIDTypeAdapter()).create();
// %s = lowercased player name, %d = UNIX timestamp in seconds (callers divide ms by 1000).
private static final String PROFILE_URL = "https://api.mojang.com/users/profiles/minecraft/%s?at=%d";
// %s = UUID rendered by UUIDTypeAdapter.fromUUID.
private static final String NAME_URL = "https://api.mojang.com/user/profiles/%s/names";
// Process-lifetime caches; entries are never evicted.
private static final Map<String, Profile> PROFILE_CACHE = new ConcurrentHashMap<>();
private static final Map<UUID, String> NAME_CACHE = new ConcurrentHashMap<>();
// Shared worker pool backing the *Async variants.
private static final ExecutorService POOL = Executors.newCachedThreadPool();
// Static utility class - not instantiable.
private UUIDFetcher() {
}
/**
 * Asynchronously fetches the profile for a specified name.
 *
 * @param name The name
 *
 * @return a future resolving to the profile, or {@code null} on failure
 */
public static Future<Profile> getProfileAsync(final String name) {
    final Callable<Profile> lookup = new Callable<Profile>() {
        @Override
        public Profile call() {
            return getProfile(name);
        }
    };
    return POOL.submit(lookup);
}
/**
 * Synchronously fetches and returns the profile currently associated with a name.
 *
 * @param name The name
 *
 * @return The profile, or {@code null} if the lookup failed
 */
public static Profile getProfile(final String name) {
    // "Now" resolves the name's current owner.
    final long now = System.currentTimeMillis();
    return getProfileAt(name, now);
}
/**
 * Asynchronously fetches the profile for a specified name at a point in time.
 *
 * @param name The name
 * @param timestamp Time when the player had this name, in milliseconds
 *
 * @return a future resolving to the profile, or {@code null} on failure
 */
public static Future<Profile> getProfileAsync(final String name, final long timestamp) {
    final Callable<Profile> lookup = new Callable<Profile>() {
        @Override
        public Profile call() {
            return getProfileAt(name, timestamp);
        }
    };
    return POOL.submit(lookup);
}
/**
 * Synchronously fetches the profile for a specified name and time.
 *
 * @param name The name
 * @param timestamp Time when the player had this name, in milliseconds
 *
 * @return The profile, or {@code null} if the lookup failed or no player held the
 *         name at that time
 *
 * @see UUIDFetcher#FEBRUARY_2015
 */
public static Profile getProfileAt(final String name, final long timestamp) {
    String key = name.toLowerCase();
    Profile cached = PROFILE_CACHE.get(key);
    if (cached != null) {
        return cached;
    }
    try {
        HttpURLConnection connection = (HttpURLConnection) new URL(
                String.format(PROFILE_URL, key, timestamp / 1000)).openConnection();
        // Bound both connect and read so a stalled Mojang endpoint cannot hang callers.
        connection.setConnectTimeout(5000);
        connection.setReadTimeout(5000);
        Profile profile;
        try (BufferedReader br = new BufferedReader(new InputStreamReader(connection.getInputStream()))) {
            profile = GSON.fromJson(br, Profile.class);
        }
        // An unknown name (or any empty response body) makes Gson return null. Guard it:
        // ConcurrentHashMap.put rejects null values and profile.id would NPE below.
        if (profile == null || profile.id == null) {
            return null;
        }
        PROFILE_CACHE.put(key, profile);
        NAME_CACHE.put(profile.id, profile.name);
        return profile;
    } catch (Exception ex) {
        // Best-effort lookup: log and fall through to the null "not found" result.
        ex.printStackTrace();
    }
    return null;
}
/**
* Asynchronously fetches the name for a specified UUID
*
* @param uuid The UUID
*/
public static Future<String> getNameAsync(final UUID uuid) {
return POOL.submit(new Callable<String>() {
@Override
public String call() {
return getName(uuid);
}
});
}
/**
* Synchronously fetches and returns the name for a specified UUID
*
* @param uuid The UUID
*
* @return The name
*/
public static String getName(final UUID uuid) {
if(NAME_CACHE.containsKey(uuid)) {
return NAME_CACHE.get(uuid);
}
try {
HttpURLConnection connection = (HttpURLConnection)new URL(String.format(NAME_URL, UUIDTypeAdapter.fromUUID(uuid))).openConnection();
connection.setReadTimeout(5000);
Profile[] history;
try(BufferedReader br = new BufferedReader(new InputStreamReader(connection.getInputStream()))) {
history = GSON.fromJson(br, Profile[].class);
}
Profile current = history[history.length - 1];
PROFILE_CACHE.put(current.name.toLowerCase(), current);
NAME_CACHE.put(uuid, current.name);
return current.name;
}
catch(Exception ex) {
ex.printStackTrace();
}
return null;
}
public static class Profile {
private String name;
private UUID id;
private Profile(final UUID id, final String name) {
this.id = id;
this.name = name;
}
public UUID getID() {
return id;
}
public String getName() {
return name;
}
}
private static class UUIDTypeAdapter extends TypeAdapter<UUID> {
@Override
public void write(final JsonWriter writer, final UUID uuid) throws IOException {
writer.value(fromUUID(uuid));
}
@Override
public UUID read(final JsonReader reader) throws IOException {
return fromString(reader.nextString());
}
public static String fromUUID(final UUID uuid) {
return uuid.toString().replace("-", "");
}
public static UUID fromString(final String str) {
return UUID.fromString(str.replaceFirst("(\\w{8})(\\w{4})(\\w{4})(\\w{4})(\\w{12})", "$1-$2-$3-$4-$5"));
}
}
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.util.io;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.nio.charset.Charset;
/**
* @author traff
*/
public abstract class BaseOutputReader extends BaseDataReader {
  /** See {@link #BaseOutputReader(Reader, Options)}, {@link #readAvailable}, and {@link #processInput} for reference. */
  public static class Options {
    /** Block inside {@code Reader.read} until data or EOF arrives (requires a {@code BaseInputStreamReader}). */
    public static final Options BLOCKING = withPolicy(SleepingPolicy.BLOCKING);
    /** Poll {@code Reader.ready()} and sleep between polls. */
    public static final Options NON_BLOCKING = withPolicy(SleepingPolicy.SIMPLE);
    // Override points; subclasses or withPolicy() customize these defaults.
    public SleepingPolicy policy() { return null; }
    public boolean splitToLines() { return true; }
    public boolean sendIncompleteLines() { return true; }
    public boolean withSeparators() { return true; }
    /** Returns an Options whose only customization is the given sleeping policy. */
    public static Options withPolicy(final SleepingPolicy policy) {
      return new Options() {
        @Override
        public SleepingPolicy policy() {
          return policy;
        }
      };
    }
  }

  protected final Reader myReader;
  private final Options myOptions;
  // Scratch buffer reused by every read() call.
  private final char[] myInputBuffer = new char[8192];
  // Accumulates the current (possibly incomplete) line across reads.
  private final StringBuilder myLineBuffer = new StringBuilder();

  public BaseOutputReader(@NotNull InputStream inputStream, @Nullable Charset charset) {
    this(createInputStreamReader(inputStream, charset));
  }

  public BaseOutputReader(@NotNull InputStream inputStream, @Nullable Charset charset, @NotNull Options options) {
    this(createInputStreamReader(inputStream, charset), options);
  }

  public BaseOutputReader(@NotNull Reader reader) {
    this(reader, new Options());
  }

  /**
   * @param reader the source; for {@link SleepingPolicy#BLOCKING} it must be a
   *               {@link BaseInputStreamReader} so {@link #close()} does not block
   * @param options behavior flags; see {@link Options}
   * @throws IllegalArgumentException on unsupported policy/option combinations (validated below)
   */
  public BaseOutputReader(@NotNull Reader reader, @NotNull Options options) {
    super(options.policy());
    if (options.policy() == SleepingPolicy.BLOCKING && !(reader instanceof BaseInputStreamReader)) {
      throw new IllegalArgumentException("Blocking policy can be used only with BaseInputStreamReader, that doesn't lock on close");
    }
    if (options.policy() != SleepingPolicy.BLOCKING && !options.sendIncompleteLines()) {
      throw new IllegalArgumentException("In non-blocking mode, the reader cannot produce complete lines reliably");
    }
    myReader = reader;
    myOptions = options;
  }

  // Uses the platform default charset when none is given (BaseInputStreamReader's choice).
  private static Reader createInputStreamReader(@NotNull InputStream stream, @Nullable Charset charset) {
    return charset == null ? new BaseInputStreamReader(stream) : new BaseInputStreamReader(stream, charset);
  }

  /**
   * Reads as much data as possible without blocking.
   * Relies on InputStream.ready method.
   * When in doubt, take a look at {@link #readAvailableBlocking()}.
   *
   * @return true if non-zero amount of data has been read
   * @throws IOException If an I/O error occurs
   */
  protected final boolean readAvailableNonBlocking() throws IOException {
    boolean read = false;
    try {
      int n;
      // Only read while ready() says it won't block.
      while (myReader.ready() && (n = myReader.read(myInputBuffer)) >= 0) {
        if (n > 0) {
          read = true;
          processInput(myInputBuffer, myLineBuffer, n);
        }
      }
    }
    finally {
      // Flush whatever partial line is pending even if read/processInput threw;
      // non-blocking mode always has sendIncompleteLines() == true (enforced in ctor).
      if (myLineBuffer.length() > 0) {
        sendText(myLineBuffer);
      }
    }
    return read;
  }

  /**
   * Reads data with blocking.
   * Should be used in case when ready method always returns false for your input stream.
   * Should be used if we want to to make our reader exit when end of stream reached.
   * Could be used if we prefer IO-blocking over CPU sleeping.
   *
   * @return true if non-zero amount of data has been read, false if end of the stream is reached
   * @throws IOException If an I/O error occurs
   */
  protected final boolean readAvailableBlocking() throws IOException {
    boolean read = false;
    try {
      int n;
      // Loops until EOF (read returns -1); each iteration may block in read().
      while ((n = myReader.read(myInputBuffer)) >= 0) {
        if (n > 0) {
          read = true;
          processInput(myInputBuffer, myLineBuffer, n);
        }
        // Hook for subclasses: called when no more buffered data is immediately available.
        if (!myReader.ready()) {
          onBufferExhaustion();
        }
      }
    }
    finally {
      // Flush the trailing partial line on EOF or exception.
      if (myLineBuffer.length() > 0) {
        sendText(myLineBuffer);
      }
    }
    return read;
  }

  /**
   * Dispatches the first {@code n} chars of {@code buffer}: either split into lines
   * (accumulated in {@code line}, emitted via {@link #sendText}) or passed through raw.
   */
  private void processInput(char[] buffer, StringBuilder line, int n) {
    if (myOptions.splitToLines()) {
      for (int i = 0; i < n; i++) {
        char c = buffer[i];
        // Collapse "\r\n" into "\n" by skipping the '\r'.
        // NOTE(review): a '\r' that is the last char of this chunk (i + 1 == n) is NOT
        // collapsed even if the next chunk starts with '\n' — confirm callers tolerate this.
        if (c == '\r' && i + 1 < n && buffer[i + 1] == '\n') {
          continue;
        }
        // Keep the '\n' itself only when separators are wanted, or when incomplete lines
        // may be sent (the separator then marks which emitted texts are complete lines).
        if (c != '\n' || myOptions.sendIncompleteLines() || myOptions.withSeparators()) {
          line.append(c);
        }
        if (c == '\n') {
          sendText(line);
        }
      }
      // Emit the trailing partial line of this chunk if the options allow it.
      if (line.length() > 0 && myOptions.sendIncompleteLines()) {
        sendText(line);
      }
    }
    else {
      onTextAvailable(new String(buffer, 0, n));
    }
  }

  // Emits the accumulated text and resets the buffer for the next line.
  private void sendText(@NotNull StringBuilder line) {
    onTextAvailable(line.toString());
    line.setLength(0);
  }

  @Override
  protected boolean readAvailable() throws IOException {
    return mySleepingPolicy == SleepingPolicy.BLOCKING ? readAvailableBlocking() : readAvailableNonBlocking();
  }

  @Override
  protected void close() throws IOException {
    myReader.close();
  }

  @Override
  public void stop() {
    super.stop();
    if (mySleepingPolicy == SleepingPolicy.BLOCKING) {
      // we can't count on super.stop() since it only sets 'isRunning = false', and blocked Reader.read won't wake up.
      try { close(); }
      catch (IOException ignore) { }
    }
  }

  /** Called in blocking mode whenever the reader reports no immediately-available data; default is a no-op. */
  protected void onBufferExhaustion() { }

  /** Receives each emitted chunk: a line (with or without separator) or raw text, per {@link Options}. */
  protected abstract void onTextAvailable(@NotNull String text);

  //<editor-fold desc="Deprecated stuff.">
  /** @deprecated use {@link #BaseOutputReader(InputStream, Charset, Options)} (to be removed in IDEA 18) */
  @SuppressWarnings("unused")
  public BaseOutputReader(@NotNull InputStream inputStream, @Nullable Charset charset, @Nullable SleepingPolicy policy) {
    this(inputStream, charset, Options.withPolicy(policy));
  }

  /** @deprecated use {@link #BaseOutputReader(Reader, Options)} (to be removed in IDEA 18) */
  @SuppressWarnings("unused")
  public BaseOutputReader(@NotNull Reader reader, @Nullable SleepingPolicy policy) {
    this(reader, Options.withPolicy(policy));
  }
  //</editor-fold>
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.transfer.model;
import java.io.Serializable;
import java.util.Objects;

import javax.annotation.Generated;

import com.amazonaws.protocol.ProtocolMarshaller;
import com.amazonaws.protocol.StructuredPojo;
/**
* <p>
* The protocol settings that are configured for your server.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/transfer-2018-11-05/ProtocolDetails" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ProtocolDetails implements Serializable, Cloneable, StructuredPojo {
/**
* <p>
* Indicates passive mode, for FTP and FTPS protocols. Enter a single dotted-quad IPv4 address, such as the external
* IP address of a firewall, router, or load balancer. For example:
* </p>
* <p>
* <code> aws transfer update-server --protocol-details PassiveIp=<i>0.0.0.0</i> </code>
* </p>
* <p>
* Replace <code> <i>0.0.0.0</i> </code> in the example above with the actual IP address you want to use.
* </p>
* <note>
* <p>
* If you change the <code>PassiveIp</code> value, you must stop and then restart your Transfer server for the
* change to take effect. For details on using Passive IP (PASV) in a NAT environment, see <a href=
* "http://aws.amazon.com/blogs/storage/configuring-your-ftps-server-behind-a-firewall-or-nat-with-aws-transfer-family/"
* >Configuring your FTPS server behind a firewall or NAT with Amazon Web Services Transfer Family</a>.
* </p>
* </note>
*/
private String passiveIp;
/**
* <p>
* A property used with Transfer servers that use the FTPS protocol. TLS Session Resumption provides a mechanism to
* resume or share a negotiated secret key between the control and data connection for an FTPS session.
* <code>TlsSessionResumptionMode</code> determines whether or not the server resumes recent, negotiated sessions
* through a unique session ID. This property is available during <code>CreateServer</code> and
* <code>UpdateServer</code> calls. If a <code>TlsSessionResumptionMode</code> value is not specified during
* CreateServer, it is set to <code>ENFORCED</code> by default.
* </p>
* <ul>
* <li>
* <p>
* <code>DISABLED</code>: the server does not process TLS session resumption client requests and creates a new TLS
* session for each request.
* </p>
* </li>
* <li>
* <p>
* <code>ENABLED</code>: the server processes and accepts clients that are performing TLS session resumption. The
* server doesn't reject client data connections that do not perform the TLS session resumption client processing.
* </p>
* </li>
* <li>
* <p>
* <code>ENFORCED</code>: the server processes and accepts clients that are performing TLS session resumption. The
* server rejects client data connections that do not perform the TLS session resumption client processing. Before
* you set the value to <code>ENFORCED</code>, test your clients.
* </p>
* <note>
* <p>
* Not all FTPS clients perform TLS session resumption. So, if you choose to enforce TLS session resumption, you
* prevent any connections from FTPS clients that don't perform the protocol negotiation. To determine whether or
* not you can use the <code>ENFORCED</code> value, you need to test your clients.
* </p>
* </note></li>
* </ul>
*/
private String tlsSessionResumptionMode;
/**
* <p>
* Indicates passive mode, for FTP and FTPS protocols. Enter a single dotted-quad IPv4 address, such as the external
* IP address of a firewall, router, or load balancer. For example:
* </p>
* <p>
* <code> aws transfer update-server --protocol-details PassiveIp=<i>0.0.0.0</i> </code>
* </p>
* <p>
* Replace <code> <i>0.0.0.0</i> </code> in the example above with the actual IP address you want to use.
* </p>
* <note>
* <p>
* If you change the <code>PassiveIp</code> value, you must stop and then restart your Transfer server for the
* change to take effect. For details on using Passive IP (PASV) in a NAT environment, see <a href=
* "http://aws.amazon.com/blogs/storage/configuring-your-ftps-server-behind-a-firewall-or-nat-with-aws-transfer-family/"
* >Configuring your FTPS server behind a firewall or NAT with Amazon Web Services Transfer Family</a>.
* </p>
* </note>
*
* @param passiveIp
* Indicates passive mode, for FTP and FTPS protocols. Enter a single dotted-quad IPv4 address, such as the
* external IP address of a firewall, router, or load balancer. For example: </p>
* <p>
* <code> aws transfer update-server --protocol-details PassiveIp=<i>0.0.0.0</i> </code>
* </p>
* <p>
* Replace <code> <i>0.0.0.0</i> </code> in the example above with the actual IP address you want to use.
* </p>
* <note>
* <p>
* If you change the <code>PassiveIp</code> value, you must stop and then restart your Transfer server for
* the change to take effect. For details on using Passive IP (PASV) in a NAT environment, see <a href=
* "http://aws.amazon.com/blogs/storage/configuring-your-ftps-server-behind-a-firewall-or-nat-with-aws-transfer-family/"
* >Configuring your FTPS server behind a firewall or NAT with Amazon Web Services Transfer Family</a>.
* </p>
*/
public void setPassiveIp(String passiveIp) {
    // Plain assignment; no local validation of the dotted-quad format is performed.
    this.passiveIp = passiveIp;
}
/**
* <p>
* Indicates passive mode, for FTP and FTPS protocols. Enter a single dotted-quad IPv4 address, such as the external
* IP address of a firewall, router, or load balancer. For example:
* </p>
* <p>
* <code> aws transfer update-server --protocol-details PassiveIp=<i>0.0.0.0</i> </code>
* </p>
* <p>
* Replace <code> <i>0.0.0.0</i> </code> in the example above with the actual IP address you want to use.
* </p>
* <note>
* <p>
* If you change the <code>PassiveIp</code> value, you must stop and then restart your Transfer server for the
* change to take effect. For details on using Passive IP (PASV) in a NAT environment, see <a href=
* "http://aws.amazon.com/blogs/storage/configuring-your-ftps-server-behind-a-firewall-or-nat-with-aws-transfer-family/"
* >Configuring your FTPS server behind a firewall or NAT with Amazon Web Services Transfer Family</a>.
* </p>
* </note>
*
* @return Indicates passive mode, for FTP and FTPS protocols. Enter a single dotted-quad IPv4 address, such as the
* external IP address of a firewall, router, or load balancer. For example: </p>
* <p>
* <code> aws transfer update-server --protocol-details PassiveIp=<i>0.0.0.0</i> </code>
* </p>
* <p>
* Replace <code> <i>0.0.0.0</i> </code> in the example above with the actual IP address you want to use.
* </p>
* <note>
* <p>
* If you change the <code>PassiveIp</code> value, you must stop and then restart your Transfer server for
* the change to take effect. For details on using Passive IP (PASV) in a NAT environment, see <a href=
* "http://aws.amazon.com/blogs/storage/configuring-your-ftps-server-behind-a-firewall-or-nat-with-aws-transfer-family/"
* >Configuring your FTPS server behind a firewall or NAT with Amazon Web Services Transfer Family</a>.
* </p>
*/
public String getPassiveIp() {
    // May be null when passive mode was never configured on this object.
    return this.passiveIp;
}
/**
* <p>
* Indicates passive mode, for FTP and FTPS protocols. Enter a single dotted-quad IPv4 address, such as the external
* IP address of a firewall, router, or load balancer. For example:
* </p>
* <p>
* <code> aws transfer update-server --protocol-details PassiveIp=<i>0.0.0.0</i> </code>
* </p>
* <p>
* Replace <code> <i>0.0.0.0</i> </code> in the example above with the actual IP address you want to use.
* </p>
* <note>
* <p>
* If you change the <code>PassiveIp</code> value, you must stop and then restart your Transfer server for the
* change to take effect. For details on using Passive IP (PASV) in a NAT environment, see <a href=
* "http://aws.amazon.com/blogs/storage/configuring-your-ftps-server-behind-a-firewall-or-nat-with-aws-transfer-family/"
* >Configuring your FTPS server behind a firewall or NAT with Amazon Web Services Transfer Family</a>.
* </p>
* </note>
*
* @param passiveIp
* Indicates passive mode, for FTP and FTPS protocols. Enter a single dotted-quad IPv4 address, such as the
* external IP address of a firewall, router, or load balancer. For example: </p>
* <p>
* <code> aws transfer update-server --protocol-details PassiveIp=<i>0.0.0.0</i> </code>
* </p>
* <p>
* Replace <code> <i>0.0.0.0</i> </code> in the example above with the actual IP address you want to use.
* </p>
* <note>
* <p>
* If you change the <code>PassiveIp</code> value, you must stop and then restart your Transfer server for
* the change to take effect. For details on using Passive IP (PASV) in a NAT environment, see <a href=
* "http://aws.amazon.com/blogs/storage/configuring-your-ftps-server-behind-a-firewall-or-nat-with-aws-transfer-family/"
* >Configuring your FTPS server behind a firewall or NAT with Amazon Web Services Transfer Family</a>.
* </p>
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ProtocolDetails withPassiveIp(String passiveIp) {
    // Fluent variant of setPassiveIp: stores the value, then returns this for chaining.
    setPassiveIp(passiveIp);
    return this;
}
/**
* <p>
* A property used with Transfer servers that use the FTPS protocol. TLS Session Resumption provides a mechanism to
* resume or share a negotiated secret key between the control and data connection for an FTPS session.
* <code>TlsSessionResumptionMode</code> determines whether or not the server resumes recent, negotiated sessions
* through a unique session ID. This property is available during <code>CreateServer</code> and
* <code>UpdateServer</code> calls. If a <code>TlsSessionResumptionMode</code> value is not specified during
* CreateServer, it is set to <code>ENFORCED</code> by default.
* </p>
* <ul>
* <li>
* <p>
* <code>DISABLED</code>: the server does not process TLS session resumption client requests and creates a new TLS
* session for each request.
* </p>
* </li>
* <li>
* <p>
* <code>ENABLED</code>: the server processes and accepts clients that are performing TLS session resumption. The
* server doesn't reject client data connections that do not perform the TLS session resumption client processing.
* </p>
* </li>
* <li>
* <p>
* <code>ENFORCED</code>: the server processes and accepts clients that are performing TLS session resumption. The
* server rejects client data connections that do not perform the TLS session resumption client processing. Before
* you set the value to <code>ENFORCED</code>, test your clients.
* </p>
* <note>
* <p>
* Not all FTPS clients perform TLS session resumption. So, if you choose to enforce TLS session resumption, you
* prevent any connections from FTPS clients that don't perform the protocol negotiation. To determine whether or
* not you can use the <code>ENFORCED</code> value, you need to test your clients.
* </p>
* </note></li>
* </ul>
*
* @param tlsSessionResumptionMode
* A property used with Transfer servers that use the FTPS protocol. TLS Session Resumption provides a
* mechanism to resume or share a negotiated secret key between the control and data connection for an FTPS
* session. <code>TlsSessionResumptionMode</code> determines whether or not the server resumes recent,
* negotiated sessions through a unique session ID. This property is available during
* <code>CreateServer</code> and <code>UpdateServer</code> calls. If a <code>TlsSessionResumptionMode</code>
* value is not specified during CreateServer, it is set to <code>ENFORCED</code> by default.</p>
* <ul>
* <li>
* <p>
* <code>DISABLED</code>: the server does not process TLS session resumption client requests and creates a
* new TLS session for each request.
* </p>
* </li>
* <li>
* <p>
* <code>ENABLED</code>: the server processes and accepts clients that are performing TLS session resumption.
* The server doesn't reject client data connections that do not perform the TLS session resumption client
* processing.
* </p>
* </li>
* <li>
* <p>
* <code>ENFORCED</code>: the server processes and accepts clients that are performing TLS session
* resumption. The server rejects client data connections that do not perform the TLS session resumption
* client processing. Before you set the value to <code>ENFORCED</code>, test your clients.
* </p>
* <note>
* <p>
* Not all FTPS clients perform TLS session resumption. So, if you choose to enforce TLS session resumption,
* you prevent any connections from FTPS clients that don't perform the protocol negotiation. To determine
* whether or not you can use the <code>ENFORCED</code> value, you need to test your clients.
* </p>
* </note></li>
* @see TlsSessionResumptionMode
*/
public void setTlsSessionResumptionMode(String tlsSessionResumptionMode) {
    // Plain assignment; the value is not checked against the TlsSessionResumptionMode enum here.
    this.tlsSessionResumptionMode = tlsSessionResumptionMode;
}
/**
* <p>
* A property used with Transfer servers that use the FTPS protocol. TLS Session Resumption provides a mechanism to
* resume or share a negotiated secret key between the control and data connection for an FTPS session.
* <code>TlsSessionResumptionMode</code> determines whether or not the server resumes recent, negotiated sessions
* through a unique session ID. This property is available during <code>CreateServer</code> and
* <code>UpdateServer</code> calls. If a <code>TlsSessionResumptionMode</code> value is not specified during
* CreateServer, it is set to <code>ENFORCED</code> by default.
* </p>
* <ul>
* <li>
* <p>
* <code>DISABLED</code>: the server does not process TLS session resumption client requests and creates a new TLS
* session for each request.
* </p>
* </li>
* <li>
* <p>
* <code>ENABLED</code>: the server processes and accepts clients that are performing TLS session resumption. The
* server doesn't reject client data connections that do not perform the TLS session resumption client processing.
* </p>
* </li>
* <li>
* <p>
* <code>ENFORCED</code>: the server processes and accepts clients that are performing TLS session resumption. The
* server rejects client data connections that do not perform the TLS session resumption client processing. Before
* you set the value to <code>ENFORCED</code>, test your clients.
* </p>
* <note>
* <p>
* Not all FTPS clients perform TLS session resumption. So, if you choose to enforce TLS session resumption, you
* prevent any connections from FTPS clients that don't perform the protocol negotiation. To determine whether or
* not you can use the <code>ENFORCED</code> value, you need to test your clients.
* </p>
* </note></li>
* </ul>
*
* @return A property used with Transfer servers that use the FTPS protocol. TLS Session Resumption provides a
* mechanism to resume or share a negotiated secret key between the control and data connection for an FTPS
* session. <code>TlsSessionResumptionMode</code> determines whether or not the server resumes recent,
* negotiated sessions through a unique session ID. This property is available during
* <code>CreateServer</code> and <code>UpdateServer</code> calls. If a <code>TlsSessionResumptionMode</code>
* value is not specified during CreateServer, it is set to <code>ENFORCED</code> by default.</p>
* <ul>
* <li>
* <p>
* <code>DISABLED</code>: the server does not process TLS session resumption client requests and creates a
* new TLS session for each request.
* </p>
* </li>
* <li>
* <p>
* <code>ENABLED</code>: the server processes and accepts clients that are performing TLS session
* resumption. The server doesn't reject client data connections that do not perform the TLS session
* resumption client processing.
* </p>
* </li>
* <li>
* <p>
* <code>ENFORCED</code>: the server processes and accepts clients that are performing TLS session
* resumption. The server rejects client data connections that do not perform the TLS session resumption
* client processing. Before you set the value to <code>ENFORCED</code>, test your clients.
* </p>
* <note>
* <p>
* Not all FTPS clients perform TLS session resumption. So, if you choose to enforce TLS session resumption,
* you prevent any connections from FTPS clients that don't perform the protocol negotiation. To determine
* whether or not you can use the <code>ENFORCED</code> value, you need to test your clients.
* </p>
* </note></li>
* @see TlsSessionResumptionMode
*/
public String getTlsSessionResumptionMode() {
    // May be null when the mode was never set on this object.
    return this.tlsSessionResumptionMode;
}
/**
* <p>
* A property used with Transfer servers that use the FTPS protocol. TLS Session Resumption provides a mechanism to
* resume or share a negotiated secret key between the control and data connection for an FTPS session.
* <code>TlsSessionResumptionMode</code> determines whether or not the server resumes recent, negotiated sessions
* through a unique session ID. This property is available during <code>CreateServer</code> and
* <code>UpdateServer</code> calls. If a <code>TlsSessionResumptionMode</code> value is not specified during
* CreateServer, it is set to <code>ENFORCED</code> by default.
* </p>
* <ul>
* <li>
* <p>
* <code>DISABLED</code>: the server does not process TLS session resumption client requests and creates a new TLS
* session for each request.
* </p>
* </li>
* <li>
* <p>
* <code>ENABLED</code>: the server processes and accepts clients that are performing TLS session resumption. The
* server doesn't reject client data connections that do not perform the TLS session resumption client processing.
* </p>
* </li>
* <li>
* <p>
* <code>ENFORCED</code>: the server processes and accepts clients that are performing TLS session resumption. The
* server rejects client data connections that do not perform the TLS session resumption client processing. Before
* you set the value to <code>ENFORCED</code>, test your clients.
* </p>
* <note>
* <p>
* Not all FTPS clients perform TLS session resumption. So, if you choose to enforce TLS session resumption, you
* prevent any connections from FTPS clients that don't perform the protocol negotiation. To determine whether or
* not you can use the <code>ENFORCED</code> value, you need to test your clients.
* </p>
* </note></li>
* </ul>
*
* @param tlsSessionResumptionMode
* A property used with Transfer servers that use the FTPS protocol. TLS Session Resumption provides a
* mechanism to resume or share a negotiated secret key between the control and data connection for an FTPS
* session. <code>TlsSessionResumptionMode</code> determines whether or not the server resumes recent,
* negotiated sessions through a unique session ID. This property is available during
* <code>CreateServer</code> and <code>UpdateServer</code> calls. If a <code>TlsSessionResumptionMode</code>
* value is not specified during CreateServer, it is set to <code>ENFORCED</code> by default.</p>
* <ul>
* <li>
* <p>
* <code>DISABLED</code>: the server does not process TLS session resumption client requests and creates a
* new TLS session for each request.
* </p>
* </li>
* <li>
* <p>
* <code>ENABLED</code>: the server processes and accepts clients that are performing TLS session resumption.
* The server doesn't reject client data connections that do not perform the TLS session resumption client
* processing.
* </p>
* </li>
* <li>
* <p>
* <code>ENFORCED</code>: the server processes and accepts clients that are performing TLS session
* resumption. The server rejects client data connections that do not perform the TLS session resumption
* client processing. Before you set the value to <code>ENFORCED</code>, test your clients.
* </p>
* <note>
* <p>
* Not all FTPS clients perform TLS session resumption. So, if you choose to enforce TLS session resumption,
* you prevent any connections from FTPS clients that don't perform the protocol negotiation. To determine
* whether or not you can use the <code>ENFORCED</code> value, you need to test your clients.
* </p>
* </note></li>
* @return Returns a reference to this object so that method calls can be chained together.
* @see TlsSessionResumptionMode
*/
public ProtocolDetails withTlsSessionResumptionMode(String tlsSessionResumptionMode) {
    // Fluent variant of the String setter: stores the value, then returns this for chaining.
    setTlsSessionResumptionMode(tlsSessionResumptionMode);
    return this;
}
/**
* <p>
* A property used with Transfer servers that use the FTPS protocol. TLS Session Resumption provides a mechanism to
* resume or share a negotiated secret key between the control and data connection for an FTPS session.
* <code>TlsSessionResumptionMode</code> determines whether or not the server resumes recent, negotiated sessions
* through a unique session ID. This property is available during <code>CreateServer</code> and
* <code>UpdateServer</code> calls. If a <code>TlsSessionResumptionMode</code> value is not specified during
* CreateServer, it is set to <code>ENFORCED</code> by default.
* </p>
* <ul>
* <li>
* <p>
* <code>DISABLED</code>: the server does not process TLS session resumption client requests and creates a new TLS
* session for each request.
* </p>
* </li>
* <li>
* <p>
* <code>ENABLED</code>: the server processes and accepts clients that are performing TLS session resumption. The
* server doesn't reject client data connections that do not perform the TLS session resumption client processing.
* </p>
* </li>
* <li>
* <p>
* <code>ENFORCED</code>: the server processes and accepts clients that are performing TLS session resumption. The
* server rejects client data connections that do not perform the TLS session resumption client processing. Before
* you set the value to <code>ENFORCED</code>, test your clients.
* </p>
* <note>
* <p>
* Not all FTPS clients perform TLS session resumption. So, if you choose to enforce TLS session resumption, you
* prevent any connections from FTPS clients that don't perform the protocol negotiation. To determine whether or
* not you can use the <code>ENFORCED</code> value, you need to test your clients.
* </p>
* </note></li>
* </ul>
*
* @param tlsSessionResumptionMode
* A property used with Transfer servers that use the FTPS protocol. TLS Session Resumption provides a
* mechanism to resume or share a negotiated secret key between the control and data connection for an FTPS
* session. <code>TlsSessionResumptionMode</code> determines whether or not the server resumes recent,
* negotiated sessions through a unique session ID. This property is available during
* <code>CreateServer</code> and <code>UpdateServer</code> calls. If a <code>TlsSessionResumptionMode</code>
* value is not specified during CreateServer, it is set to <code>ENFORCED</code> by default.</p>
* <ul>
* <li>
* <p>
* <code>DISABLED</code>: the server does not process TLS session resumption client requests and creates a
* new TLS session for each request.
* </p>
* </li>
* <li>
* <p>
* <code>ENABLED</code>: the server processes and accepts clients that are performing TLS session resumption.
* The server doesn't reject client data connections that do not perform the TLS session resumption client
* processing.
* </p>
* </li>
* <li>
* <p>
* <code>ENFORCED</code>: the server processes and accepts clients that are performing TLS session
* resumption. The server rejects client data connections that do not perform the TLS session resumption
* client processing. Before you set the value to <code>ENFORCED</code>, test your clients.
* </p>
* <note>
* <p>
* Not all FTPS clients perform TLS session resumption. So, if you choose to enforce TLS session resumption,
* you prevent any connections from FTPS clients that don't perform the protocol negotiation. To determine
* whether or not you can use the <code>ENFORCED</code> value, you need to test your clients.
* </p>
* </note></li>
* @return Returns a reference to this object so that method calls can be chained together.
* @see TlsSessionResumptionMode
*/
public ProtocolDetails withTlsSessionResumptionMode(TlsSessionResumptionMode tlsSessionResumptionMode) {
    // Persist the enum's string form, then return this instance so calls can be chained.
    final String modeName = tlsSessionResumptionMode.toString();
    this.tlsSessionResumptionMode = modeName;
    return this;
}
/**
 * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
 * redacted from this string using a placeholder value.
 *
 * @return A string representation of this object.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    final StringBuilder out = new StringBuilder("{");
    if (getPassiveIp() != null) {
        out.append("PassiveIp: ").append(getPassiveIp()).append(",");
    }
    if (getTlsSessionResumptionMode() != null) {
        out.append("TlsSessionResumptionMode: ").append(getTlsSessionResumptionMode());
    }
    return out.append("}").toString();
}
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    // instanceof is false for null, so no separate null check is needed.
    if (!(obj instanceof ProtocolDetails)) {
        return false;
    }
    ProtocolDetails that = (ProtocolDetails) obj;
    // Objects.equals covers the null-xor-null and value-equality cases the
    // generated code spelled out field by field.
    return java.util.Objects.equals(getPassiveIp(), that.getPassiveIp())
            && java.util.Objects.equals(getTlsSessionResumptionMode(), that.getTlsSessionResumptionMode());
}
@Override
public int hashCode() {
    // Objects.hash applies the same 31-based accumulation (starting from 1,
    // null fields contributing 0) that the generated code computed manually.
    return java.util.Objects.hash(getPassiveIp(), getTlsSessionResumptionMode());
}
@Override
public ProtocolDetails clone() {
    try {
        return (ProtocolDetails) super.clone();
    } catch (CloneNotSupportedException e) {
        // Unreachable in practice: the class declares Cloneable support.
        throw new IllegalStateException(
                "Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
    }
}
@com.amazonaws.annotation.SdkInternalApi
@Override
public void marshall(ProtocolMarshaller protocolMarshaller) {
    // Delegates wire-format serialization to the SDK-generated marshaller singleton.
    com.amazonaws.services.transfer.model.transform.ProtocolDetailsMarshaller.getInstance().marshall(this, protocolMarshaller);
}
}
| |
/*
* Copyright 2019 Oath Holdings Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.yahoo.athenz.auth.token;
import ch.qos.logback.core.net.ssl.SSL;
import com.yahoo.athenz.auth.token.jwts.JwtsSigningKeyResolver;
import com.yahoo.athenz.auth.token.jwts.MockJwtsSigningKeyResolver;
import com.yahoo.athenz.auth.util.Crypto;
import com.yahoo.athenz.auth.util.CryptoException;
import io.jsonwebtoken.*;
import org.mockito.Mockito;
import org.testng.annotations.Test;
import javax.net.ssl.SSLContext;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.cert.CertificateEncodingException;
import java.security.cert.X509Certificate;
import java.util.Base64;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import static org.testng.Assert.*;
public class AccessTokenTest {
    // EC key pair used to sign test tokens and to verify the resulting signatures.
    private final File ecPrivateKey = new File("./src/test/resources/ec_private.key");
    private final File ecPublicKey = new File("./src/test/resources/ec_public.key");
    // JSON Web Key Set served by the mock resolver: an RSA key (kid "0"), an EC key
    // (kid "eckey1"), and one entry with an unsupported key type ("ATHENZ").
    private final String JWT_KEYS = "{\"keys\":[{\"kty\":\"RSA\",\"kid\":\"0\",\"alg\":\"RS256\","
            + "\"use\":\"sig\",\"n\":\"AMV3cnZXxYJL-A0TYY8Fy245HKSOBCYt9atNAUQVtbEwx9QaZGj8moYIe4nXgx"
            + "72Ktwg0Gruh8sS7GQLBizCXg7fCk62sDV_MZINnwON9gsKbxxgn9mLFeYSaatUzk-VRphDoHNIBC-qeDtYnZhs"
            + "HYcV9Jp0GPkLNquhN1TXA7gT\",\"e\":\"AQAB\"},{\"kty\":\"EC\",\"kid\":\"eckey1\",\"alg\":"
            + "\"ES256\",\"use\":\"sig\",\"crv\":\"prime256v1\",\"x\":\"AI0x6wEUk5T0hslaT83DNVy5r98Xn"
            + "G7HAjQynjCrcdCe\",\"y\":\"ATdV2ebpefqBli_SXZwvL3-7OiD3MTryGbR-zRSFZ_s=\"},"
            + "{\"kty\":\"ATHENZ\",\"alg\":\"ES256\"}]}";
    // Builds a fully-populated AccessToken whose time-based claims are anchored at the
    // given epoch-seconds value (issue/auth time = now, expiry = now + 3600) and whose
    // cnf (confirmation) claim carries both a cert hash and an x5t#uri entry.
    AccessToken createAccessToken(long now) {
        AccessToken accessToken = new AccessToken();
        accessToken.setAuthTime(now);
        accessToken.setScope(Collections.singletonList("readers"));
        accessToken.setSubject("subject");
        accessToken.setUserId("userid");
        accessToken.setExpiryTime(now + 3600);
        accessToken.setIssueTime(now);
        accessToken.setClientId("clientid");
        accessToken.setAudience("coretech");
        accessToken.setVersion(1);
        accessToken.setIssuer("athenz");
        accessToken.setProxyPrincipal("proxy.user");
        accessToken.setConfirmEntry("x5t#uri", "spiffe://athenz/sa/api");
        try {
            // Bind the token to the test mTLS certificate via its hash.
            Path path = Paths.get("src/test/resources/mtls_token_spec.cert");
            String certStr = new String(Files.readAllBytes(path));
            X509Certificate cert = Crypto.loadX509Certificate(certStr);
            accessToken.setConfirmX509CertHash(cert);
        } catch (IOException ignored) {
            fail();
        }
        return accessToken;
    }
    // Asserts that every claim set by createAccessToken(now) round-trips through the
    // token's getters, including both cnf entries and the cert-hash confirmation.
    // NOTE(review): TestNG's assertEquals signature is (actual, expected); these calls
    // pass (expected, actual). The assertions still hold, but failure messages would
    // have the values swapped — confirm whether that is intentional.
    void validateAccessToken(AccessToken accessToken, long now) {
        assertEquals(now, accessToken.getAuthTime());
        assertEquals(1, accessToken.getScope().size());
        assertTrue(accessToken.getScope().contains("readers"));
        assertEquals("subject", accessToken.getSubject());
        assertEquals("userid", accessToken.getUserId());
        assertEquals(now + 3600, accessToken.getExpiryTime());
        assertEquals(now, accessToken.getIssueTime());
        assertEquals("clientid", accessToken.getClientId());
        assertEquals("coretech", accessToken.getAudience());
        assertEquals(1, accessToken.getVersion());
        assertEquals("athenz", accessToken.getIssuer());
        assertEquals("proxy.user", accessToken.getProxyPrincipal());
        LinkedHashMap<String, Object> confirm = accessToken.getConfirm();
        assertNotNull(confirm);
        assertEquals("A4DtL2JmUMhAsvJj5tKyn64SqzmuXbMrJa0n761y5v0", confirm.get("x5t#S256"));
        assertEquals("A4DtL2JmUMhAsvJj5tKyn64SqzmuXbMrJa0n761y5v0", accessToken.getConfirmEntry("x5t#S256"));
        assertEquals("spiffe://athenz/sa/api", confirm.get("x5t#uri"));
        assertEquals("spiffe://athenz/sa/api", accessToken.getConfirmEntry("x5t#uri"));
        assertNull(accessToken.getConfirmEntry("unknown"));
        try {
            // The same certificate used when the token was created must match its hash.
            Path path = Paths.get("src/test/resources/mtls_token_spec.cert");
            String certStr = new String(Files.readAllBytes(path));
            X509Certificate cert = Crypto.loadX509Certificate(certStr);
            assertTrue(accessToken.confirmX509CertHash(cert));
        } catch (IOException ignored) {
            fail();
        }
    }
    // Signs a token with the EC private key and verifies claims directly through the
    // jjwt parser using the matching public key.
    @Test
    public void testAccessToken() {
        long now = System.currentTimeMillis() / 1000;
        AccessToken accessToken = createAccessToken(now);
        // verify the getters
        validateAccessToken(accessToken, now);
        // now get the signed token
        PrivateKey privateKey = Crypto.loadPrivateKey(ecPrivateKey);
        String accessJws = accessToken.getSignedToken(privateKey, "eckey1", SignatureAlgorithm.ES256);
        assertNotNull(accessJws);
        // now verify our signed token
        PublicKey publicKey = Crypto.loadPublicKey(ecPublicKey);
        Jws<Claims> claims = Jwts.parser().setSigningKey(publicKey).parseClaimsJws(accessJws);
        assertNotNull(claims);
        assertEquals("subject", claims.getBody().getSubject());
        assertEquals("coretech", claims.getBody().getAudience());
        assertEquals("athenz", claims.getBody().getIssuer());
        List<String> scopes = (List<String>) claims.getBody().get("scp");
        assertNotNull(scopes);
        assertEquals(1, scopes.size());
        assertEquals("readers", scopes.get(0));
    }
    // Verifies a signed token through the AccessToken(String, JwtsSigningKeyResolver)
    // constructor when the resolver already holds the matching public key.
    @Test
    public void testAccessTokenSignedToken() {
        long now = System.currentTimeMillis() / 1000;
        AccessToken accessToken = createAccessToken(now);
        // now get the signed token
        PrivateKey privateKey = Crypto.loadPrivateKey(ecPrivateKey);
        String accessJws = accessToken.getSignedToken(privateKey, "eckey1", SignatureAlgorithm.ES256);
        assertNotNull(accessJws);
        // now verify our signed token
        JwtsSigningKeyResolver resolver = new JwtsSigningKeyResolver(null, null);
        resolver.addPublicKey("eckey1", Crypto.loadPublicKey(ecPublicKey));
        AccessToken checkToken = new AccessToken(accessJws, resolver);
        validateAccessToken(checkToken, now);
    }
    // Verifies a signed token through the AccessToken(String, PublicKey) constructor.
    @Test
    public void testAccessTokenSignedTokenPublicKey() {
        long now = System.currentTimeMillis() / 1000;
        AccessToken accessToken = createAccessToken(now);
        // now get the signed token
        PrivateKey privateKey = Crypto.loadPrivateKey(ecPrivateKey);
        String accessJws = accessToken.getSignedToken(privateKey, "eckey1", SignatureAlgorithm.ES256);
        assertNotNull(accessJws);
        // now verify our signed token
        AccessToken checkToken = new AccessToken(accessJws, Crypto.loadPublicKey(ecPublicKey));
        validateAccessToken(checkToken, now);
    }
    // Verifies a signed token when the resolver loads its keys from the athenz.conf
    // file referenced by the ZTS_PROP_ATHENZ_CONF system property; the property is
    // restored (or cleared) afterwards so other tests are unaffected.
    @Test
    public void testAccessTokenSignedTokenConfigFile() {
        long now = System.currentTimeMillis() / 1000;
        AccessToken accessToken = createAccessToken(now);
        // now get the signed token
        PrivateKey privateKey = Crypto.loadPrivateKey(ecPrivateKey);
        String accessJws = accessToken.getSignedToken(privateKey, "eckey1", SignatureAlgorithm.ES256);
        assertNotNull(accessJws);
        // now verify our signed token
        final String oldConf = System.setProperty(JwtsSigningKeyResolver.ZTS_PROP_ATHENZ_CONF,
                "src/test/resources/athenz.conf");
        JwtsSigningKeyResolver resolver = new JwtsSigningKeyResolver(null, null);
        AccessToken checkToken = new AccessToken(accessJws, resolver);
        validateAccessToken(checkToken, now);
        if (oldConf == null) {
            System.clearProperty(JwtsSigningKeyResolver.ZTS_PROP_ATHENZ_CONF);
        } else {
            System.setProperty(JwtsSigningKeyResolver.ZTS_PROP_ATHENZ_CONF, oldConf);
        }
    }
    // Exercises the unknown-key failure path against several config files: a valid
    // file without the signing key, files with no/invalid keys, an empty path, and
    // an unparseable file.
    @Test
    public void testAccessTokenSignedTokenConfigFileUnknownKey() {
        testAccessTokenSignedTokenConfigFileNoKeys("src/test/resources/athenz.conf");
        testAccessTokenSignedTokenConfigFileNoKeys("src/test/resources/athenz-no-keys.conf");
        testAccessTokenSignedTokenConfigFileNoKeys("src/test/resources/athenz-no-valid-keys.conf");
        testAccessTokenSignedTokenConfigFileNoKeys("");
        // passing invalid file that will generate parse exception
        testAccessTokenSignedTokenConfigFileNoKeys("arg_file");
    }
    // Signs with key id "eckey99" (absent from every config file) and expects token
    // verification to throw.
    void testAccessTokenSignedTokenConfigFileNoKeys(final String confPath) {
        long now = System.currentTimeMillis() / 1000;
        AccessToken accessToken = createAccessToken(now);
        // now get the signed token
        PrivateKey privateKey = Crypto.loadPrivateKey(ecPrivateKey);
        String accessJws = accessToken.getSignedToken(privateKey, "eckey99", SignatureAlgorithm.ES256);
        assertNotNull(accessJws);
        // now verify our signed token
        final String oldConf = System.setProperty(JwtsSigningKeyResolver.ZTS_PROP_ATHENZ_CONF,
                confPath);
        JwtsSigningKeyResolver resolver = new JwtsSigningKeyResolver(null, null);
        try {
            new AccessToken(accessJws, resolver);
            fail();
        } catch (Exception ex) {
            // expected - the signing key cannot be resolved from this config file
        }
        if (oldConf == null) {
            System.clearProperty(JwtsSigningKeyResolver.ZTS_PROP_ATHENZ_CONF);
        } else {
            System.setProperty(JwtsSigningKeyResolver.ZTS_PROP_ATHENZ_CONF, oldConf);
        }
    }
    // Verifies a signed token when the resolver fetches JWT_KEYS from a mocked
    // server endpoint (HTTP 200).
    @Test
    public void testAccessTokenSignedTokenServerKeys() {
        long now = System.currentTimeMillis() / 1000;
        AccessToken accessToken = createAccessToken(now);
        // now get the signed token
        PrivateKey privateKey = Crypto.loadPrivateKey(ecPrivateKey);
        String accessJws = accessToken.getSignedToken(privateKey, "eckey1", SignatureAlgorithm.ES256);
        assertNotNull(accessJws);
        // now verify our signed token
        final String oldConf = System.setProperty(JwtsSigningKeyResolver.ZTS_PROP_ATHENZ_CONF,
                "src/test/resources/athenz-no-keys.conf");
        MockJwtsSigningKeyResolver.setResponseCode(200);
        MockJwtsSigningKeyResolver.setResponseBody(JWT_KEYS);
        MockJwtsSigningKeyResolver resolver = new MockJwtsSigningKeyResolver("https://localhost:4443", null);
        AccessToken checkToken = new AccessToken(accessJws, resolver);
        validateAccessToken(checkToken, now);
        if (oldConf == null) {
            System.clearProperty(JwtsSigningKeyResolver.ZTS_PROP_ATHENZ_CONF);
        } else {
            System.setProperty(JwtsSigningKeyResolver.ZTS_PROP_ATHENZ_CONF, oldConf);
        }
    }
    // When the mocked key server returns 401 with an empty body, verification must
    // fail with an IllegalArgumentException.
    @Test
    public void testAccessTokenSignedTokenServerKeysFailure() {
        long now = System.currentTimeMillis() / 1000;
        AccessToken accessToken = createAccessToken(now);
        // now get the signed token
        PrivateKey privateKey = Crypto.loadPrivateKey(ecPrivateKey);
        String accessJws = accessToken.getSignedToken(privateKey, "eckey1", SignatureAlgorithm.ES256);
        assertNotNull(accessJws);
        // now verify our signed token
        final String oldConf = System.setProperty(JwtsSigningKeyResolver.ZTS_PROP_ATHENZ_CONF,
                "src/test/resources/athenz-no-keys.conf");
        MockJwtsSigningKeyResolver.setResponseCode(401);
        MockJwtsSigningKeyResolver.setResponseBody("");
        SSLContext sslContext = Mockito.mock(SSLContext.class);
        MockJwtsSigningKeyResolver resolver = new MockJwtsSigningKeyResolver("https://localhost:4443", sslContext);
        try {
            new AccessToken(accessJws, resolver);
            fail();
        } catch (Exception ex) {
            assertTrue(ex instanceof IllegalArgumentException, ex.getMessage());
        }
        if (oldConf == null) {
            System.clearProperty(JwtsSigningKeyResolver.ZTS_PROP_ATHENZ_CONF);
        } else {
            System.setProperty(JwtsSigningKeyResolver.ZTS_PROP_ATHENZ_CONF, oldConf);
        }
    }
    // A token issued 3661 seconds in the past is beyond the 3600s lifetime plus the
    // 60s allowed clock skew, so parsing must fail with an "expired" message.
    @Test
    public void testAccessTokenExpired() {
        long now = System.currentTimeMillis() / 1000;
        // we allow clock skew of 60 seconds so we'll go
        // back 3600 + 61 to make our token expired
        AccessToken accessToken = createAccessToken(now - 3661);
        // now get the signed token
        PrivateKey privateKey = Crypto.loadPrivateKey(ecPrivateKey);
        String accessJws = accessToken.getSignedToken(privateKey, "eckey1", SignatureAlgorithm.ES256);
        assertNotNull(accessJws);
        // now verify our signed token
        final String oldConf = System.setProperty(JwtsSigningKeyResolver.ZTS_PROP_ATHENZ_CONF,
                "src/test/resources/athenz.conf");
        JwtsSigningKeyResolver resolver = new JwtsSigningKeyResolver(null, null);
        try {
            new AccessToken(accessJws, resolver);
            fail();
        } catch (Exception ex) {
            assertTrue(ex.getMessage().contains("expired"));
        }
        if (oldConf == null) {
            System.clearProperty(JwtsSigningKeyResolver.ZTS_PROP_ATHENZ_CONF);
        } else {
            System.setProperty(JwtsSigningKeyResolver.ZTS_PROP_ATHENZ_CONF, oldConf);
        }
    }
    // A token with no cnf claim returns null for both the map and individual entries.
    @Test
    public void testAccessTokenNullConfirm() {
        AccessToken accessToken = new AccessToken();
        assertNull(accessToken.getConfirm());
        assertNull(accessToken.getConfirmEntry("key"));
    }
    // If encoding the certificate throws, getX509CertificateHash must return null
    // rather than propagate the exception.
    @Test
    public void testGetX509CertificateHash() throws CertificateEncodingException {
        X509Certificate mockCert = Mockito.mock(X509Certificate.class);
        Mockito.when(mockCert.getEncoded()).thenThrow(new CryptoException());
        AccessToken accessToken = new AccessToken();
        assertNull(accessToken.getX509CertificateHash(mockCert));
    }
    // A certificate different from the one the token was bound to must not match
    // the stored x5t#S256 hash.
    @Test
    public void testConfirmX509CertHashFailure() {
        long now = System.currentTimeMillis() / 1000;
        AccessToken accessToken = createAccessToken(now);
        try {
            Path path = Paths.get("src/test/resources/valid_cn_x509.cert");
            String certStr = new String(Files.readAllBytes(path));
            X509Certificate cert = Crypto.loadX509Certificate(certStr);
            assertFalse(accessToken.confirmX509CertHash(cert));
        } catch (IOException ignored) {
            fail();
        }
    }
    // A token whose cnf claim has no x5t#S256 entry at all cannot confirm any
    // certificate hash.
    @Test
    public void testConfirmX509CertHashNoHash() {
        AccessToken accessToken = new AccessToken();
        accessToken.setVersion(1);
        accessToken.setIssuer("athenz");
        accessToken.setConfirmEntry("x5t#uri", "spiffe://athenz/sa/api");
        try {
            Path path = Paths.get("src/test/resources/valid_cn_x509.cert");
            String certStr = new String(Files.readAllBytes(path));
            X509Certificate cert = Crypto.loadX509Certificate(certStr);
            assertFalse(accessToken.confirmX509CertHash(cert));
        } catch (IOException ignored) {
            fail();
        }
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.painless.spi;
import org.elasticsearch.painless.spi.annotation.WhitelistAnnotationParser;
import java.io.InputStreamReader;
import java.io.LineNumberReader;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.nio.charset.StandardCharsets;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/** Loads and creates a {@link Whitelist} from one to many text files. */
public final class WhitelistLoader {
/**
* Loads and creates a {@link Whitelist} from one to many text files using only the base annotation parsers.
* See {@link #loadFromResourceFiles(Class, Map, String...)} for information on how to structure a whitelist
* text file.
*/
/**
 * Convenience overload of {@link #loadFromResourceFiles(Class, Map, String...)} that
 * parses the given whitelist files using only the base set of annotation parsers.
 */
public static Whitelist loadFromResourceFiles(Class<?> resource, String... filepaths) {
    return loadFromResourceFiles(resource, WhitelistAnnotationParser.BASE_ANNOTATION_PARSERS, filepaths);
}
/**
* Loads and creates a {@link Whitelist} from one to many text files. The file paths are passed in as an array of
* {@link String}s with a single {@link Class} to be be used to load the resources where each {@link String}
* is the path of a single text file. The {@link Class}'s {@link ClassLoader} will be used to lookup the Java
* reflection objects for each individual {@link Class}, {@link Constructor}, {@link Method}, and {@link Field}
* specified as part of the whitelist in the text file.
*
* A single pass is made through each file to collect all the information about each class, constructor, method,
* and field. Most validation will be done at a later point after all whitelists have been gathered and their
* merging takes place.
*
* A painless type name is one of the following:
* <ul>
* <li> def - The Painless dynamic type which is automatically included without a need to be
* whitelisted. </li>
* <li> fully-qualified Java type name - Any whitelisted Java class will have the equivalent name as
* a Painless type name with the exception that any dollar symbols used as part of inner classes will
* be replaced with dot symbols. </li>
* <li> short Java type name - The text after the final dot symbol of any specified Java class. A
* short type Java name may be excluded by using the 'no_import' attribute during Painless class parsing
* as described later. </li>
* </ul>
*
* The following can be parsed from each whitelist text file:
* <ul>
* <li> Blank lines will be ignored by the parser. </li>
* <li> Comments may be created starting with a pound '#' symbol and end with a newline. These will
* be ignored by the parser. </li>
* <li> Primitive types may be specified starting with 'class' and followed by the Java type name,
* an opening bracket, a newline, a closing bracket, and a final newline. </li>
* <li> Complex types may be specified starting with 'class' and followed by the fully-qualified Java
* class name, optionally followed by a 'no_import' attribute, an opening bracket, a newline,
* constructor/method/field specifications, a closing bracket, and a final newline. Within a complex
* type the following may be parsed:
* <ul>
* <li> A constructor may be specified starting with an opening parenthesis, followed by a
* comma-delimited list of Painless type names corresponding to the type/class names for
* the equivalent Java parameter types (these must be whitelisted as well), a closing
* parenthesis, and a newline. </li>
* <li> A method may be specified starting with a Painless type name for the return type,
* followed by the Java name of the method (which will also be the Painless name for the
* method), an opening parenthesis, a comma-delimited list of Painless type names
* corresponding to the type/class names for the equivalent Java parameter types
* (these must be whitelisted as well), a closing parenthesis, and a newline. </li>
* <li> An augmented method may be specified starting with a Painless type name for the return
* type, followed by the fully qualified Java name of the class the augmented method is
* part of (this class does not need to be whitelisted), the Java name of the method
* (which will also be the Painless name for the method), an opening parenthesis, a
* comma-delimited list of Painless type names corresponding to the type/class names
* for the equivalent Java parameter types (these must be whitelisted as well), a closing
* parenthesis, and a newline. </li>
* <li>A field may be specified starting with a Painless type name for the equivalent Java type
* of the field, followed by the Java name of the field (which all be the Painless name
* for the field), and a newline. </li>
* </ul>
* <li> Annotations may be added starting with an at, followed by a name, optionally an opening brace,
* a parameter name, an equals, an opening quote, an argument value, a closing quote, (possibly repeated
* for multiple arguments,) and a closing brace. Multiple annotations may be added after a class (before
* the opening bracket), after a method, or after field. </li>
* </ul>
*
* Note there must be a one-to-one correspondence of Painless type names to Java type/class names.
* If the same Painless type is defined across multiple files and the Java class is the same, all
* specified constructors, methods, and fields will be merged into a single Painless type. The
* Painless dynamic type, 'def', used as part of constructor, method, and field definitions will
* be appropriately parsed and handled. Painless complex types must be specified with the
* fully-qualified Java class name. Method argument types, method return types, and field types
* must be specified with Painless type names (def, fully-qualified, or short) as described earlier.
*
* The following example is used to create a single whitelist text file:
*
* {@code
* # primitive types
*
* class int -> int {
* }
*
* # complex types
*
* class my.package.Example @no_import {
* # constructors
* ()
* (int)
* (def, def)
* (Example, def)
*
* # method
* Example add(int, def)
* int add(Example, Example)
* void example() @deprecated[use example 2 instead]
* void example2()
*
* # augmented
* Example some.other.Class sub(Example, int, def)
*
* # fields
* int value0
* int value1
* def value2
* }
* }
*/
// Single-pass, line-oriented parser for whitelist text files; the file format is
// described in detail in the javadoc above. State variables (parseType, current
// class name, accumulated members) are reset at each '}' line.
public static Whitelist loadFromResourceFiles(Class<?> resource, Map<String, WhitelistAnnotationParser> parsers, String... filepaths) {
    List<WhitelistClass> whitelistClasses = new ArrayList<>();
    List<WhitelistMethod> whitelistStatics = new ArrayList<>();
    List<WhitelistClassBinding> whitelistClassBindings = new ArrayList<>();
    // Execute a single pass through the whitelist text files. This will gather all the
    // constructors, methods, augmented methods, and fields for each whitelisted class.
    for (String filepath : filepaths) {
        String line;
        // -1 until the first line is read; surfaces in the error message if opening the resource fails.
        int number = -1;
        try (LineNumberReader reader = new LineNumberReader(
                new InputStreamReader(resource.getResourceAsStream(filepath), StandardCharsets.UTF_8))) {
            String parseType = null;
            String whitelistClassOrigin = null;
            String javaClassName = null;
            List<WhitelistConstructor> whitelistConstructors = null;
            List<WhitelistMethod> whitelistMethods = null;
            List<WhitelistField> whitelistFields = null;
            List<Object> classAnnotations = null;
            while ((line = reader.readLine()) != null) {
                number = reader.getLineNumber();
                line = line.trim();
                // Skip any lines that are either blank or comments.
                if (line.length() == 0 || line.charAt(0) == '#') {
                    continue;
                }
                // Handle a new class by resetting all the variables necessary to construct a new WhitelistClass for the whitelist.
                // Expects the following format: 'class' ID annotations? '{' '\n'
                if (line.startsWith("class ")) {
                    // Ensure the final token of the line is '{'.
                    if (line.endsWith("{") == false) {
                        throw new IllegalArgumentException(
                                "invalid class definition: failed to parse class opening bracket [" + line + "]");
                    }
                    if (parseType != null) {
                        throw new IllegalArgumentException("invalid definition: cannot embed class definition [" + line + "]");
                    }
                    // Parse the Java class name and annotations if they exist.
                    // In both branches the trailing '{' is excluded: with no '@',
                    // annotationIndex is set to the index of the '{' itself; with '@',
                    // the substring passed to the annotation parser stops before it.
                    int annotationIndex = line.indexOf('@');
                    if (annotationIndex == -1) {
                        annotationIndex = line.length() - 1;
                        classAnnotations = Collections.emptyList();
                    } else {
                        classAnnotations = parseWhitelistAnnotations(parsers, line.substring(annotationIndex, line.length() - 1));
                    }
                    parseType = "class";
                    whitelistClassOrigin = "[" + filepath + "]:[" + number + "]";
                    // substring(5, ...) drops the leading "class" keyword; trim removes the separating spaces.
                    javaClassName = line.substring(5, annotationIndex).trim();
                    // Reset all the constructors, methods, and fields to support a new class.
                    whitelistConstructors = new ArrayList<>();
                    whitelistMethods = new ArrayList<>();
                    whitelistFields = new ArrayList<>();
                } else if (line.startsWith("static_import ")) {
                    // Ensure the final token of the line is '{'.
                    if (line.endsWith("{") == false) {
                        throw new IllegalArgumentException(
                                "invalid static import definition: failed to parse static import opening bracket [" + line + "]");
                    }
                    if (parseType != null) {
                        throw new IllegalArgumentException("invalid definition: cannot embed static import definition [" + line + "]");
                    }
                    parseType = "static_import";
                // Handle the end of a definition and reset all previously gathered values.
                // Expects the following format: '}' '\n'
                } else if (line.equals("}")) {
                    if (parseType == null) {
                        throw new IllegalArgumentException("invalid definition: extraneous closing bracket");
                    }
                    // Create a new WhitelistClass with all the previously gathered constructors, methods,
                    // augmented methods, and fields, and add it to the list of whitelisted classes.
                    if ("class".equals(parseType)) {
                        whitelistClasses.add(new WhitelistClass(whitelistClassOrigin, javaClassName,
                                whitelistConstructors, whitelistMethods, whitelistFields, classAnnotations));
                        whitelistClassOrigin = null;
                        javaClassName = null;
                        whitelistConstructors = null;
                        whitelistMethods = null;
                        whitelistFields = null;
                        classAnnotations = null;
                    }
                    // Reset the parseType.
                    parseType = null;
                // Handle static import definition types.
                // Expects the following format: ID ID '(' ( ID ( ',' ID )* )? ')' ( 'from_class' | 'bound_to' ) ID annotations? '\n'
                } else if ("static_import".equals(parseType)) {
                    // Mark the origin of this parsable object.
                    String origin = "[" + filepath + "]:[" + number + "]";
                    // Parse the tokens prior to the method parameters.
                    int parameterStartIndex = line.indexOf('(');
                    if (parameterStartIndex == -1) {
                        throw new IllegalArgumentException(
                                "illegal static import definition: start of method parameters not found [" + line + "]");
                    }
                    String[] tokens = line.substring(0, parameterStartIndex).trim().split("\\s+");
                    String methodName;
                    // Based on the number of tokens, look up the Java method name.
                    if (tokens.length == 2) {
                        methodName = tokens[1];
                    } else {
                        throw new IllegalArgumentException("invalid method definition: unexpected format [" + line + "]");
                    }
                    String returnCanonicalTypeName = tokens[0];
                    // Parse the method parameters.
                    int parameterEndIndex = line.indexOf(')');
                    if (parameterEndIndex == -1) {
                        throw new IllegalArgumentException(
                                "illegal static import definition: end of method parameters not found [" + line + "]");
                    }
                    String[] canonicalTypeNameParameters =
                            line.substring(parameterStartIndex + 1, parameterEndIndex).replaceAll("\\s+", "").split(",");
                    // Handle the case for a method with no parameters.
                    if ("".equals(canonicalTypeNameParameters[0])) {
                        canonicalTypeNameParameters = new String[0];
                    }
                    // Parse the annotations if they exist.
                    List<Object> annotations;
                    int annotationIndex = line.indexOf('@');
                    if (annotationIndex == -1) {
                        annotationIndex = line.length();
                        annotations = Collections.emptyList();
                    } else {
                        annotations = parseWhitelistAnnotations(parsers, line.substring(annotationIndex));
                    }
                    // Parse the static import type and class.
                    tokens = line.substring(parameterEndIndex + 1, annotationIndex).trim().split("\\s+");
                    String staticImportType;
                    String targetJavaClassName;
                    // Based on the number of tokens, look up the type and class.
                    if (tokens.length == 2) {
                        staticImportType = tokens[0];
                        targetJavaClassName = tokens[1];
                    } else {
                        throw new IllegalArgumentException("invalid static import definition: unexpected format [" + line + "]");
                    }
                    // Add a static import method or binding depending on the static import type.
                    if ("from_class".equals(staticImportType)) {
                        whitelistStatics.add(new WhitelistMethod(origin, targetJavaClassName,
                                methodName, returnCanonicalTypeName, Arrays.asList(canonicalTypeNameParameters),
                                annotations));
                    } else if ("bound_to".equals(staticImportType)) {
                        whitelistClassBindings.add(new WhitelistClassBinding(origin, targetJavaClassName,
                                methodName, returnCanonicalTypeName, Arrays.asList(canonicalTypeNameParameters),
                                annotations));
                    } else {
                        throw new IllegalArgumentException("invalid static import definition: " +
                                "unexpected static import type [" + staticImportType + "] [" + line + "]");
                    }
                // Handle class definition types.
                } else if ("class".equals(parseType)) {
                    // Mark the origin of this parsable object.
                    String origin = "[" + filepath + "]:[" + number + "]";
                    // Handle the case for a constructor definition.
                    // Expects the following format: '(' ( ID ( ',' ID )* )? ')' annotations? '\n'
                    if (line.startsWith("(")) {
                        // Parse the constructor parameters.
                        int parameterEndIndex = line.indexOf(')');
                        if (parameterEndIndex == -1) {
                            throw new IllegalArgumentException(
                                    "illegal constructor definition: end of constructor parameters not found [" + line + "]");
                        }
                        String[] canonicalTypeNameParameters = line.substring(1, parameterEndIndex).replaceAll("\\s+", "").split(",");
                        // Handle the case for a constructor with no parameters.
                        if ("".equals(canonicalTypeNameParameters[0])) {
                            canonicalTypeNameParameters = new String[0];
                        }
                        // Parse the annotations if they exist.
                        List<Object> annotations;
                        int annotationIndex = line.indexOf('@');
                        annotations = annotationIndex == -1 ?
                                Collections.emptyList() : parseWhitelistAnnotations(parsers, line.substring(annotationIndex));
                        whitelistConstructors.add(new WhitelistConstructor(
                                origin, Arrays.asList(canonicalTypeNameParameters), annotations));
                    // Handle the case for a method or augmented method definition.
                    // Expects the following format: ID ID? ID '(' ( ID ( ',' ID )* )? ')' annotations? '\n'
                    } else if (line.contains("(")) {
                        // Parse the tokens prior to the method parameters.
                        int parameterStartIndex = line.indexOf('(');
                        String[] tokens = line.substring(0, parameterStartIndex).trim().split("\\s+");
                        String methodName;
                        String javaAugmentedClassName;
                        // Based on the number of tokens, look up the Java method name and if provided the Java augmented class.
                        // 2 tokens = return-type + name; 3 tokens = return-type + augmented-class + name.
                        if (tokens.length == 2) {
                            methodName = tokens[1];
                            javaAugmentedClassName = null;
                        } else if (tokens.length == 3) {
                            methodName = tokens[2];
                            javaAugmentedClassName = tokens[1];
                        } else {
                            throw new IllegalArgumentException("invalid method definition: unexpected format [" + line + "]");
                        }
                        String returnCanonicalTypeName = tokens[0];
                        // Parse the method parameters.
                        int parameterEndIndex = line.indexOf(')');
                        if (parameterEndIndex == -1) {
                            throw new IllegalArgumentException(
                                    "illegal static import definition: end of method parameters not found [" + line + "]");
                        }
                        String[] canonicalTypeNameParameters =
                                line.substring(parameterStartIndex + 1, parameterEndIndex).replaceAll("\\s+", "").split(",");
                        // Handle the case for a method with no parameters.
                        if ("".equals(canonicalTypeNameParameters[0])) {
                            canonicalTypeNameParameters = new String[0];
                        }
                        // Parse the annotations if they exist.
                        List<Object> annotations;
                        int annotationIndex = line.indexOf('@');
                        annotations = annotationIndex == -1 ?
                                Collections.emptyList() : parseWhitelistAnnotations(parsers, line.substring(annotationIndex));
                        whitelistMethods.add(new WhitelistMethod(origin, javaAugmentedClassName, methodName,
                                returnCanonicalTypeName, Arrays.asList(canonicalTypeNameParameters),
                                annotations));
                    // Handle the case for a field definition.
                    // Expects the following format: ID ID annotations? '\n'
                    } else {
                        // Parse the annotations if they exist.
                        List<Object> annotations;
                        int annotationIndex = line.indexOf('@');
                        if (annotationIndex == -1) {
                            annotationIndex = line.length();
                            annotations = Collections.emptyList();
                        } else {
                            annotations = parseWhitelistAnnotations(parsers, line.substring(annotationIndex));
                        }
                        // Parse the field tokens.
                        String[] tokens = line.substring(0, annotationIndex).split("\\s+");
                        // Ensure the correct number of tokens.
                        if (tokens.length != 2) {
                            throw new IllegalArgumentException("invalid field definition: unexpected format [" + line + "]");
                        }
                        whitelistFields.add(new WhitelistField(origin, tokens[1], tokens[0], annotations));
                    }
                } else {
                    throw new IllegalArgumentException("invalid definition: unable to parse line [" + line + "]");
                }
            }
            // Ensure all classes end with a '}' token before the end of the file.
            if (javaClassName != null) {
                throw new IllegalArgumentException("invalid definition: expected closing bracket");
            }
        } catch (Exception exception) {
            // Wrap every parse failure with the file path and line number for diagnosis.
            throw new RuntimeException("error in [" + filepath + "] at line [" + number + "]", exception);
        }
    }
    ClassLoader loader = AccessController.doPrivileged((PrivilegedAction<ClassLoader>)resource::getClassLoader);
    return new Whitelist(loader, whitelistClasses, whitelistStatics, whitelistClassBindings, Collections.emptyList());
}
/**
 * Parses the annotation suffix of a whitelist definition line into parsed annotation objects.
 * The expected format is one or more {@code @Name} or {@code @Name[key="value",...]} entries.
 *
 * @param parsers registered annotation parsers, keyed by annotation name
 * @param line    the remainder of the definition line starting at the first '@' (may be blank)
 * @return the parsed annotations, or an empty list when {@code line} is blank
 * @throws IllegalArgumentException if the annotation text is malformed or no parser is
 *                                  registered for an annotation name
 */
private static List<Object> parseWhitelistAnnotations(
        Map<String, WhitelistAnnotationParser> parsers, String line) {

    List<Object> annotations;

    if (line.isBlank()) {
        annotations = Collections.emptyList();
    } else {
        line = line.trim();

        if (line.charAt(0) != '@') {
            throw new IllegalArgumentException("invalid annotation: expected at symbol [" + line + "]");
        }

        if (line.length() < 2) {
            throw new IllegalArgumentException("invalid annotation: expected name [" + line + "]");
        }

        String[] annotationStrings = line.substring(1).split("@");
        annotations = new ArrayList<>(annotationStrings.length);

        for (String annotationString : annotationStrings) {
            String name;
            Map<String, String> arguments;

            annotationString = annotationString.trim();
            int index = annotationString.indexOf('[');

            if (index == -1) {
                // No '[' means the annotation has no arguments.
                name = annotationString;
                arguments = Collections.emptyMap();
            } else {
                if (annotationString.charAt(annotationString.length() - 1) != ']') {
                    throw new IllegalArgumentException("invalid annotation: expected closing brace [" + line + "]");
                }

                name = annotationString.substring(0, index);
                arguments = new HashMap<>();

                // NOTE(review): splitting arguments on ',' still breaks quoted values that
                // themselves contain a comma; that limitation is preserved here.
                String[] argumentsStrings = annotationString.substring(index + 1, annotationString.length() - 1).split(",");

                for (String argumentString : argumentsStrings) {
                    // Split on the FIRST '=' only (limit 2) so quoted values may contain '='
                    // (e.g. key="a=b"); the quote checks below still validate the value format.
                    String[] argumentKeyValue = argumentString.split("=", 2);

                    if (argumentKeyValue.length != 2) {
                        throw new IllegalArgumentException("invalid annotation: expected key=\"value\" [" + line + "]");
                    }

                    String argumentKey = argumentKeyValue[0].trim();

                    if (argumentKey.isEmpty()) {
                        throw new IllegalArgumentException("invalid annotation: expected key=\"value\" [" + line + "]");
                    }

                    String argumentValue = argumentKeyValue[1];

                    // Values must be at least "x" — two quotes around one or more characters.
                    if (argumentValue.length() < 3 || argumentValue.charAt(0) != '"' ||
                            argumentValue.charAt(argumentValue.length() - 1) != '"') {
                        throw new IllegalArgumentException("invalid annotation: expected key=\"value\" [" + line + "]");
                    }

                    argumentValue = argumentValue.substring(1, argumentValue.length() - 1);
                    arguments.put(argumentKey, argumentValue);
                }
            }

            WhitelistAnnotationParser parser = parsers.get(name);

            if (parser == null) {
                throw new IllegalArgumentException("invalid annotation: parser not found for [" + name + "] [" + line + "]");
            }

            annotations.add(parser.parse(arguments));
        }
    }

    return annotations;
}
// Utility class; the private constructor prevents instantiation.
private WhitelistLoader() {}
}
| |
/*
* Copyright (c) 2014-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
package com.facebook.stetho.inspector.elements.android;
import com.facebook.stetho.common.Accumulator;
import com.facebook.stetho.common.Predicate;
import com.facebook.stetho.common.ThreadBound;
import com.facebook.stetho.common.Util;
import com.facebook.stetho.common.android.ViewUtil;
import com.facebook.stetho.inspector.elements.Descriptor;
import com.facebook.stetho.inspector.elements.DescriptorMap;
import com.facebook.stetho.inspector.elements.DocumentProvider;
import com.facebook.stetho.inspector.elements.DocumentProviderListener;
import com.facebook.stetho.inspector.elements.NodeDescriptor;
import com.facebook.stetho.inspector.elements.ObjectDescriptor;
import com.facebook.stetho.inspector.helper.ThreadBoundProxy;
import android.app.Activity;
import android.app.Application;
import android.app.Dialog;
import android.content.Context;
import android.graphics.Canvas;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.widget.TextView;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.Nullable;
final class AndroidDocumentProvider extends ThreadBoundProxy implements DocumentProvider, AndroidDescriptorHost {
// Translucent wash painted over the whole window while inspect mode is active.
private static final int INSPECT_OVERLAY_COLOR = 0x40FFFFFF;
// Highlight color applied to the view under the pointer while inspecting.
private static final int INSPECT_HOVER_COLOR = 0x404040ff;

private final Application mApplication;
private final DescriptorMap mDescriptorMap;
private final AndroidDocumentRoot mDocumentRoot;
private final ViewHighlighter mHighlighter;
private final InspectModeHandler mInspectModeHandler;

private
@Nullable
DocumentProviderListener mListener;

// We don't yet have an implementation for reliably detecting fine-grained changes in the
// View tree. So, for now at least, we have a timer that runs every so often and just reports
// that we changed. Our listener will then read the entire Document from us and transmit the
// changes to Chrome. Detecting, reporting, and traversing fine-grained changes is a future work
// item (see Issue #210).
private static final long REPORT_CHANGED_INTERVAL_MS = 1000;
private boolean mIsReportChangesTimerPosted = false;

// Self-rescheduling poll: fires every REPORT_CHANGED_INTERVAL_MS while a listener is attached.
private final Runnable mReportChangesTimer = new Runnable() {
    @Override
    public void run() {
        mIsReportChangesTimerPosted = false;
        if (mListener != null) {
            mListener.onPossiblyChanged();
            mIsReportChangesTimerPosted = true;
            postDelayed(this, REPORT_CHANGED_INTERVAL_MS);
        }
    }
};
/**
 * Builds the document provider: registers a descriptor for every element type shown in the
 * Elements panel, then sets up view highlighting and inspect-mode handling.
 * Descriptors are registered for most-derived types; lookup walks superclasses at query time.
 */
public AndroidDocumentProvider(Application application, ThreadBound enforcer) {
    super(enforcer);
    mApplication = Util.throwIfNull(application);
    mDocumentRoot = new AndroidDocumentRoot(application);
    mDescriptorMap = new DescriptorMap()
            .beginInit()
            .register(Activity.class, new ActivityDescriptor())
            .register(AndroidDocumentRoot.class, mDocumentRoot)
            .register(Application.class, new ApplicationDescriptor())
            .register(Dialog.class, new DialogDescriptor());
    DialogFragmentDescriptor.register(mDescriptorMap);
    FragmentDescriptor.register(mDescriptorMap)
            .register(Object.class, new ObjectDescriptor())
            .register(TextView.class, new TextViewDescriptor())
            .register(View.class, new ViewDescriptor())
            .register(ViewGroup.class, new ViewGroupDescriptor())
            .register(Window.class, new WindowDescriptor())
            .setHost(this)
            .endInit();
    mHighlighter = ViewHighlighter.newInstance();
    mInspectModeHandler = new InspectModeHandler();
}
/**
 * Tears down this provider: clears any active highlight, leaves inspect mode,
 * stops the change-report timer, and drops the listener reference.
 */
@Override
public void dispose() {
    verifyThreadAccess();
    mHighlighter.clearHighlight();
    mInspectModeHandler.disable();
    removeCallbacks(mReportChangesTimer);
    mIsReportChangesTimerPosted = false;
    mListener = null;
}
/**
 * Attaches or detaches the document listener and keeps the periodic change-report
 * timer running exactly while a listener is attached.
 */
@Override
public void setListener(DocumentProviderListener listener) {
    verifyThreadAccess();
    mListener = listener;

    if (mListener != null && !mIsReportChangesTimerPosted) {
        // A listener just appeared: start polling for document changes.
        mIsReportChangesTimerPosted = true;
        postDelayed(mReportChangesTimer, REPORT_CHANGED_INTERVAL_MS);
    } else if (mListener == null && mIsReportChangesTimerPosted) {
        // Listener removed: cancel the pending poll.
        mIsReportChangesTimerPosted = false;
        removeCallbacks(mReportChangesTimer);
    }
}
// The document root wraps the Application and is the top of the inspected element tree.
@Override
public Object getRootElement() {
    verifyThreadAccess();
    return mDocumentRoot;
}
// Delegates to getDescriptor(); returns null for a null element or an unregistered type.
@Override
public NodeDescriptor getNodeDescriptor(Object element) {
    verifyThreadAccess();
    return getDescriptor(element);
}
/**
 * Highlights the View backing {@code element} in the given color, or clears the current
 * highlight when no backing View can be resolved.
 */
@Override
public void highlightElement(Object element, int color) {
    verifyThreadAccess();

    final View target = getHighlightingView(element);
    if (target != null) {
        mHighlighter.setHighlightedView(target, color);
    } else {
        mHighlighter.clearHighlight();
    }
}
// Removes any element highlight currently shown on screen.
@Override
public void hideHighlight() {
    verifyThreadAccess();
    mHighlighter.clearHighlight();
}
/**
 * Turns inspect mode on or off. The InspectModeHandler owns the overlay views that
 * intercept touches while inspecting.
 */
@Override
public void setInspectModeEnabled(boolean enabled) {
    verifyThreadAccess();

    if (!enabled) {
        mInspectModeHandler.disable();
    } else {
        mInspectModeHandler.enable();
    }
}
/**
 * Forwards an attribute-text edit to the element's descriptor; a silent no-op when the
 * element's type has no registered descriptor.
 */
@Override
public void setAttributesAsText(Object element, String text) {
    verifyThreadAccess();

    final Descriptor descriptor = mDescriptorMap.get(element.getClass());
    if (descriptor == null) {
        return;
    }
    descriptor.setAttributesAsText(element, text);
}
// Descriptor.Host implementation

/** Looks up the descriptor registered for the element's exact class; null-safe. */
@Override
public Descriptor getDescriptor(Object element) {
    if (element == null) {
        return null;
    }
    return mDescriptorMap.get(element.getClass());
}
/** Relays an attribute modification to the attached listener, if any. */
@Override
public void onAttributeModified(Object element, String name, String value) {
    final DocumentProviderListener listener = mListener;
    if (listener != null) {
        listener.onAttributeModified(element, name, value);
    }
}
/** Relays an attribute removal to the attached listener, if any. */
@Override
public void onAttributeRemoved(Object element, String name) {
    final DocumentProviderListener listener = mListener;
    if (listener != null) {
        listener.onAttributeRemoved(element, name);
    }
}
// AndroidDescriptorHost implementation

/**
 * Resolves the View to highlight for {@code element} by walking up its class hierarchy and
 * asking each distinct HighlightableDescriptor along the way. Returns null when the element
 * is null, when any level has no registered descriptor, or when no descriptor supplies a View.
 */
@Override
public View getHighlightingView(Object element) {
    if (element == null) {
        return null;
    }

    Descriptor previousDescriptor = null;
    for (Class<?> cls = element.getClass(); cls != null; cls = cls.getSuperclass()) {
        Descriptor descriptor = mDescriptorMap.get(cls);
        if (descriptor == null) {
            return null;
        }
        // Skip descriptors already consulted at a more-derived level.
        if (descriptor != previousDescriptor && descriptor instanceof HighlightableDescriptor) {
            View view = ((HighlightableDescriptor) descriptor).getViewForHighlighting(element);
            if (view != null) {
                return view;
            }
        }
        previousDescriptor = descriptor;
    }
    return null;
}
/**
 * Collects every Window reachable from the Application element by recursively walking the
 * descriptor tree. Recursion stops at each Window: its children are not scanned for nested
 * windows.
 */
private void getWindows(final Accumulator<Window> accumulator) {
    Descriptor appDescriptor = getDescriptor(mApplication);
    if (appDescriptor != null) {
        Accumulator<Object> elementAccumulator = new Accumulator<Object>() {
            @Override
            public void store(Object element) {
                if (element instanceof Window) {
                    // Store the Window and do not recurse into its children.
                    accumulator.store((Window) element);
                } else {
                    // Recursively scan this element's children in search of more Windows.
                    Descriptor elementDescriptor = getDescriptor(element);
                    if (elementDescriptor != null) {
                        // 'this' re-enters this same accumulator for the child elements.
                        elementDescriptor.getChildren(element, this);
                    }
                }
            }
        };
        appDescriptor.getChildren(mApplication, elementAccumulator);
    }
}
private final class InspectModeHandler {
// Filters hit-testing so our own overlay views are never selected as inspection targets.
private final Predicate<View> mViewSelector = new Predicate<View>() {
    @Override
    public boolean apply(View view) {
        return !(view instanceof DocumentHiddenView);
    }
};

// Overlay views attached to window decor views; null while inspect mode is off.
private List<View> mOverlays;
/**
 * Enters inspect mode: attaches a touch-intercepting OverlayView to the decor view of
 * every window so taps select elements instead of reaching the app. Re-enabling first
 * tears down any existing overlays.
 */
public void enable() {
    verifyThreadAccess();
    if (mOverlays != null) {
        disable();
    }
    mOverlays = new ArrayList<>();
    getWindows(new Accumulator<Window>() {
        @Override
        public void store(Window object) {
            // Only windows whose decor view is a ViewGroup can host an overlay child.
            if (object.peekDecorView() instanceof ViewGroup) {
                final ViewGroup decorView = (ViewGroup) object.peekDecorView();
                OverlayView overlayView = new OverlayView(mApplication);
                WindowManager.LayoutParams layoutParams = new WindowManager.LayoutParams();
                layoutParams.width = WindowManager.LayoutParams.MATCH_PARENT;
                layoutParams.height = WindowManager.LayoutParams.MATCH_PARENT;
                decorView.addView(overlayView, layoutParams);
                // Keep the overlay above all sibling children so it sees touches first.
                decorView.bringChildToFront(overlayView);
                mOverlays.add(overlayView);
            }
        }
    });
}
/**
 * Leaves inspect mode by detaching every overlay view previously added by enable().
 * Safe to call when inspect mode is not active.
 */
public void disable() {
    verifyThreadAccess();

    if (mOverlays == null) {
        return;
    }

    for (View overlay : mOverlays) {
        final ViewGroup decorViewGroup = (ViewGroup) overlay.getParent();
        decorViewGroup.removeView(overlay);
    }
    mOverlays = null;
}
/**
 * Full-window translucent overlay shown during inspect mode. It consumes all touch events,
 * highlighting the view under the pointer and reporting an inspect request on ACTION_UP.
 */
private final class OverlayView extends DocumentHiddenView {
    public OverlayView(Context context) {
        super(context);
    }

    @Override
    protected void onDraw(Canvas canvas) {
        // Tint the whole window so the user can tell inspect mode is active.
        canvas.drawColor(INSPECT_OVERLAY_COLOR);
        super.onDraw(canvas);
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        if (getParent() instanceof View) {
            final View parent = (View) getParent();
            // Hit-test the real view tree, skipping our own hidden overlay views.
            View view = ViewUtil.hitTest(parent, event.getX(), event.getY(), mViewSelector);
            if (event.getAction() != MotionEvent.ACTION_CANCEL) {
                if (view != null) {
                    mHighlighter.setHighlightedView(view, INSPECT_HOVER_COLOR);
                    if (event.getAction() == MotionEvent.ACTION_UP) {
                        if (mListener != null) {
                            mListener.onInspectRequested(view);
                        }
                    }
                }
            }
        }
        // Always consume so touches never reach the underlying app while inspecting.
        return true;
    }
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.utils;
import org.apache.dubbo.common.logger.Logger;
import org.apache.dubbo.common.logger.LoggerFactory;
import java.lang.reflect.Array;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Proxy;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.IdentityHashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.TreeMap;
import java.util.WeakHashMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ConcurrentSkipListMap;
/**
* PojoUtils. Travel object deeply, and convert complex type to simple type.
* <p/>
* Simple types below will be retained as-is:
* <ul>
* <li> Primitive Type, also include <b>String</b>, <b>Number</b>(Integer, Long), <b>Date</b>
* <li> Array of Primitive Type
* <li> Collection, eg: List, Map, Set etc.
* </ul>
* <p/>
* Any other type will be converted to a map containing the attribute/value pairs of the object.
*/
public class PojoUtils {
private static final Logger logger = LoggerFactory.getLogger(PojoUtils.class);

// Cache of resolved setter methods, keyed by "className.setterName(valueClassName)".
private static final ConcurrentMap<String, Method> NAME_METHODS_CACHE = new ConcurrentHashMap<String, Method>();
// Per-class cache of settable fields, keyed by field name.
private static final ConcurrentMap<Class<?>, ConcurrentMap<String, Field>> CLASS_FIELD_CACHE = new ConcurrentHashMap<Class<?>, ConcurrentMap<String, Field>>();
/**
 * Generalizes each element of the given array independently.
 *
 * @param objs the objects to generalize (must be non-null)
 * @return a new array holding the generalized form of each element
 */
public static Object[] generalize(Object[] objs) {
    final Object[] results = new Object[objs.length];
    int index = 0;
    for (Object obj : objs) {
        results[index++] = generalize(obj);
    }
    return results;
}
/**
 * Realizes each element of {@code objs} into the corresponding target type.
 *
 * @param objs  generalized values
 * @param types target classes, one per element
 * @return a new array of realized values
 * @throws IllegalArgumentException when the two arrays differ in length
 */
public static Object[] realize(Object[] objs, Class<?>[] types) {
    if (objs.length != types.length) {
        throw new IllegalArgumentException("args.length != types.length");
    }

    final Object[] results = new Object[objs.length];
    for (int i = 0; i < results.length; i++) {
        results[i] = realize(objs[i], types[i]);
    }
    return results;
}
/**
 * Realizes each element of {@code objs} into the corresponding target type, using the
 * generic type information to reconstruct parameterized collections and maps.
 *
 * @param objs   generalized values
 * @param types  target classes, one per element
 * @param gtypes generic types, one per element
 * @return a new array of realized values
 * @throws IllegalArgumentException when the three arrays differ in length
 */
public static Object[] realize(Object[] objs, Class<?>[] types, Type[] gtypes) {
    if (objs.length != types.length || objs.length != gtypes.length) {
        // Name both possible mismatches; the old message only mentioned types.length even
        // when it was gtypes.length that disagreed.
        throw new IllegalArgumentException("args.length != types.length or args.length != gtypes.length");
    }
    Object[] dests = new Object[objs.length];
    for (int i = 0; i < objs.length; i++) {
        dests[i] = realize(objs[i], types[i], gtypes[i]);
    }
    return dests;
}
/**
 * Generalizes a single object into its simple-type representation, tracking visited
 * objects in an identity map so cyclic object graphs terminate.
 */
public static Object generalize(Object pojo) {
    return generalize(pojo, new IdentityHashMap<Object, Object>());
}
/**
 * Recursive worker for {@link #generalize(Object)}.
 * Enums become their names; primitives/Strings pass through; Class becomes its name;
 * arrays/collections/maps are converted element-wise; any other object becomes a
 * HashMap of its bean properties plus a "class" entry. {@code history} maps each
 * visited source object to its (possibly partially built) generalized form so cycles
 * and shared references resolve to the same output object.
 */
@SuppressWarnings("unchecked")
private static Object generalize(Object pojo, Map<Object, Object> history) {
    if (pojo == null) {
        return null;
    }
    if (pojo instanceof Enum<?>) {
        return ((Enum<?>) pojo).name();
    }
    // An array of enums generalizes to an array of their names.
    if (pojo.getClass().isArray() && Enum.class.isAssignableFrom(pojo.getClass().getComponentType())) {
        int len = Array.getLength(pojo);
        String[] values = new String[len];
        for (int i = 0; i < len; i++) {
            values[i] = ((Enum<?>) Array.get(pojo, i)).name();
        }
        return values;
    }
    // Primitive-ish types (per ReflectUtils) are kept as-is.
    if (ReflectUtils.isPrimitives(pojo.getClass())) {
        return pojo;
    }
    if (pojo instanceof Class) {
        return ((Class) pojo).getName();
    }
    // Already visited: reuse the previously produced representation (cycle guard).
    Object o = history.get(pojo);
    if (o != null) {
        return o;
    }
    history.put(pojo, pojo);
    if (pojo.getClass().isArray()) {
        int len = Array.getLength(pojo);
        Object[] dest = new Object[len];
        // Re-map to the destination before recursing so cycles point at the new array.
        history.put(pojo, dest);
        for (int i = 0; i < len; i++) {
            Object obj = Array.get(pojo, i);
            dest[i] = generalize(obj, history);
        }
        return dest;
    }
    if (pojo instanceof Collection<?>) {
        Collection<Object> src = (Collection<Object>) pojo;
        int len = src.size();
        // Lists stay ordered; every other collection becomes a HashSet.
        Collection<Object> dest = (pojo instanceof List<?>) ? new ArrayList<Object>(len) : new HashSet<Object>(len);
        history.put(pojo, dest);
        for (Object obj : src) {
            dest.add(generalize(obj, history));
        }
        return dest;
    }
    if (pojo instanceof Map<?, ?>) {
        Map<Object, Object> src = (Map<Object, Object>) pojo;
        Map<Object, Object> dest = createMap(src);
        history.put(pojo, dest);
        for (Map.Entry<Object, Object> obj : src.entrySet()) {
            dest.put(generalize(obj.getKey(), history), generalize(obj.getValue(), history));
        }
        return dest;
    }
    // General bean: capture the concrete class name plus every readable bean property.
    Map<String, Object> map = new HashMap<String, Object>();
    history.put(pojo, map);
    map.put("class", pojo.getClass().getName());
    for (Method method : pojo.getClass().getMethods()) {
        if (ReflectUtils.isBeanPropertyReadMethod(method)) {
            try {
                map.put(ReflectUtils.getPropertyNameFromBeanReadMethod(method), generalize(method.invoke(pojo), history));
            } catch (Exception e) {
                throw new RuntimeException(e.getMessage(), e);
            }
        }
    }
    // public field
    for (Field field : pojo.getClass().getFields()) {
        if (ReflectUtils.isPublicInstanceField(field)) {
            try {
                Object fieldValue = field.get(pojo);
                // Skip fields whose name was already captured via a getter above.
                // NOTE(review): history.containsKey(pojo) is always true at this point
                // (pojo was put above); the guard appears to be defensive only.
                if (history.containsKey(pojo)) {
                    Object pojoGeneralizedValue = history.get(pojo);
                    if (pojoGeneralizedValue instanceof Map
                            && ((Map) pojoGeneralizedValue).containsKey(field.getName())) {
                        continue;
                    }
                }
                if (fieldValue != null) {
                    map.put(field.getName(), generalize(fieldValue, history));
                }
            } catch (Exception e) {
                throw new RuntimeException(e.getMessage(), e);
            }
        }
    }
    return map;
}
/** Realizes a generalized value into an instance of {@code type} (no generic type info). */
public static Object realize(Object pojo, Class<?> type) {
    return realize0(pojo, type, null, new IdentityHashMap<Object, Object>());
}
/** Realizes a generalized value into {@code type}, using {@code genericType} to rebuild parameterized element types. */
public static Object realize(Object pojo, Class<?> type, Type genericType) {
    return realize0(pojo, type, genericType, new IdentityHashMap<Object, Object>());
}
/**
 * InvocationHandler backing the interface proxies produced by realize0(): resolves
 * getter-style calls (getX / isX / x) against the generalized property map and lazily
 * realizes nested map values into the getter's declared return type.
 */
private static class PojoInvocationHandler implements InvocationHandler {

    // Generalized property values, keyed by bean property name.
    private Map<Object, Object> map;

    public PojoInvocationHandler(Map<Object, Object> map) {
        this.map = map;
    }

    @Override
    @SuppressWarnings("unchecked")
    public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
        // Object methods (equals/hashCode/toString) operate on the backing map itself.
        if (method.getDeclaringClass() == Object.class) {
            return method.invoke(map, args);
        }
        // Derive the property name: getFoo -> "foo", isFoo -> "foo", foo -> "foo".
        // NOTE(review): toLowerCase() uses the default locale; in e.g. the Turkish locale
        // 'I' does not lower-case to 'i' — confirm whether Locale.ROOT is needed here.
        String methodName = method.getName();
        Object value = null;
        if (methodName.length() > 3 && methodName.startsWith("get")) {
            value = map.get(methodName.substring(3, 4).toLowerCase() + methodName.substring(4));
        } else if (methodName.length() > 2 && methodName.startsWith("is")) {
            value = map.get(methodName.substring(2, 3).toLowerCase() + methodName.substring(3));
        } else {
            value = map.get(methodName.substring(0, 1).toLowerCase() + methodName.substring(1));
        }
        // A nested generalized map is realized on demand unless the getter itself returns a Map.
        if (value instanceof Map<?, ?> && !Map.class.isAssignableFrom(method.getReturnType())) {
            value = realize0((Map<String, Object>) value, method.getReturnType(), null, new IdentityHashMap<Object, Object>());
        }
        return value;
    }
}
/**
 * Picks a concrete Collection implementation compatible with {@code type}:
 * ArrayList or HashSet when assignable, otherwise the type's own no-arg constructor,
 * falling back to an empty ArrayList when nothing else works.
 */
@SuppressWarnings("unchecked")
private static Collection<Object> createCollection(Class<?> type, int len) {
    if (type.isAssignableFrom(ArrayList.class)) {
        return new ArrayList<Object>(len);
    }
    if (type.isAssignableFrom(HashSet.class)) {
        return new HashSet<Object>(len);
    }

    // Concrete, instantiable collection type: try its default constructor.
    if (!type.isInterface() && !Modifier.isAbstract(type.getModifiers())) {
        try {
            return (Collection<Object>) type.newInstance();
        } catch (Exception ignored) {
            // fall through to the generic default below
        }
    }

    return new ArrayList<Object>();
}
/**
 * Creates an empty Map of the same implementation type as {@code src} where possible.
 * Well-known JDK map classes are matched by exact class; anything else is attempted via
 * its no-arg constructor, then via a Map-copy constructor, and finally falls back to a
 * plain HashMap.
 * NOTE(review): a TreeMap is recreated without its original Comparator — confirm callers
 * never rely on a custom ordering.
 */
private static Map createMap(Map src) {
    final Class<? extends Map> mapClass = src.getClass();
    Map target = null;

    if (HashMap.class == mapClass) {
        target = new HashMap();
    } else if (Hashtable.class == mapClass) {
        target = new Hashtable();
    } else if (IdentityHashMap.class == mapClass) {
        target = new IdentityHashMap();
    } else if (LinkedHashMap.class == mapClass) {
        target = new LinkedHashMap();
    } else if (Properties.class == mapClass) {
        target = new Properties();
    } else if (TreeMap.class == mapClass) {
        target = new TreeMap();
    } else if (WeakHashMap.class == mapClass) {
        target = new WeakHashMap();
    } else if (ConcurrentHashMap.class == mapClass) {
        target = new ConcurrentHashMap();
    } else if (ConcurrentSkipListMap.class == mapClass) {
        target = new ConcurrentSkipListMap();
    } else {
        // Unknown map type: try its default constructor first...
        try {
            target = mapClass.newInstance();
        } catch (Exception ignored) { /* fall through */ }

        // ...then a copy constructor taking a Map.
        if (target == null) {
            try {
                Constructor<?> constructor = mapClass.getConstructor(Map.class);
                target = (Map) constructor.newInstance(Collections.EMPTY_MAP);
            } catch (Exception ignored) { /* fall through */ }
        }
    }

    if (target == null) {
        target = new HashMap<Object, Object>();
    }
    return target;
}
/**
 * Recursive worker that converts a generalized value back into an instance of {@code type}.
 * Handles enums (by name), primitive-ish values (via CompatibleTypeUtils), arrays,
 * collections, maps, interfaces (via dynamic proxy), and plain beans (via setter/field
 * injection). {@code history} maps each visited source object to its realized counterpart
 * so cycles and shared references are reconstructed faithfully.
 */
@SuppressWarnings({"unchecked", "rawtypes"})
private static Object realize0(Object pojo, Class<?> type, Type genericType, final Map<Object, Object> history) {
    if (pojo == null) {
        return null;
    }

    // Enum constants round-trip as their names.
    if (type != null && type.isEnum() && pojo.getClass() == String.class) {
        return Enum.valueOf((Class<Enum>) type, (String) pojo);
    }

    // Primitive-ish values convert directly — except a String[] destined for an enum array,
    // which must fall through to the array branch below.
    if (ReflectUtils.isPrimitives(pojo.getClass())
            && !(type != null && type.isArray()
            && type.getComponentType().isEnum()
            && pojo.getClass() == String[].class)) {
        return CompatibleTypeUtils.compatibleTypeConvert(pojo, type);
    }

    // Cycle guard: reuse the previously realized object for shared references.
    Object o = history.get(pojo);
    if (o != null) {
        return o;
    }
    history.put(pojo, pojo);

    if (pojo.getClass().isArray()) {
        // NOTE(review): 'type' is dereferenced here without a null check; array inputs
        // appear to always arrive with a non-null target type — confirm.
        if (Collection.class.isAssignableFrom(type)) {
            Class<?> ctype = pojo.getClass().getComponentType();
            int len = Array.getLength(pojo);
            Collection dest = createCollection(type, len);
            history.put(pojo, dest);
            for (int i = 0; i < len; i++) {
                Object obj = Array.get(pojo, i);
                Object value = realize0(obj, ctype, null, history);
                dest.add(value);
            }
            return dest;
        } else {
            Class<?> ctype = (type != null && type.isArray() ? type.getComponentType() : pojo.getClass().getComponentType());
            int len = Array.getLength(pojo);
            Object dest = Array.newInstance(ctype, len);
            history.put(pojo, dest);
            for (int i = 0; i < len; i++) {
                Object obj = Array.get(pojo, i);
                Object value = realize0(obj, ctype, null, history);
                Array.set(dest, i, value);
            }
            return dest;
        }
    }

    if (pojo instanceof Collection<?>) {
        // NOTE(review): same as above — a null 'type' would NPE on type.isArray().
        if (type.isArray()) {
            Class<?> ctype = type.getComponentType();
            Collection<Object> src = (Collection<Object>) pojo;
            int len = src.size();
            Object dest = Array.newInstance(ctype, len);
            history.put(pojo, dest);
            int i = 0;
            for (Object obj : src) {
                Object value = realize0(obj, ctype, null, history);
                Array.set(dest, i, value);
                i++;
            }
            return dest;
        } else {
            Collection<Object> src = (Collection<Object>) pojo;
            int len = src.size();
            Collection<Object> dest = createCollection(type, len);
            history.put(pojo, dest);
            for (Object obj : src) {
                // Element type comes from the collection's first generic parameter when known.
                Type keyType = getGenericClassByIndex(genericType, 0);
                Class<?> keyClazz = obj.getClass();
                if (keyType instanceof Class) {
                    keyClazz = (Class<?>) keyType;
                }
                Object value = realize0(obj, keyClazz, keyType, history);
                dest.add(value);
            }
            return dest;
        }
    }

    if (pojo instanceof Map<?, ?> && type != null) {
        // Prefer the concrete class recorded by generalize() under the "class" key.
        Object className = ((Map<Object, Object>) pojo).get("class");
        if (className instanceof String) {
            try {
                type = ClassUtils.forName((String) className);
            } catch (ClassNotFoundException e) {
                // ignore
            }
        }

        // special logic for enum
        if (type.isEnum()) {
            Object name = ((Map<Object, Object>) pojo).get("name");
            if (name != null) {
                return Enum.valueOf((Class<Enum>) type, name.toString());
            }
        }
        Map<Object, Object> map;

        // when return type is not the subclass of return type from the signature and not an interface
        if (!type.isInterface() && !type.isAssignableFrom(pojo.getClass())) {
            try {
                map = (Map<Object, Object>) type.newInstance();
                Map<Object, Object> mapPojo = (Map<Object, Object>) pojo;
                map.putAll(mapPojo);
                map.remove("class");
            } catch (Exception e) {
                //ignore error
                map = (Map<Object, Object>) pojo;
            }
        } else {
            map = (Map<Object, Object>) pojo;
        }

        if (Map.class.isAssignableFrom(type) || type == Object.class) {
            // Target is a map: realize each key and value using the declared generic types
            // when available, falling back to the runtime classes of the entries.
            final Map<Object, Object> result = createMap(map);
            history.put(pojo, result);
            for (Map.Entry<Object, Object> entry : map.entrySet()) {
                Type keyType = getGenericClassByIndex(genericType, 0);
                Type valueType = getGenericClassByIndex(genericType, 1);
                Class<?> keyClazz;
                if (keyType instanceof Class) {
                    keyClazz = (Class<?>) keyType;
                } else if (keyType instanceof ParameterizedType) {
                    keyClazz = (Class<?>) ((ParameterizedType) keyType).getRawType();
                } else {
                    keyClazz = entry.getKey() == null ? null : entry.getKey().getClass();
                }
                Class<?> valueClazz;
                if (valueType instanceof Class) {
                    valueClazz = (Class<?>) valueType;
                } else if (valueType instanceof ParameterizedType) {
                    valueClazz = (Class<?>) ((ParameterizedType) valueType).getRawType();
                } else {
                    valueClazz = entry.getValue() == null ? null : entry.getValue().getClass();
                }

                Object key = keyClazz == null ? entry.getKey() : realize0(entry.getKey(), keyClazz, keyType, history);
                Object value = valueClazz == null ? entry.getValue() : realize0(entry.getValue(), valueClazz, valueType, history);
                result.put(key, value);
            }
            return result;
        } else if (type.isInterface()) {
            // Interface target: back it with a dynamic proxy reading from the map lazily.
            Object dest = Proxy.newProxyInstance(Thread.currentThread().getContextClassLoader(), new Class<?>[]{type}, new PojoInvocationHandler(map));
            history.put(pojo, dest);
            return dest;
        } else {
            // Concrete bean target: instantiate and inject each entry via setter or field.
            Object dest = newInstance(type);
            history.put(pojo, dest);
            for (Map.Entry<Object, Object> entry : map.entrySet()) {
                Object key = entry.getKey();
                if (key instanceof String) {
                    String name = (String) key;
                    Object value = entry.getValue();
                    if (value != null) {
                        Method method = getSetterMethod(dest.getClass(), name, value.getClass());
                        Field field = getField(dest.getClass(), name);
                        if (method != null) {
                            if (!method.isAccessible()) {
                                method.setAccessible(true);
                            }
                            Type ptype = method.getGenericParameterTypes()[0];
                            value = realize0(value, method.getParameterTypes()[0], ptype, history);
                            try {
                                method.invoke(dest, value);
                            } catch (Exception e) {
                                String exceptionDescription = "Failed to set pojo " + dest.getClass().getSimpleName() + " property " + name
                                        + " value " + value + "(" + value.getClass() + "), cause: " + e.getMessage();
                                logger.error(exceptionDescription, e);
                                throw new RuntimeException(exceptionDescription, e);
                            }
                        } else if (field != null) {
                            value = realize0(value, field.getType(), field.getGenericType(), history);
                            try {
                                field.set(dest, value);
                            } catch (IllegalAccessException e) {
                                throw new RuntimeException("Failed to set field " + name + " of pojo " + dest.getClass().getName() + " : " + e.getMessage(), e);
                            }
                        }
                    }
                }
            }
            // Restore a Throwable's message, which has no setter and a private field.
            if (dest instanceof Throwable) {
                Object message = map.get("message");
                if (message instanceof String) {
                    try {
                        Field field = Throwable.class.getDeclaredField("detailMessage");
                        if (!field.isAccessible()) {
                            field.setAccessible(true);
                        }
                        field.set(dest, message);
                    } catch (Exception e) {
                        // best-effort: leave the message unset if reflection is denied
                    }
                }
            }
            return dest;
        }
    }
    return pojo;
}
/**
 * Get parameterized type
 *
 * @param genericType generic type
 * @param index index of the target parameterized type
 * @return Person.class for {@code List<Person>}; Person.class for {@code Map<String, Person>}
 *         when index=1; null when {@code genericType} is not a ParameterizedType
 */
private static Type getGenericClassByIndex(Type genericType, int index) {
    Type clazz = null;
    // find parameterized type
    if (genericType instanceof ParameterizedType) {
        ParameterizedType t = (ParameterizedType) genericType;
        Type[] types = t.getActualTypeArguments();
        clazz = types[index];
    }
    return clazz;
}
/**
 * Instantiates {@code cls}, first via its no-arg constructor and, failing that, via the
 * declared constructor with the fewest parameters, each supplied a default value.
 *
 * @throws RuntimeException when the class declares no constructors or instantiation fails
 */
private static Object newInstance(Class<?> cls) {
    try {
        return cls.newInstance();
    } catch (Throwable t) {
        try {
            Constructor<?>[] constructors = cls.getDeclaredConstructors();
            /**
             * From Javadoc java.lang.Class#getDeclaredConstructors
             * This method returns an array of Constructor objects reflecting all the constructors
             * declared by the class represented by this Class object.
             * This method returns an array of length 0,
             * if this Class object represents an interface, a primitive type, an array class, or void.
             */
            if (constructors.length == 0) {
                throw new RuntimeException("Illegal constructor: " + cls.getName());
            }
            // Prefer the constructor with the fewest parameters; stop early on a no-arg one.
            Constructor<?> constructor = constructors[0];
            if (constructor.getParameterTypes().length > 0) {
                for (Constructor<?> c : constructors) {
                    if (c.getParameterTypes().length < constructor.getParameterTypes().length) {
                        constructor = c;
                        if (constructor.getParameterTypes().length == 0) {
                            break;
                        }
                    }
                }
            }
            constructor.setAccessible(true);
            // Fill each remaining parameter with a type-appropriate default value.
            Object[] parameters = Arrays.stream(constructor.getParameterTypes()).map(PojoUtils::getDefaultValue).toArray();
            return constructor.newInstance(parameters);
        } catch (InstantiationException e) {
            throw new RuntimeException(e.getMessage(), e);
        } catch (IllegalAccessException e) {
            throw new RuntimeException(e.getMessage(), e);
        } catch (InvocationTargetException e) {
            throw new RuntimeException(e.getMessage(), e);
        }
    }
}
/**
 * Returns the default (zero) value for a constructor parameter of the given type.
 * Each primitive gets the wrapper of its own JLS default so that reflective
 * {@code Constructor.newInstance} accepts it: passing an {@code Integer} for a
 * {@code boolean}, {@code byte} or {@code short} parameter throws
 * {@code IllegalArgumentException} (reflection never narrows). The previous code also
 * tested the name {@code "bool"}, which never matches — the primitive is named
 * {@code "boolean"} — so boolean parameters always received a broken {@code 0}.
 *
 * @param parameterType the declared parameter type
 * @return the boxed default for a primitive type, or {@code null} for reference types
 */
private static Object getDefaultValue(Class<?> parameterType) {
    if (parameterType == char.class) {
        return Character.MIN_VALUE;
    }
    if (parameterType == boolean.class) {
        return Boolean.FALSE;
    }
    if (parameterType == byte.class) {
        return (byte) 0;
    }
    if (parameterType == short.class) {
        return (short) 0;
    }
    if (parameterType == int.class) {
        return 0;
    }
    if (parameterType == long.class) {
        return 0L;
    }
    if (parameterType == float.class) {
        return 0F;
    }
    if (parameterType == double.class) {
        return 0D;
    }
    return null;
}
/**
 * Resolves the setter for {@code property} on {@code cls}, first by exact value type and
 * then by scanning all bean-style write methods of the same name. Hits are memoized in
 * NAME_METHODS_CACHE; returns null when no setter exists.
 */
private static Method getSetterMethod(Class<?> cls, String property, Class<?> valueCls) {
    String name = "set" + property.substring(0, 1).toUpperCase() + property.substring(1);
    // Build the cache key once instead of repeating the concatenation on each access.
    String cacheKey = cls.getName() + "." + name + "(" + valueCls.getName() + ")";

    Method method = NAME_METHODS_CACHE.get(cacheKey);
    if (method != null) {
        return method;
    }

    try {
        method = cls.getMethod(name, valueCls);
    } catch (NoSuchMethodException e) {
        // No exact-type setter: accept any write method with the right name.
        for (Method candidate : cls.getMethods()) {
            if (ReflectUtils.isBeanPropertyWriteMethod(candidate) && candidate.getName().equals(name)) {
                method = candidate;
            }
        }
    }
    if (method != null) {
        NAME_METHODS_CACHE.put(cacheKey, method);
    }
    return method;
}
/**
 * Looks up a settable field named {@code fieldName} on {@code cls}: a field declared
 * directly on the class (made accessible), falling back to any inherited public instance
 * field. Results are memoized in CLASS_FIELD_CACHE; returns null when nothing matches.
 */
private static Field getField(Class<?> cls, String fieldName) {
    Field result = null;
    if (CLASS_FIELD_CACHE.containsKey(cls) && CLASS_FIELD_CACHE.get(cls).containsKey(fieldName)) {
        return CLASS_FIELD_CACHE.get(cls).get(fieldName);
    }
    try {
        result = cls.getDeclaredField(fieldName);
        result.setAccessible(true);
    } catch (NoSuchFieldException e) {
        // Not declared directly on cls: fall back to public fields (including inherited).
        for (Field field : cls.getFields()) {
            if (fieldName.equals(field.getName()) && ReflectUtils.isPublicInstanceField(field)) {
                result = field;
                break;
            }
        }
    }
    if (result != null) {
        // putIfAbsent on both levels keeps the cache consistent under concurrent lookups.
        ConcurrentMap<String, Field> fields = CLASS_FIELD_CACHE.get(cls);
        if (fields == null) {
            fields = new ConcurrentHashMap<String, Field>();
            CLASS_FIELD_CACHE.putIfAbsent(cls, fields);
        }
        fields = CLASS_FIELD_CACHE.get(cls);
        fields.putIfAbsent(fieldName, result);
    }
    return result;
}
/**
 * A class is treated as a POJO when it is neither a primitive-ish type (per ReflectUtils)
 * nor a Collection nor a Map.
 */
public static boolean isPojo(Class<?> cls) {
    if (ReflectUtils.isPrimitives(cls)) {
        return false;
    }
    if (Collection.class.isAssignableFrom(cls)) {
        return false;
    }
    return !Map.class.isAssignableFrom(cls);
}
}
| |
package alien4cloud.orchestrators.services;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.UUID;
import javax.annotation.Resource;
import javax.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.elasticsearch.index.query.FilterBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.springframework.stereotype.Service;
import alien4cloud.dao.IGenericSearchDAO;
import alien4cloud.dao.model.GetMultipleDataResult;
import alien4cloud.exception.AlreadyExistException;
import alien4cloud.exception.NotFoundException;
import alien4cloud.model.orchestrators.ArtifactSupport;
import alien4cloud.model.orchestrators.Orchestrator;
import alien4cloud.model.orchestrators.OrchestratorConfiguration;
import alien4cloud.model.orchestrators.OrchestratorState;
import alien4cloud.model.orchestrators.locations.Location;
import alien4cloud.model.orchestrators.locations.LocationSupport;
import alien4cloud.orchestrators.locations.services.LocationService;
import alien4cloud.orchestrators.plugin.IOrchestratorPluginFactory;
import alien4cloud.utils.MapUtil;
/**
 * Manages orchestrators: creation, deletion, lookup, search, and access to the
 * plugin-provided location/artifact support information.
 */
@Slf4j
@Service
public class OrchestratorService {
    /**
     * States in which an orchestrator counts as enabled. Values are lower-cased with
     * Locale.ROOT so the result is locale-independent (the default-locale toLowerCase()
     * previously used would mangle CONNECTING under e.g. the Turkish locale); the
     * lower-cased form is what the terms query in {@link #getAllEnabledOrchestrators()} matches.
     */
    public static final String[] ENABLED_STATES = new String[] { OrchestratorState.CONNECTED.toString().toLowerCase(Locale.ROOT),
            OrchestratorState.CONNECTING.toString().toLowerCase(Locale.ROOT), OrchestratorState.DISCONNECTED.toString().toLowerCase(Locale.ROOT) };

    @Resource(name = "alien-es-dao")
    private IGenericSearchDAO alienDAO;
    @Inject
    private OrchestratorFactoriesRegistry orchestratorFactoriesRegistry;
    @Inject
    private LocationService locationService;

    /**
     * Creates an orchestrator.
     *
     * @param name The unique name that defines the orchestrator from user point of view.
     * @param pluginId The id of the plugin used to communicate with the orchestrator.
     * @param pluginBean The bean in the plugin that is indeed managing communication.
     * @return The generated identifier for the orchestrator.
     * @throws AlreadyExistException if an orchestrator with the given name already exists.
     */
    public synchronized String create(String name, String pluginId, String pluginBean) {
        Orchestrator orchestrator = new Orchestrator();
        // generate an unique id
        orchestrator.setId(UUID.randomUUID().toString());
        orchestrator.setName(name);
        orchestrator.setPluginId(pluginId);
        orchestrator.setPluginBean(pluginBean);
        // by default clouds are disabled as it should be configured before being enabled.
        orchestrator.setState(OrchestratorState.DISABLED);
        orchestrator.setAuthorizedUsers(new ArrayList<String>());
        orchestrator.setAuthorizedGroups(new ArrayList<String>());
        // get default configuration for the orchestrator.
        IOrchestratorPluginFactory orchestratorFactory = getPluginFactory(orchestrator);
        OrchestratorConfiguration configuration = new OrchestratorConfiguration(orchestrator.getId(), orchestratorFactory.getDefaultConfiguration());
        // save the orchestrator first so the name-unicity check can fail before the configuration is persisted.
        ensureNameUnicityAndSave(orchestrator);
        alienDAO.save(configuration);
        return orchestrator.getId();
    }

    /**
     * Save the orchestrator but ensure that the name is unique before saving it.
     *
     * @param orchestrator The orchestrator to save.
     */
    private synchronized void ensureNameUnicityAndSave(Orchestrator orchestrator) {
        ensureNameUnicityAndSave(orchestrator, null);
    }

    /**
     * Save the orchestrator but ensure that the name is unique before saving it.
     *
     * @param orchestrator The orchestrator to save.
     * @param oldName The previous name of the orchestrator, or null/blank when this is a creation;
     *            the unicity check is skipped when the name did not change.
     */
    public synchronized void ensureNameUnicityAndSave(Orchestrator orchestrator, String oldName) {
        if (StringUtils.isBlank(oldName) || !Objects.equals(orchestrator.getName(), oldName)) {
            // check that the orchestrator doesn't already exists
            if (alienDAO.count(Orchestrator.class, QueryBuilders.termQuery("name", orchestrator.getName())) > 0) {
                throw new AlreadyExistException("a cloud with the given name already exists.");
            }
        }
        alienDAO.save(orchestrator);
    }

    /**
     * Delete an existing orchestrator, including all of its locations and its configuration.
     *
     * @param id The id of the orchestrator to delete.
     */
    public void delete(String id) {
        // delete all locations for the orchestrator
        Location[] locations = locationService.getOrchestratorLocations(id);
        if (locations != null) {
            for (Location location : locations) {
                locationService.delete(id, location.getId());
            }
        }
        // delete the orchestrator configuration
        alienDAO.delete(OrchestratorConfiguration.class, id);
        alienDAO.delete(Orchestrator.class, id);
    }

    /**
     * Get the orchestrator matching the given id.
     *
     * @param id If of the orchestrator that we want to get.
     * @return An instance of the orchestrator, or null when none matches.
     */
    public Orchestrator get(String id) {
        return alienDAO.findById(Orchestrator.class, id);
    }

    /**
     * Get the orchestrator matching the given id or throw a NotFoundException.
     *
     * @param id If of the orchestrator that we want to get.
     * @return An instance of the orchestrator.
     * @throws NotFoundException if no orchestrator matches the given id.
     */
    public Orchestrator getOrFail(String id) {
        Orchestrator orchestrator = alienDAO.findById(Orchestrator.class, id);
        if (orchestrator == null) {
            throw new NotFoundException("Orchestrator [" + id + "] doesn't exists.");
        }
        return orchestrator;
    }

    /**
     * Get multiple orchestrators.
     *
     * @param query The query to apply to filter orchestrators.
     * @param status Optional state to filter on (null means no state filter).
     * @param from The start index of the query.
     * @param size The maximum number of elements to return.
     * @param authorizationFilter authorization filter
     * @return A {@link GetMultipleDataResult} that contains Orchestrator objects.
     */
    public GetMultipleDataResult<Orchestrator> search(String query, OrchestratorState status, int from, int size, FilterBuilder authorizationFilter) {
        Map<String, String[]> filters = null;
        if (status != null) {
            // Filter on the "state" field: that is the Orchestrator property actually indexed
            // (see getAllEnabledOrchestrators); the previous "status" key matched no field, so
            // the state filter was silently ignored.
            // NOTE(review): getAllEnabledOrchestrators queries lower-cased values — confirm
            // whether this filter value needs the same normalization for the DAO filter path.
            filters = MapUtil.newHashMap(new String[] { "state" }, new String[][] { new String[] { status.toString() } });
        }
        return alienDAO.search(Orchestrator.class, query, filters, authorizationFilter, null, from, size);
    }

    /**
     * Get the location support information for a given orchestrator.
     *
     * @param orchestratorId The id of the orchestrator for which to get location support information.
     * @return location support information.
     */
    public LocationSupport getLocationSupport(String orchestratorId) {
        Orchestrator orchestrator = getOrFail(orchestratorId);
        IOrchestratorPluginFactory orchestratorFactory = getPluginFactory(orchestrator);
        return orchestratorFactory.getLocationSupport();
    }

    /**
     * Get the artifact support information for a given orchestrator.
     *
     * @param orchestratorId The id of the orchestrator for which to get artifact support information.
     * @return artifact support information.
     */
    public ArtifactSupport getArtifactSupport(String orchestratorId) {
        Orchestrator orchestrator = getOrFail(orchestratorId);
        IOrchestratorPluginFactory orchestratorFactory = getPluginFactory(orchestrator);
        return orchestratorFactory.getArtifactSupport();
    }

    /**
     * Get the orchestrator plugin factory for the given orchestrator.
     *
     * @param orchestrator The orchestrator for which to get the orchestrator plugin factory.
     * @return An instance of the orchestrator plugin factory for the given orchestrator.
     */
    public IOrchestratorPluginFactory getPluginFactory(Orchestrator orchestrator) {
        return orchestratorFactoriesRegistry.getPluginBean(orchestrator.getPluginId(), orchestrator.getPluginBean());
    }

    /** @return every orchestrator whose state is one of {@link #ENABLED_STATES}. */
    public List<Orchestrator> getAllEnabledOrchestrators() {
        return alienDAO.customFindAll(Orchestrator.class, QueryBuilders.termsQuery("state", ENABLED_STATES));
    }

    /** @return every orchestrator, regardless of state. */
    public List<Orchestrator> getAll() {
        return alienDAO.customFindAll(Orchestrator.class, null);
    }
}
| |
package com.dtolabs.rundeck.core.execution.workflow.state;
import com.dtolabs.rundeck.core.common.INodeEntry;
import com.dtolabs.rundeck.core.execution.ExecutionContext;
import com.dtolabs.rundeck.core.execution.StatusResult;
import com.dtolabs.rundeck.core.execution.StepExecutionItem;
import com.dtolabs.rundeck.core.execution.dispatch.INodeEntryComparator;
import com.dtolabs.rundeck.core.execution.workflow.*;
import com.dtolabs.rundeck.core.execution.workflow.steps.NodeDispatchStepExecutor;
import com.dtolabs.rundeck.core.execution.workflow.steps.StepExecutionResult;
import com.dtolabs.rundeck.core.execution.workflow.steps.StepExecutor;
import com.dtolabs.rundeck.core.execution.workflow.steps.node.NodeStepExecutionItem;
import com.dtolabs.rundeck.core.execution.workflow.steps.node.NodeStepResult;
import com.dtolabs.rundeck.core.utils.Pair;
import java.util.*;
/**
* Adapts events from a {@link WorkflowExecutionListener} and sends changes to a list of {@link WorkflowStateListener}s.
*/
public class WorkflowExecutionStateListenerAdapter implements WorkflowExecutionListener {
// Listeners that receive every workflow/sub-workflow/step state change; no defensive copy is made.
List<WorkflowStateListener> listeners;
// Tracks the nested step/node context as execution descends into sub-workflows and node dispatches.
StepContextWorkflowExecutionListener<INodeEntry, StepContextId> stepContext;
public WorkflowExecutionStateListenerAdapter() {
this(new ArrayList<WorkflowStateListener>());
}
// The given list is used as-is; callers can add more listeners via addWorkflowStateListener.
public WorkflowExecutionStateListenerAdapter(List<WorkflowStateListener> listeners) {
this.listeners = listeners;
stepContext = new StepContextWorkflowExecutionListener<INodeEntry, StepContextId>();
}
public void addWorkflowStateListener(WorkflowStateListener listener) {
listeners.add(listener);
}
// Broadcast a top-level workflow state change to every registered listener.
private void notifyAllWorkflowState(ExecutionState executionState, Date timestamp, List<String> nodenames) {
for (WorkflowStateListener listener : listeners) {
listener.workflowExecutionStateChanged(executionState, timestamp, nodenames);
}
}
// Broadcast a state change of a nested (sub-)workflow identified by its step identifier.
private void notifyAllSubWorkflowState(StepIdentifier identifier, ExecutionState executionState, Date timestamp,
List<String> nodenames) {
for (WorkflowStateListener listener : listeners) {
listener.subWorkflowExecutionStateChanged(identifier,executionState, timestamp, nodenames);
}
}
// Broadcast a single step's state change to every registered listener.
private void notifyAllStepState(StepIdentifier identifier, StepStateChange stepStateChange, Date timestamp) {
for (WorkflowStateListener listener : listeners) {
listener.stepStateChanged(identifier, stepStateChange, timestamp);
}
}
// Called when a workflow starts. If we are already inside a step on a node, this is a
// sub-workflow: a parameterized step context (node=<name>) is pushed first so the emitted
// identifier distinguishes the per-node execution.
public void beginWorkflowExecution(StepExecutionContext executionContext, WorkflowExecutionItem item) {
StepContextId currentStep = stepContext.getCurrentStep();
INodeEntry currentNode = stepContext.getCurrentNode();
if(null!= currentNode && null != currentStep) {
//if already node context, begin a parameterized sub workflow
//change step context to include node name parameter for the step id
HashMap<String, String> params = new HashMap<String, String>();
params.put("node", currentNode.getNodename());
stepContext.beginStepContext(StateUtils.stepContextId(currentStep.getStep(),
!currentStep.getAspect().isMain(),params));
}
stepContext.beginContext();
List<Pair<StepContextId, INodeEntry>> currentContext = stepContext.getCurrentContextPairs();
List<String> names = getNodeNames(executionContext);
// No surrounding context means this is the top-level workflow; otherwise notify as sub-workflow.
if(null==currentContext ){
notifyAllWorkflowState(ExecutionState.RUNNING, new Date(), names);
}else{
notifyAllSubWorkflowState(createIdentifier(), ExecutionState.RUNNING, new Date(), names);
}
}
// Extract the target node names from the context, in rank order (rank attribute + direction
// taken from the execution context).
private List<String> getNodeNames(StepExecutionContext executionContext) {
List<INodeEntry> orderedNodes = INodeEntryComparator.rankOrderedNodes(executionContext.getNodes(),
executionContext.getNodeRankAttribute(),
executionContext.isNodeRankOrderAscending());
List<String> names = new ArrayList<String>();
for (INodeEntry orderedNode : orderedNodes) {
names.add(orderedNode.getNodename());
}
return names;
}
// Called when a workflow finishes: emits SUCCEEDED/FAILED for the top-level workflow or the
// current sub-workflow, then pops the context that beginWorkflowExecution pushed.
public void finishWorkflowExecution(WorkflowExecutionResult result, StepExecutionContext executionContext,
WorkflowExecutionItem item) {
List<Pair<StepContextId, INodeEntry>> currentContext = stepContext.getCurrentContextPairs();
if (null == currentContext || currentContext.size() < 1) {
notifyAllWorkflowState(
null != result && result.isSuccess() ? ExecutionState.SUCCEEDED : ExecutionState.FAILED,
new Date(), null);
}else{
notifyAllSubWorkflowState(createIdentifier(),
null != result && result.isSuccess() ? ExecutionState.SUCCEEDED : ExecutionState.FAILED,
new Date(), null);
}
stepContext.finishContext();
}
// Build a step identifier from the current nested context.
private StepIdentifier createIdentifier() {
return StateUtils.stepIdentifier(stepContext.getCurrentContext());
}
private StepStateChange createStepStateChange(ExecutionState executionState) {
return createStepStateChange(executionState, null);
}
// Build a state change for the given state/metadata, tagged with the current node name when
// a node context is active.
private StepStateChange createStepStateChange(ExecutionState executionState, Map metadata) {
INodeEntry currentNode = stepContext.getCurrentNode();
return StateUtils.stepStateChange(StateUtils.stepState(executionState,metadata), null != currentNode ? currentNode
.getNodename() : null);
}
private StepStateChange createStepStateChange(StepExecutionResult result){
INodeEntry currentNode = stepContext.getCurrentNode();
return createStepStateChange(result, currentNode);
}
// Translate a step execution result into a state change (state + failure metadata + message).
private StepStateChange createStepStateChange(StepExecutionResult result, INodeEntry currentNode) {
return StateUtils.stepStateChange(
StateUtils.stepState(
resultState(result),
resultMetadata(result),
resultMessage(result)
),
null != currentNode ? currentNode.getNodename() : null);
}
// Failure message of the result, or null for a null result.
private String resultMessage(StepExecutionResult result) {
return null!=result?result.getFailureMessage():null;
}
// Null result is treated as FAILED.
private ExecutionState resultState(StepExecutionResult result) {
return (null!=result && result.isSuccess()) ? ExecutionState.SUCCEEDED :
ExecutionState.FAILED;
}
// Failure metadata map, or null on success.
// NOTE(review): for a non-null failed result this calls result.getFailureReason().toString()
// unconditionally — assumes getFailureReason() is non-null on failure; confirm, otherwise NPE.
private Map<String, Object> resultMetadata(StepExecutionResult result) {
if (null != result && result.isSuccess()) {
return null;
}
HashMap<String, Object> map = new HashMap<String, Object>();
if (null != result && null != result.getFailureData()) {
map.putAll(result.getFailureData());
}
map.put("failureReason", null != result ? result.getFailureReason().toString() : "Unknown");
return map;
}
// A regular workflow step begins: push its context and notify RUNNING.
public void beginWorkflowItem(int step, StepExecutionItem item) {
stepContext.beginStepContext(StateUtils.stepContextId(step, false));
notifyAllStepState(createIdentifier(), createStepStateChange(ExecutionState.RUNNING), new Date());
}
// An error-handler step begins: push an error-handler context and notify RUNNING_HANDLER,
// flagging the change with handlerTriggered=true metadata.
@Override
public void beginWorkflowItemErrorHandler(int step, StepExecutionItem item) {
stepContext.beginStepContext(StateUtils.stepContextId(step, true));
HashMap<String,String> ehMap=new HashMap<String, String>();
ehMap.put("handlerTriggered", "true");
notifyAllStepState(createIdentifier(), createStepStateChange(ExecutionState.RUNNING_HANDLER, ehMap),
new Date());
}
// A workflow step finishes. Node-dispatch and node-step items are skipped here because their
// final state is reported per node via finishExecuteNodeStep.
public void finishWorkflowItem(int step, StepExecutionItem item, StepExecutionResult result) {
if (NodeDispatchStepExecutor.STEP_EXECUTION_TYPE.equals(item.getType()) || item instanceof NodeStepExecutionItem) {
//dont notify
} else {
notifyAllStepState(createIdentifier(), createStepStateChange(result), new Date());
}
stepContext.finishStepContext();
}
@Override
public void finishWorkflowItemErrorHandler(int step, StepExecutionItem item, StepExecutionResult result) {
//dont notify
}
// A step begins on a specific node: push the node context and notify RUNNING (or
// RUNNING_HANDLER when the current step aspect is an error handler).
public void beginExecuteNodeStep(ExecutionContext context, NodeStepExecutionItem item, INodeEntry node) {
//if node step item is not state transitionable, ignore it
stepContext.beginNodeContext(node);
StepIdentifier identifier = createIdentifier();
notifyAllStepState(identifier,
createStepStateChange(
StateUtils.last(identifier).getAspect().isMain()
? ExecutionState.RUNNING
: ExecutionState.RUNNING_HANDLER
),
new Date()
);
}
// Intentionally a no-op: generic (non-node) step execution begin/end carry no extra state here.
public void beginStepExecution(StepExecutor executor,StepExecutionContext context, StepExecutionItem item) {
}
public void finishStepExecution(StepExecutor executor, StatusResult result, StepExecutionContext context, StepExecutionItem item) {
}
// A node step finished: notify its final state, then pop the node context.
public void finishExecuteNodeStep(NodeStepResult result, ExecutionContext context, StepExecutionItem item,
INodeEntry node) {
//if node step item is not state transitionable, ignore it
notifyAllStepState(createIdentifier(), createStepStateChange(result), new Date());
stepContext.finishNodeContext();
}
}
| |
/*L
* Copyright Oracle Inc
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/cadsr-cgmdr-nci-uk/LICENSE.txt for details.
*/
/*
* eXist Open Source Native XML Database
* Copyright (C) 2001-06 The eXist Project
* http://exist-db.org
* http://exist.sourceforge.net
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*
* $Id$
*/
package org.exist.xquery;
import java.io.File;
import org.custommonkey.xmlunit.XMLTestCase;
import org.exist.storage.DBBroker;
import org.exist.xmldb.DatabaseInstanceManager;
import org.w3c.dom.Element;
import org.xmldb.api.DatabaseManager;
import org.xmldb.api.base.Collection;
import org.xmldb.api.base.Database;
import org.xmldb.api.base.Resource;
import org.xmldb.api.base.ResourceIterator;
import org.xmldb.api.base.ResourceSet;
import org.xmldb.api.base.XMLDBException;
import org.xmldb.api.modules.CollectionManagementService;
import org.xmldb.api.modules.XMLResource;
import org.xmldb.api.modules.XPathQueryService;
/** Tests of the Group By extension for XQuery ("group ... as ... by ... as ..." clauses).
 *
 * @author Boris Verhaegen (boris.verhaegen@gmail.com)
 *
 * */
public class XQueryGroupByTest extends XMLTestCase {
    private static final String BINARYTABLE_XML = "binaryTable.xml";
    private static final String BEYER_XML = "beyer.xml";
    private static final String ITEMS_XML = "items.xml";

    // Eight items covering each combination of two binary keys twice.
    private final static String binaryTable =
        "<items>"
        + "<item><key1>1</key1><key2>1</key2></item>"
        + "<item><key1>1</key1><key2>0</key2></item>"
        + "<item><key1>0</key1><key2>1</key2></item>"
        + "<item><key1>0</key1><key2>0</key2></item>"
        + "<item><key1>1</key1><key2>1</key2></item>"
        + "<item><key1>1</key1><key2>0</key2></item>"
        + "<item><key1>0</key1><key2>1</key2></item>"
        + "<item><key1>0</key1><key2>0</key2></item>"
        +"</items>";

    // Two books with nested category elements, used for the Beyer Q11/Q12 tests.
    private final static String beyer =
        "<books>"+
        " <book>"+
        " <title>Transaction Processing</title>"+
        " <publisher>Morgan Kaufmann</publisher>"+
        " <year>1993</year>"+
        " <price>59.00</price>"+
        " <categories>"+
        " <software>"+
        " <db>"+
        " <concurrency/>"+
        " </db>"+
        " <distributed/>"+
        " </software>"+
        " </categories>"+
        " </book>"+
        " <book>"+
        " <title>Readings in Database Systems</title>"+
        " <publisher>Morgan Kaufmann</publisher>"+
        " <year>1998</year>"+
        " <price>65.00</price>"+
        " <categories>"+
        " <software>"+
        " <db/>"+
        " </software>"+
        " <anthology/>"+
        " </categories>"+
        " </book>"+
        "</books>";

    // Keys (11,1) and (1,11) must hash to distinct groups (see testHashKey).
    private final static String items =
        "<items>"
        + "<item><key1>11</key1><key2>1</key2></item>"
        + "<item><key1>1</key1><key2>11</key2></item>"
        +"</items>";

    private Collection testCollection;
    private Database database;
    private CollectionManagementService testService;

    public XQueryGroupByTest(String arg0) {
        super(arg0);
    }

    /**
     * Registers the eXist XMLDB driver and creates the "testGB" working collection.
     * Initialization failures now abort the test run with a diagnostic instead of being
     * silently swallowed (the former empty catch blocks left testCollection null and
     * caused opaque NullPointerExceptions in the tests).
     */
    protected void setUp() {
        try {
            // initialize driver
            Class<?> cl = Class.forName("org.exist.xmldb.DatabaseImpl");
            database = (Database) cl.newInstance();
            database.setProperty("create-database", "true");
            DatabaseManager.registerDatabase(database);
            Collection root =
                DatabaseManager.getCollection("xmldb:exist://" + DBBroker.ROOT_COLLECTION, "admin", null);
            testService =
                (CollectionManagementService) root.getService("CollectionManagementService", "1.0");
            testCollection = testService.createCollection("testGB");
            assertNotNull(testCollection);
        } catch (ClassNotFoundException e) {
            fail("eXist driver class not found: " + e);
        } catch (InstantiationException e) {
            fail("could not instantiate eXist driver: " + e);
        } catch (IllegalAccessException e) {
            fail("could not access eXist driver: " + e);
        } catch (XMLDBException e) {
            e.printStackTrace();
            fail("database setup failed: " + e);
        }
    }

    /** Removes the working collection and shuts the embedded database down. */
    protected void tearDown() throws Exception {
        testService.removeCollection("testGB");
        DatabaseManager.deregisterDatabase(database);
        DatabaseInstanceManager dim =
            (DatabaseInstanceManager) testCollection.getService(
                "DatabaseInstanceManager", "1.0");
        dim.shutdown();
        testCollection = null;
        database = null;
        System.out.println("tearDown PASSED");
    }

    // Grouping on a single binary key yields two partitions.
    public void testGroupByOneKey(){
        ResourceSet result;
        String query;
        try {
            XPathQueryService service =
                storeXMLStringAndGetQueryService(BINARYTABLE_XML, binaryTable);
            System.out.println("testGroupBy 1: ========" );
            query = "for $item in //item group $item as $partition by $item/key1 "+
                "as $key1 return count($partition)";
            result = service.queryResource(BINARYTABLE_XML, query );
            printResult(result);
            assertEquals( "XQuery: " + query, 2, result.getSize() );
        }
        catch (Exception e) {
            System.out.println("testGroupByClause : XMLDBException: "+e);
            fail(e.getMessage());
        }
    }

    // Grouping on both binary keys yields four partitions.
    public void testGroupByTwoKeys(){
        ResourceSet result;
        String query;
        try {
            XPathQueryService service =
                storeXMLStringAndGetQueryService(BINARYTABLE_XML, binaryTable);
            System.out.println("testGroupBy 2: ========" );
            query = "for $item in //item group $item as $partition by $item/key1 "+
                "as $key1, $item/key2 as $key2 return count($partition)";
            result = service.queryResource(BINARYTABLE_XML, query );
            printResult(result);
            assertEquals( "XQuery: " + query, 4, result.getSize() );
        }
        catch (Exception e) {
            System.out.println("testGroupByClause : XMLDBException: "+e);
            fail(e.getMessage());
        }
    }

    // The grouping-key variable is usable (and orderable) in the return clause.
    public void testGroupByKeyVariable(){
        ResourceSet result;
        String query;
        XMLResource resu;
        try {
            XPathQueryService service =
                storeXMLStringAndGetQueryService(BINARYTABLE_XML, binaryTable);
            System.out.println("testGroupBy 3: ========" );
            query = "for $item in //item group $item as $partition by $item/key1 "+
                "as $key1 order by $key1 return $key1";
            result = service.queryResource(BINARYTABLE_XML, query );
            printResult(result);
            resu = (XMLResource) result.getResource(0);
            assertEquals( "XQuery: " + query, "0", ((Element)resu.getContentAsDOM()).getNodeValue() );
            resu = (XMLResource) result.getResource(1);
            assertEquals( "XQuery: " + query, "1", ((Element)resu.getContentAsDOM()).getNodeValue() );
        }
        catch (Exception e) {
            System.out.println("testGroupByClause : XMLDBException: "+e);
            fail(e.getMessage());
        }
    }

    public void testGroupByLetVariable(){
        ResourceSet result;
        String query;
        try {
            XPathQueryService service =
                storeXMLStringAndGetQueryService(BINARYTABLE_XML, binaryTable);
            //group by a let variable
            System.out.println("testGroupBy 4: ========" );
            query = "for $item in //item let $k1 := $item/key1 group $item as "+
                "$partition by $k1 as $key1 return count($partition)";
            result = service.queryResource(BINARYTABLE_XML, query );
            printResult(result);
            assertEquals( "XQuery: " + query, 2, result.getSize() );
        }
        catch (Exception e) {
            System.out.println("testGroupByClause : XMLDBException: "+e);
            fail(e.getMessage());
        }
    }

    public void testGroupBySpecialFLWR(){
        ResourceSet result;
        String query;
        try {
            XPathQueryService service =
                storeXMLStringAndGetQueryService(BINARYTABLE_XML, binaryTable);
            //group by in a flwr beginning by a let clause
            System.out.println("testGroupBy 5: ========" );
            query = "let $test := //item/key1 let $brol := //item/key2 "+
                "for $item in //item let $k2 := $item/key2 group $item "+
                "as $partition by $item/key1 as $key1 return count($partition)";
            result = service.queryResource(BINARYTABLE_XML, query );
            printResult(result);
            assertEquals( "XQuery: " + query, 2, result.getSize() );
        }
        catch (Exception e) {
            System.out.println("testGroupByClause : XMLDBException: "+e);
            fail(e.getMessage());
        }
    }

    public void testGroupByGroupedVariable(){
        ResourceSet result;
        String query;
        try {
            XPathQueryService service =
                storeXMLStringAndGetQueryService(BINARYTABLE_XML, binaryTable);
            //test the contents of $partition
            System.out.println("testGroupBy 6: ========" );
            query = "for $item in //item group $item as $partition by $item/key1 "+
                "as $key1, $item/key2 as $key2 order by $key1 descending, "+
                "$key2 descending return <group>{$partition}</group>";
            result = service.queryResource(BINARYTABLE_XML, query );
            printResult(result);
            assertEquals("XQuery: " + query,
                "<group>\n"+
                " <item>\n"+
                " <key1>1</key1>\n"+
                " <key2>1</key2>\n"+
                " </item>\n"+
                " <item>\n"+
                " <key1>1</key1>\n"+
                " <key2>1</key2>\n"+
                " </item>\n"+
                "</group>", ((XMLResource)result.getResource(0)).getContent());
        }
        catch (Exception e) {
            System.out.println("testGroupByClause : XMLDBException: "+e);
            fail(e.getMessage());
        }
    }

    /* in a FLWR, variables bound before the groupBy clause are not in scope after the groupBy clause */
    public void testScope1(){
        ResourceSet result;
        String query;
        try {
            XPathQueryService service =
                storeXMLStringAndGetQueryService(BINARYTABLE_XML, binaryTable);
            //test the contents of $partition
            System.out.println("testGroupBy 7: ========" );
            query = "for $item in //item group $item as $partition by "+
                "$item/key1 as $key1 return <group>{$item}</group>";
            result = service.queryResource(BINARYTABLE_XML, query );
            printResult(result);
            fail("$item variable still in scope !");
        }
        catch (Exception e) {
            //ok, $item is not in scope
        }
    }

    public void testScope2(){
        ResourceSet result;
        String query;
        try {
            XPathQueryService service =
                storeXMLStringAndGetQueryService(BINARYTABLE_XML, binaryTable);
            //test the contents of $partition
            System.out.println("testGroupBy 8: ========" );
            query = "for $item in //item group $item as $partition by $item/key1 "+
                "as $key1 return for $foo in $partition return "+
                "<test>{$foo,$key1}</test>";
            result = service.queryResource(BINARYTABLE_XML, query );
            printResult(result);
            assertEquals( "XQuery: " + query, 8, result.getSize() );
        }
        catch (Exception e) {
            System.out.println("testGroupByClause : XMLDBException: "+e);
            fail(e.getMessage());
        }
    }

    public void testScope3(){
        ResourceSet result;
        String query;
        try {
            XPathQueryService service =
                storeXMLStringAndGetQueryService(BINARYTABLE_XML, binaryTable);
            //test the contents of $partition
            System.out.println("testGroupBy 7: ========" );
            query = "for $item in //item return for $key in $item/key1 group "+
                "$key as $partition by $item/key2 as $key2 return "+
                "<test>{$partition,$item}</test>";
            result = service.queryResource(BINARYTABLE_XML, query );
            printResult(result);
            assertEquals( "XQuery: " + query, 8, result.getSize() );
        }
        catch (Exception e) {
            System.out.println("testGroupByClause : XMLDBException: "+e);
            fail(e.getMessage());
        }
    }

    //test based on Kevin Beyer's publication "Extending XQuery for Analytics", Q11
    //this test uses a recursive function and groups books by all combinations of categories.
    public void testGroupByBeyerQ11(){
        ResourceSet result;
        String query;
        try {
            XPathQueryService service =
                storeXMLStringAndGetQueryService(BEYER_XML, beyer);
            System.out.println("testGroupBy Beyer Q11: ========" );
            query = "declare function local:paths($x as element()*) as xs:string* {\n"+
                "for $i in $x\n"+
                "let $name := fn:local-name-from-QName(fn:node-name($i))\n"+
                "return ($name,\n"+
                " for $j in local:paths($i/*)\n"+
                " return fn:concat($name, \"/\", $j)\n"+
                ")};\n"+
                "for $b in //book\n"+
                "for $c in local:paths($b/categories/*)\n"+
                "group $b as $partition by $c as $category\n"+
                "return\n"+
                "<result><category>{$category}</category> "+
                "<avg-price>{avg($partition/price)}</avg-price></result>\n";
            result = service.queryResource(BEYER_XML, query );
            printResult(result);
            assertEquals( "XQuery: " + query, 5, result.getSize() );
        }
        catch (Exception e) {
            System.out.println("testGroupByClause : XMLDBException: "+e);
            fail(e.getMessage());
        }
    }

    //test based on Kevin Beyer's publication "Extending XQuery for Analytics", Q12
    public void testGroupByBeyerQ12(){
        ResourceSet result;
        String query;
        try {
            XPathQueryService service =
                storeXMLStringAndGetQueryService(BEYER_XML, beyer);
            System.out.println("testGroupBy Beyer Q12: ========" );
            query = "declare function local:cube($dims as item()*) as item()*\n"+
                "{\n"+
                " if (fn:empty($dims)) then <group/>\n"+
                " else for $subgroup in local:cube(fn:subsequence($dims, 2))\n"+
                " return ($subgroup, <group>{$dims[1], $subgroup/*}</group>)\n"+
                "};\n"+
                "for $b in //book\n"+
                "let $pub := <publisher>{$b/publisher}</publisher>\n"+
                "for $cell in local:cube(($pub,$b/year))\n"+
                "group $b as $partition by $cell as $cell2\n"+
                "order by $cell2 \n"+
                "return\n"+
                "<result>\n"+
                " {$cell2}\n"+
                " <avg-price>{avg($partition//price)}</avg-price>\n"+
                "</result>\n";
            result = service.queryResource(BEYER_XML, query );
            printResult(result);
            assertEquals( "XQuery: " + query, 6, result.getSize() );
            assertEquals("XQuery: " + query, "<result>\n" +
                " <group/>\n" +
                " <avg-price>62</avg-price>\n" +
                "</result>", ((XMLResource)result.getResource(0)).getContent() );
            assertEquals("XQuery: " + query,
                "<result>\n" +
                " <group>\n" +
                " <publisher>\n" +
                " <publisher>Morgan Kaufmann</publisher>\n" +
                " </publisher>\n" +
                " <year>1998</year>\n" +
                " </group>\n" +
                " <avg-price>65</avg-price>\n" +
                "</result>"
                , ((XMLResource)result.getResource(5)).getContent() );
        }
        catch (Exception e) {
            System.out.println("testGroupByClause : XMLDBException: "+e);
            fail(e.getMessage());
        }
    }

    public void testHashKey(){
        ResourceSet result;
        String query;
        try {
            XPathQueryService service =
                storeXMLStringAndGetQueryService(ITEMS_XML, items);
            //test if there are two groups (11,1) and (1,11) and not only one
            //bug corrected with the patch 1681499 on subversion tracker
            System.out.println("testGroupBy hashkey: ========" );
            query = "for $item in //item group $item as $partition by $item/key1/text() "+
                "as $key1, $item/key2/text() as $key2" +
                " return <group/>" ;
            result = service.queryResource(ITEMS_XML, query );
            printResult(result);
            assertEquals( "XQuery: " + query, 2, result.getSize() );
        }
        catch (Exception e) {
            System.out.println("testGroupByClause : XMLDBException: "+e);
            fail(e.getMessage());
        }
    }

    /** Stores the given XML string under documentName in the test collection and returns a query service for it. */
    protected XPathQueryService storeXMLStringAndGetQueryService(String documentName,
            String content) throws XMLDBException {
        XMLResource doc =
            (XMLResource) testCollection.createResource(
                documentName, "XMLResource" );
        doc.setContent(content);
        testCollection.storeResource(doc);
        XPathQueryService service =
            (XPathQueryService) testCollection.getService(
                "XPathQueryService",
                "1.0");
        return service;
    }

    /** Variant that loads the document content from a file of the same name (kept for subclasses/external use). */
    protected XPathQueryService storeXMLStringAndGetQueryService(String documentName
            ) throws XMLDBException {
        XMLResource doc =
            (XMLResource) testCollection.createResource(
                documentName, "XMLResource" );
        doc.setContent(new File(documentName));
        testCollection.storeResource(doc);
        XPathQueryService service =
            (XPathQueryService) testCollection.getService(
                "XPathQueryService",
                "1.0");
        return service;
    }

    /** Dumps every resource of the result set to stdout, for manual inspection. */
    protected void printResult(ResourceSet result) throws XMLDBException {
        for (ResourceIterator i = result.getIterator();
                i.hasMoreResources(); ) {
            Resource r = i.nextResource();
            System.out.println(r.getContent());
        }
    }
}
| |
package org.apache.maven.plugins.site;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.File;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.io.FileUtils;
import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
import org.apache.maven.doxia.tools.SiteTool;
import org.apache.maven.execution.DefaultMavenExecutionRequest;
import org.apache.maven.execution.MavenExecutionRequest;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.testing.AbstractMojoTestCase;
import org.apache.maven.plugins.site.SimpleDavServerHandler.HttpRequest;
import org.apache.maven.plugins.site.stubs.SiteMavenProjectStub;
import org.apache.maven.settings.Proxy;
import org.apache.maven.settings.Settings;
import org.codehaus.plexus.util.ReflectionUtils;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Abstract base class for tests that deploy a generated site to an embedded
 * WebDAV server, optionally through an HTTP proxy (with and without proxy
 * authentication). Concrete subclasses supply the mojo under test.
 *
 * @author <a href="mailto:olamy@apache.org">olamy</a>
 * @version $Id$
 */
@RunWith(JUnit4.class)
public abstract class AbstractSiteDeployWebDavTest
    extends AbstractMojoTestCase
{
    /** Directory served by the embedded DAV server, i.e. the deployment target. */
    File siteTargetPath = new File( getBasedir() + File.separator + "target" + File.separator + "siteTargetDeploy" );

    private Logger log = LoggerFactory.getLogger( getClass() );

    @Before
    public void setUp()
        throws Exception
    {
        super.setUp();
        // Ensure the deployment target exists and is empty before each test.
        // The previous version only cleaned the directory right after creating
        // it (a no-op); a pre-existing directory was left dirty.
        if ( !siteTargetPath.exists() )
        {
            siteTargetPath.mkdirs();
        }
        FileUtils.cleanDirectory( siteTargetPath );
    }

    /** @return the name of the mojo (goal) under test */
    abstract String getMojoName();

    /** @return the mojo instance configured from the given plugin configuration file */
    abstract AbstractMojo getMojo( File pluginXmlFile )
        throws Exception;

    /**
     * Deploys the test site to a local DAV server without any authentication
     * and verifies the content arrived and that no proxy headers were used.
     */
    @Test
    public void noAuthzDavDeploy()
        throws Exception
    {
        FileUtils.cleanDirectory( siteTargetPath );
        SimpleDavServerHandler simpleDavServerHandler = new SimpleDavServerHandler( siteTargetPath );
        try
        {
            File pluginXmlFile = getTestFile( "src/test/resources/unit/deploy-dav/pom.xml" );
            AbstractMojo mojo = getMojo( pluginXmlFile );
            assertNotNull( mojo );
            SiteMavenProjectStub siteMavenProjectStub =
                new SiteMavenProjectStub( "src/test/resources/unit/deploy-dav/pom.xml" );
            assertTrue( "dav server port not available: " + simpleDavServerHandler.getPort(),
                        simpleDavServerHandler.getPort() > 0 );
            // Point the distribution site URL at the embedded DAV server.
            siteMavenProjectStub.getDistributionManagement().getSite()
                .setUrl( "dav:http://localhost:" + simpleDavServerHandler.getPort() + "/site/" );
            setVariableValueToObject( mojo, "project", siteMavenProjectStub );
            Settings settings = new Settings();
            setVariableValueToObject( mojo, "settings", settings );
            File inputDirectory = new File( "src/test/resources/unit/deploy-dav/target/site" );
            setVariableValueToObject( mojo, "inputDirectory", inputDirectory );
            mojo.execute();
            assertContentInFiles();
            assertFalse( requestsContainsProxyUse( simpleDavServerHandler.httpRequests ) );
        }
        finally
        {
            simpleDavServerHandler.stop();
        }
    }

    /**
     * Deploys to a remote-looking site URL through a local proxy that requires
     * no authentication, and verifies the requests actually went via the proxy.
     */
    @Test
    public void davDeployThruProxyWithoutAuthzInProxy()
        throws Exception
    {
        FileUtils.cleanDirectory( siteTargetPath );
        SimpleDavServerHandler simpleDavServerHandler = new SimpleDavServerHandler( siteTargetPath );
        try
        {
            File pluginXmlFile = getTestFile( "src/test/resources/unit/deploy-dav/pom.xml" );
            AbstractMojo mojo = getMojo( pluginXmlFile );
            assertNotNull( mojo );
            SiteMavenProjectStub siteMavenProjectStub =
                new SiteMavenProjectStub( "src/test/resources/unit/deploy-dav/pom.xml" );
            // olamy, Note : toto is something like foo or bar for french folks :-)
            String siteUrl = "dav:http://toto.com/site/";
            siteMavenProjectStub.getDistributionManagement().getSite().setUrl( siteUrl );
            setVariableValueToObject( mojo, "project", siteMavenProjectStub );
            Settings settings = new Settings();
            Proxy proxy = new Proxy();
            // dummy proxy that forwards to the embedded DAV server
            proxy.setActive( true );
            proxy.setHost( "localhost" );
            proxy.setPort( simpleDavServerHandler.getPort() );
            proxy.setProtocol( "http" );
            proxy.setNonProxyHosts( "www.google.com|*.somewhere.com" );
            settings.addProxy( proxy );
            setVariableValueToObject( mojo, "settings", settings );
            MavenExecutionRequest request = new DefaultMavenExecutionRequest();
            request.setProxies( Arrays.asList( proxy ) );
            MavenSession mavenSession = new MavenSession( getContainer(), null, request, null );
            setVariableValueToObject( mojo, "mavenSession", mavenSession );
            File inputDirectory = new File( "src/test/resources/unit/deploy-dav/target/site" );
            setVariableValueToObject( mojo, "inputDirectory", inputDirectory );
            mojo.execute();
            assertContentInFiles();
            assertTrue( requestsContainsProxyUse( simpleDavServerHandler.httpRequests ) );
            for ( HttpRequest rq : simpleDavServerHandler.httpRequests )
            {
                log.info( rq.toString() );
            }
        }
        finally
        {
            simpleDavServerHandler.stop();
        }
    }

    /**
     * Deploys through a proxy that requires authentication and verifies both
     * proxy use and that a Proxy-Authorization header was actually sent.
     */
    @Test
    public void davDeployThruProxyWitAuthzInProxy() throws Exception
    {
        FileUtils.cleanDirectory( siteTargetPath );
        Map<String, String> authentications = new HashMap<String, String>();
        authentications.put( "foo", "titi" );
        AuthAsyncProxyServlet servlet = new AuthAsyncProxyServlet( authentications, siteTargetPath );
        SimpleDavServerHandler simpleDavServerHandler = new SimpleDavServerHandler( servlet );
        try
        {
            File pluginXmlFile = getTestFile( "src/test/resources/unit/deploy-dav/pom.xml" );
            AbstractMojo mojo = getMojo( pluginXmlFile );
            assertNotNull( mojo );
            SiteMavenProjectStub siteMavenProjectStub =
                new SiteMavenProjectStub( "src/test/resources/unit/deploy-dav/pom.xml" );
            siteMavenProjectStub.getDistributionManagement().getSite()
                .setUrl( "dav:http://toto.com/site/" );
            setVariableValueToObject( mojo, "project", siteMavenProjectStub );
            Settings settings = new Settings();
            Proxy proxy = new Proxy();
            // dummy proxy with credentials matching the servlet's expectations
            proxy.setActive( true );
            proxy.setHost( "localhost" );
            proxy.setPort( simpleDavServerHandler.getPort() );
            proxy.setProtocol( "dav" );
            proxy.setUsername( "foo" );
            proxy.setPassword( "titi" );
            proxy.setNonProxyHosts( "www.google.com|*.somewhere.com" );
            settings.addProxy( proxy );
            setVariableValueToObject( mojo, "settings", settings );
            MavenExecutionRequest request = new DefaultMavenExecutionRequest();
            request.setProxies( Arrays.asList( proxy ) );
            MavenSession mavenSession = new MavenSession( getContainer(), null, request, null );
            setVariableValueToObject( mojo, "mavenSession", mavenSession );
            File inputDirectory = new File( "src/test/resources/unit/deploy-dav/target/site" );
            // Detect which mojo flavour we are testing: site:deploy exposes
            // "inputDirectory", site:stage-deploy uses staging fields instead.
            if ( ReflectionUtils.getFieldByNameIncludingSuperclasses( "inputDirectory", mojo.getClass() ) != null )
            {
                setVariableValueToObject( mojo, "inputDirectory", inputDirectory );
            }
            else
            {
                ArtifactRepositoryFactory artifactRepositoryFactory = getContainer().lookup( ArtifactRepositoryFactory.class );
                setVariableValueToObject( mojo, "stagingDirectory", inputDirectory );
                setVariableValueToObject( mojo, "reactorProjects", Collections.emptyList() );
                setVariableValueToObject( mojo, "localRepository",
                                          artifactRepositoryFactory.createArtifactRepository( "local", "foo", "default",
                                                                                              null, null ) );
                setVariableValueToObject( mojo, "siteTool", getContainer().lookup( SiteTool.class ) );
                setVariableValueToObject( mojo, "siteDirectory", new File( "foo" ) );
                setVariableValueToObject( mojo, "repositories", Collections.emptyList() );
            }
            mojo.execute();
            assertContentInFiles();
            assertTrue( requestsContainsProxyUse( servlet.httpRequests ) );
            // Previously the helper's boolean result was silently discarded
            // here, so the header check never actually asserted anything.
            assertTrue( "no request contained a Proxy-Authorization header",
                        atLeastOneRequestContainsHeader( servlet.httpRequests, "Proxy-Authorization" ) );
            for ( HttpRequest rq : servlet.httpRequests )
            {
                log.info( rq.toString() );
            }
        }
        finally
        {
            simpleDavServerHandler.stop();
        }
    }

    /**
     * Verifies that the deployed site content landed in the DAV target
     * directory with the expected file contents.
     */
    private void assertContentInFiles()
        throws Exception
    {
        File fileToTest = new File( siteTargetPath, "site" + File.separator + "index.html" );
        assertTrue( fileToTest.exists() );
        String fileContent = FileUtils.readFileToString( fileToTest );
        assertTrue( fileContent.contains( "Welcome to Apache Maven" ) );
        fileToTest = new File( siteTargetPath, "site" + File.separator + "css" + File.separator + "maven-base.css" );
        assertTrue( fileToTest.exists() );
        fileContent = FileUtils.readFileToString( fileToTest );
        assertTrue( fileContent.contains( "background-image: url(../images/collapsed.gif);" ) );
    }

    /**
     * @param requests the recorded HTTP requests
     * @return true if at least one request used the proxy, detected via the
     *         Proxy-Connection HTTP header
     */
    private boolean requestsContainsProxyUse( List<HttpRequest> requests )
    {
        return atLeastOneRequestContainsHeader( requests, "Proxy-Connection" );
    }

    /**
     * @param requests   the recorded HTTP requests
     * @param headerName the header to look for
     * @return true if at least one request carries the given header
     *         (renamed from assertAtLeastOneRequestContainsHeader: it is a
     *         plain predicate, not an assertion)
     */
    private boolean atLeastOneRequestContainsHeader( List<HttpRequest> requests, String headerName )
    {
        for ( HttpRequest rq : requests )
        {
            boolean containsProxyHeader = rq.headers.containsKey( headerName );
            if ( containsProxyHeader )
            {
                return true;
            }
        }
        return false;
    }
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.containerregistry.v2018_02_01_preview.implementation;
import com.microsoft.azure.management.containerregistry.v2018_02_01_preview.Webhook;
import com.microsoft.azure.arm.model.implementation.CreatableUpdatableImpl;
import rx.Observable;
import com.microsoft.azure.management.containerregistry.v2018_02_01_preview.WebhookUpdateParameters;
import java.util.Map;
import java.util.List;
import com.microsoft.azure.management.containerregistry.v2018_02_01_preview.WebhookCreateParameters;
import com.microsoft.azure.management.containerregistry.v2018_02_01_preview.WebhookAction;
import com.microsoft.azure.management.containerregistry.v2018_02_01_preview.ProvisioningState;
import com.microsoft.azure.management.containerregistry.v2018_02_01_preview.WebhookStatus;
import rx.functions.Func1;
/**
 * Fluent implementation of {@link Webhook} for the container registry
 * 2018-02-01-preview API, backing both the create (Definition) and update
 * (Update) fluent flows. Generated by AutoRest.
 *
 * NOTE(review): the with* mutators route into either createParameter or
 * updateParameter depending on isInCreateMode(); both payloads are rebuilt
 * after every successful service call (resetCreateUpdateParameters).
 */
class WebhookImpl extends CreatableUpdatableImpl<Webhook, WebhookInner, WebhookImpl> implements Webhook, Webhook.Definition, Webhook.Update {
    private final ContainerRegistryManager manager;
    // Ancestor coordinates: parsed from the inner model id (existing resource)
    // or supplied via withExistingRegistry (create flow).
    private String resourceGroupName;
    private String registryName;
    private String webhookName;
    // Payload accumulated by the create flow (isInCreateMode() == true).
    private WebhookCreateParameters createParameter;
    // Payload accumulated by the update flow.
    private WebhookUpdateParameters updateParameter;
    // Constructor for the create flow: no id yet, name given explicitly.
    WebhookImpl(String name, ContainerRegistryManager manager) {
        super(name, new WebhookInner());
        this.manager = manager;
        // Set resource name
        this.webhookName = name;
        //
        this.createParameter = new WebhookCreateParameters();
        this.updateParameter = new WebhookUpdateParameters();
    }
    // Constructor for wrapping an existing resource fetched from the service.
    WebhookImpl(WebhookInner inner, ContainerRegistryManager manager) {
        super(inner.name(), inner);
        this.manager = manager;
        // Set resource name
        this.webhookName = inner.name();
        // set resource ancestor and positional variables
        this.resourceGroupName = IdParsingUtils.getValueFromIdByName(inner.id(), "resourceGroups");
        this.registryName = IdParsingUtils.getValueFromIdByName(inner.id(), "registries");
        this.webhookName = IdParsingUtils.getValueFromIdByName(inner.id(), "webhooks");
        //
        this.createParameter = new WebhookCreateParameters();
        this.updateParameter = new WebhookUpdateParameters();
    }
    @Override
    public ContainerRegistryManager manager() {
        return this.manager;
    }
    /**
     * Issues the create call with the accumulated create payload, then clears
     * both payloads so the instance can be reused for further operations.
     */
    @Override
    public Observable<Webhook> createResourceAsync() {
        WebhooksInner client = this.manager().inner().webhooks();
        return client.createAsync(this.resourceGroupName, this.registryName, this.webhookName, this.createParameter)
            .map(new Func1<WebhookInner, WebhookInner>() {
               @Override
               public WebhookInner call(WebhookInner resource) {
                   resetCreateUpdateParameters();
                   return resource;
               }
            })
            .map(innerToFluentMap(this));
    }
    /**
     * Issues the update call with the accumulated update payload, then clears
     * both payloads.
     */
    @Override
    public Observable<Webhook> updateResourceAsync() {
        WebhooksInner client = this.manager().inner().webhooks();
        return client.updateAsync(this.resourceGroupName, this.registryName, this.webhookName, this.updateParameter)
            .map(new Func1<WebhookInner, WebhookInner>() {
               @Override
               public WebhookInner call(WebhookInner resource) {
                   resetCreateUpdateParameters();
                   return resource;
               }
            })
            .map(innerToFluentMap(this));
    }
    @Override
    protected Observable<WebhookInner> getInnerAsync() {
        WebhooksInner client = this.manager().inner().webhooks();
        return client.getAsync(this.resourceGroupName, this.registryName, this.webhookName);
    }
    // An inner model without an id has never been persisted -> create mode.
    @Override
    public boolean isInCreateMode() {
        return this.inner().id() == null;
    }
    private void resetCreateUpdateParameters() {
        this.createParameter = new WebhookCreateParameters();
        this.updateParameter = new WebhookUpdateParameters();
    }
    @Override
    public List<WebhookAction> actions() {
        return this.inner().actions();
    }
    @Override
    public String id() {
        return this.inner().id();
    }
    @Override
    public String location() {
        return this.inner().location();
    }
    @Override
    public String name() {
        return this.inner().name();
    }
    @Override
    public ProvisioningState provisioningState() {
        return this.inner().provisioningState();
    }
    @Override
    public String scope() {
        return this.inner().scope();
    }
    @Override
    public WebhookStatus status() {
        return this.inner().status();
    }
    @Override
    public Map<String, String> tags() {
        return this.inner().getTags();
    }
    @Override
    public String type() {
        return this.inner().type();
    }
    // ----- fluent mutators: create-only -----
    @Override
    public WebhookImpl withExistingRegistry(String resourceGroupName, String registryName) {
        this.resourceGroupName = resourceGroupName;
        this.registryName = registryName;
        return this;
    }
    @Override
    public WebhookImpl withLocation(String location) {
        this.createParameter.withLocation(location);
        return this;
    }
    // ----- fluent mutators shared by create and update flows -----
    @Override
    public WebhookImpl withActions(List<WebhookAction> actions) {
        if (isInCreateMode()) {
            this.createParameter.withActions(actions);
        } else {
            this.updateParameter.withActions(actions);
        }
        return this;
    }
    @Override
    public WebhookImpl withServiceUri(String serviceUri) {
        if (isInCreateMode()) {
            this.createParameter.withServiceUri(serviceUri);
        } else {
            this.updateParameter.withServiceUri(serviceUri);
        }
        return this;
    }
    @Override
    public WebhookImpl withCustomHeaders(Map<String, String> customHeaders) {
        if (isInCreateMode()) {
            this.createParameter.withCustomHeaders(customHeaders);
        } else {
            this.updateParameter.withCustomHeaders(customHeaders);
        }
        return this;
    }
    @Override
    public WebhookImpl withScope(String scope) {
        if (isInCreateMode()) {
            this.createParameter.withScope(scope);
        } else {
            this.updateParameter.withScope(scope);
        }
        return this;
    }
    @Override
    public WebhookImpl withStatus(WebhookStatus status) {
        if (isInCreateMode()) {
            this.createParameter.withStatus(status);
        } else {
            this.updateParameter.withStatus(status);
        }
        return this;
    }
    @Override
    public WebhookImpl withTags(Map<String, String> tags) {
        if (isInCreateMode()) {
            this.createParameter.withTags(tags);
        } else {
            this.updateParameter.withTags(tags);
        }
        return this;
    }
}
| |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.pivotal.gemfirexd.app.tpce.jpa.entity;
import java.io.Serializable;
import javax.persistence.*;
import java.util.Date;
import java.util.Set;
/**
 * The persistent class for the CUSTOMER database table (TPC-E schema):
 * fixed customer attributes (name, tax id, tier, date of birth), up to
 * three phone numbers and two e-mail addresses, plus associations to
 * address, status type, tax rates, customer accounts and watch lists.
 */
@Entity
@Table(name="CUSTOMER")
public class Customer implements Serializable {
    private static final long serialVersionUID = 1L;
    // Primary key.
    @Id
    @Column(name="C_ID", unique=true, nullable=false)
    private long cId;
    // Phone numbers are stored decomposed as country/area/local/extension
    // triples (C_*_1..3), one group per phone number.
    @Column(name="C_AREA_1", length=3)
    private String cArea1;
    @Column(name="C_AREA_2", length=3)
    private String cArea2;
    @Column(name="C_AREA_3", length=3)
    private String cArea3;
    @Column(name="C_CTRY_1", length=3)
    private String cCtry1;
    @Column(name="C_CTRY_2", length=3)
    private String cCtry2;
    @Column(name="C_CTRY_3", length=3)
    private String cCtry3;
    // Date of birth, persisted as a DATE (no time component).
    @Temporal(TemporalType.DATE)
    @Column(name="C_DOB", nullable=false)
    private Date cDob;
    @Column(name="C_EMAIL_1", length=50)
    private String cEmail1;
    @Column(name="C_EMAIL_2", length=50)
    private String cEmail2;
    @Column(name="C_EXT_1", length=5)
    private String cExt1;
    @Column(name="C_EXT_2", length=5)
    private String cExt2;
    @Column(name="C_EXT_3", length=5)
    private String cExt3;
    @Column(name="C_F_NAME", nullable=false, length=20)
    private String cFName;
    @Column(name="C_GNDR", length=1)
    private String cGndr;
    @Column(name="C_L_NAME", nullable=false, length=25)
    private String cLName;
    @Column(name="C_LOCAL_1", length=10)
    private String cLocal1;
    @Column(name="C_LOCAL_2", length=10)
    private String cLocal2;
    @Column(name="C_LOCAL_3", length=10)
    private String cLocal3;
    @Column(name="C_M_NAME", length=1)
    private String cMName;
    @Column(name="C_TAX_ID", nullable=false, length=20)
    private String cTaxId;
    @Column(name="C_TIER", nullable=false)
    private short cTier;
    //If only address c_ad_id is needed, defining this field will avoid invoking proxy to get its value
    // (read-only mirror of the C_AD_ID FK column; the association below owns it)
    @Column(name="C_AD_ID", nullable=false, insertable=false, updatable=false)
    private long cAdId;
    //bi-directional many-to-one association to Address
    @ManyToOne(fetch=FetchType.LAZY)
    @JoinColumn(name="C_AD_ID", nullable=false)
    private Address address;
    // Read-only mirror of the C_ST_ID FK column (see cAdId above for rationale).
    @Column(name="C_ST_ID", nullable=false, insertable=false, updatable=false)
    private String cStId;
    //bi-directional many-to-one association to StatusType
    @ManyToOne(fetch=FetchType.LAZY)
    @JoinColumn(name="C_ST_ID", nullable=false)
    private StatusType statusType;
    //bi-directional many-to-many association to Taxrate
    // (join table CUSTOMER_TAXRATE maps customer id to taxrate id)
    @ManyToMany
    @JoinTable(
        name="CUSTOMER_TAXRATE"
        , joinColumns={
            @JoinColumn(name="CX_C_ID", nullable=false)
            }
        , inverseJoinColumns={
            @JoinColumn(name="CX_TX_ID", nullable=false)
            }
        )
    private Set<Taxrate> taxrates;
    //bi-directional many-to-one association to CustomerAccount
    @OneToMany(mappedBy="customer")
    private Set<CustomerAccount> customerAccounts;
    //bi-directional many-to-one association to WatchList
    @OneToMany(mappedBy="customer")
    private Set<WatchList> watchLists;
    // No-arg constructor required by JPA.
    public Customer() {
    }
    // ----- plain bean accessors (no additional logic) -----
    public long getCId() {
        return this.cId;
    }
    public void setCId(long cId) {
        this.cId = cId;
    }
    public String getCArea1() {
        return this.cArea1;
    }
    public void setCArea1(String cArea1) {
        this.cArea1 = cArea1;
    }
    public String getCArea2() {
        return this.cArea2;
    }
    public void setCArea2(String cArea2) {
        this.cArea2 = cArea2;
    }
    public String getCArea3() {
        return this.cArea3;
    }
    public void setCArea3(String cArea3) {
        this.cArea3 = cArea3;
    }
    public String getCCtry1() {
        return this.cCtry1;
    }
    public void setCCtry1(String cCtry1) {
        this.cCtry1 = cCtry1;
    }
    public String getCCtry2() {
        return this.cCtry2;
    }
    public void setCCtry2(String cCtry2) {
        this.cCtry2 = cCtry2;
    }
    public String getCCtry3() {
        return this.cCtry3;
    }
    public void setCCtry3(String cCtry3) {
        this.cCtry3 = cCtry3;
    }
    public Date getCDob() {
        return this.cDob;
    }
    public void setCDob(Date cDob) {
        this.cDob = cDob;
    }
    public String getCEmail1() {
        return this.cEmail1;
    }
    public void setCEmail1(String cEmail1) {
        this.cEmail1 = cEmail1;
    }
    public String getCEmail2() {
        return this.cEmail2;
    }
    public void setCEmail2(String cEmail2) {
        this.cEmail2 = cEmail2;
    }
    public String getCExt1() {
        return this.cExt1;
    }
    public void setCExt1(String cExt1) {
        this.cExt1 = cExt1;
    }
    public String getCExt2() {
        return this.cExt2;
    }
    public void setCExt2(String cExt2) {
        this.cExt2 = cExt2;
    }
    public String getCExt3() {
        return this.cExt3;
    }
    public void setCExt3(String cExt3) {
        this.cExt3 = cExt3;
    }
    public String getCFName() {
        return this.cFName;
    }
    public void setCFName(String cFName) {
        this.cFName = cFName;
    }
    public String getCGndr() {
        return this.cGndr;
    }
    public void setCGndr(String cGndr) {
        this.cGndr = cGndr;
    }
    public String getCLName() {
        return this.cLName;
    }
    public void setCLName(String cLName) {
        this.cLName = cLName;
    }
    public String getCLocal1() {
        return this.cLocal1;
    }
    public void setCLocal1(String cLocal1) {
        this.cLocal1 = cLocal1;
    }
    public String getCLocal2() {
        return this.cLocal2;
    }
    public void setCLocal2(String cLocal2) {
        this.cLocal2 = cLocal2;
    }
    public String getCLocal3() {
        return this.cLocal3;
    }
    public void setCLocal3(String cLocal3) {
        this.cLocal3 = cLocal3;
    }
    public String getCMName() {
        return this.cMName;
    }
    public void setCMName(String cMName) {
        this.cMName = cMName;
    }
    public String getCTaxId() {
        return this.cTaxId;
    }
    public void setCTaxId(String cTaxId) {
        this.cTaxId = cTaxId;
    }
    public short getCTier() {
        return this.cTier;
    }
    public void setCTier(short cTier) {
        this.cTier = cTier;
    }
    public Address getAddress() {
        return this.address;
    }
    public void setAddress(Address address) {
        this.address = address;
    }
    public long getCAdId() {
        return this.cAdId;
    }
    public void setCAdId(long cAdId) {
        this.cAdId = cAdId;
    }
    public StatusType getStatusType() {
        return this.statusType;
    }
    public void setStatusType(StatusType statusType) {
        this.statusType = statusType;
    }
    public String getCStId() {
        return this.cStId;
    }
    public void setCStId(String cStId) {
        this.cStId = cStId;
    }
    public Set<Taxrate> getTaxrates() {
        return this.taxrates;
    }
    public void setTaxrates(Set<Taxrate> taxrates) {
        this.taxrates = taxrates;
    }
    public Set<CustomerAccount> getCustomerAccounts() {
        return this.customerAccounts;
    }
    public void setCustomerAccounts(Set<CustomerAccount> customerAccounts) {
        this.customerAccounts = customerAccounts;
    }
    public Set<WatchList> getWatchLists() {
        return this.watchLists;
    }
    public void setWatchLists(Set<WatchList> watchLists) {
        this.watchLists = watchLists;
    }
}
| |
/*
Derby - Class org.apache.derby.impl.sql.compile.IsNullNode
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.impl.sql.compile;
import java.sql.Types;
import org.apache.derby.iapi.error.StandardException;
import org.apache.derby.iapi.reference.ClassName;
import org.apache.derby.iapi.services.compiler.MethodBuilder;
import org.apache.derby.iapi.services.context.ContextManager;
import org.apache.derby.shared.common.sanity.SanityManager;
import org.apache.derby.iapi.sql.compile.Optimizable;
import org.apache.derby.iapi.store.access.ScanController;
import org.apache.derby.iapi.types.DataTypeDescriptor;
import org.apache.derby.iapi.types.DataValueDescriptor;
import org.apache.derby.iapi.types.Orderable;
import org.apache.derby.iapi.types.TypeId;
/**
 * This node represents either a unary
 * IS NULL or IS NOT NULL comparison operator.
 *
 * The two variants share one node class, distinguished by the mutable
 * {@link #notNull} flag; IS NULL additionally participates in index
 * start/stop key optimization as an equality-with-NULL comparison.
 */
public final class IsNullNode extends UnaryComparisonOperatorNode
            implements RelationalOperator
{
    // Lazily-built NULL value used as the comparison constant, see getCompareValue().
    private DataValueDescriptor nullValue;
    /**
     * If {@code true}, this node represents a NOT NULL node rather than a
     * NULL node. Note that this state is mutable, cf {@link #getNegation}.
     */
    private boolean notNull;
    IsNullNode(ValueNode operand, boolean notNull, ContextManager cm)
            throws StandardException {
        super(operand, cm);
        this.notNull = notNull;
        updateOperatorDetails();
    }
    // Keeps the printable operator name and generated method name in sync
    // with the current notNull state.
    private void updateOperatorDetails()
    {
        setOperator(notNull ? "is not null" : "is null");
        setMethodName(notNull ? "isNotNull" : "isNullOp");
    }
    /**
     * Negate the comparison.
     *
     * NOTE(review): unlike most operators this mutates the receiver in place
     * (flips notNull) and returns {@code this} rather than a new node.
     *
     * @param operand The operand of the operator
     *
     * @return UnaryOperatorNode The negated expression
     *
     * @exception StandardException Thrown on error
     */
    UnaryOperatorNode getNegation(ValueNode operand)
                throws StandardException
    {
        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(getTypeServices() != null,
                            "dataTypeServices is expected to be non-null");
        }
        notNull = !notNull;
        updateOperatorDetails();
        return this;
    }
    /**
     * Bind a ? parameter operand of the IS [NOT] NULL predicate.
     *
     * @exception StandardException Thrown on error
     */
    @Override
    void bindParameter()
            throws StandardException
    {
        /*
        ** If IS [NOT] NULL has a ? operand, we assume
        ** its type is varchar with the implementation-defined maximum length
        ** for a varchar.
        ** Also, for IS [NOT] NULL, it doesn't matter what is VARCHAR's
        ** collation (since for NULL check, no collation sensitive processing
        ** is required) and hence we will not worry about the collation setting
        */
        operand.setType(new DataTypeDescriptor(TypeId.getBuiltInTypeId(Types.VARCHAR), true));
    }
    /* RelationalOperator interface */
    /** @see RelationalOperator#usefulStartKey */
    public boolean usefulStartKey(Optimizable optTable)
    {
        // IS NULL is start/stop key, IS NOT NULL is not
        return (isNullNode());
    }
    /** @see RelationalOperator#usefulStopKey */
    public boolean usefulStopKey(Optimizable optTable)
    {
        // IS NULL is start/stop key, IS NOT NULL is not
        return (isNullNode());
    }
    /** @see RelationalOperator#getStartOperator */
    @Override
    public int getStartOperator(Optimizable optTable)
    {
        // Only reachable for IS NULL (see usefulStartKey).
        if (SanityManager.DEBUG) {
            if (notNull) {
                SanityManager.THROWASSERT("NOT NULL not expected here");
            }
        }
        return ScanController.GE;
    }
    /** @see RelationalOperator#getStopOperator */
    @Override
    public int getStopOperator(Optimizable optTable)
    {
        // Only reachable for IS NULL (see usefulStopKey).
        if (SanityManager.DEBUG) {
            if (notNull) {
                SanityManager.THROWASSERT("NOT NULL not expected here");
            }
        }
        return ScanController.GT;
    }
    /** @see RelationalOperator#generateOperator */
    public void generateOperator(MethodBuilder mb,
                                    Optimizable optTable)
    {
        // IS NULL scans compare for equality against the NULL constant.
        mb.push(Orderable.ORDER_OP_EQUALS);
    }
    /** @see RelationalOperator#generateNegate */
    public void generateNegate(MethodBuilder mb,
                                    Optimizable optTable)
    {
        mb.push(notNull);
    }
    /** @see RelationalOperator#getOperator */
    public int getOperator()
    {
        return notNull ? IS_NOT_NULL_RELOP : IS_NULL_RELOP;
    }
    /** @see RelationalOperator#compareWithKnownConstant */
    public boolean compareWithKnownConstant(Optimizable optTable, boolean considerParameters)
    {
        // The comparison constant is always the NULL value itself.
        return true;
    }
    /**
     * @see RelationalOperator#getCompareValue
     *
     * @exception StandardException Thrown on error
     */
    public DataValueDescriptor getCompareValue(Optimizable optTable)
                    throws StandardException
    {
        // Lazily create a NULL of the operand's type and cache it.
        if (nullValue == null)
        {
            nullValue = operand.getTypeServices().getNull();
        }
        return nullValue;
    }
    /** @see RelationalOperator#equalsComparisonWithConstantExpression */
    public boolean equalsComparisonWithConstantExpression(Optimizable optTable)
    {
        // Always return false for NOT NULL
        if (notNull)
        {
            return false;
        }
        boolean retval = false;
        /*
        ** Is the operand a column in the given table?
        */
        if (operand instanceof ColumnReference)
        {
            int tabNum = ((ColumnReference) operand).getTableNumber();
            if (optTable.hasTableNumber() &&
                (optTable.getTableNumber() == tabNum))
            {
                retval = true;
            }
        }
        return retval;
    }
    /**
     * @see RelationalOperator#getTransitiveSearchClause
     *
     * @exception StandardException thrown on error
     */
    public RelationalOperator getTransitiveSearchClause(ColumnReference otherCR)
            throws StandardException
    {
        return new IsNullNode(otherCR, notNull, getContextManager());
    }
    /**
     * null operators are defined on DataValueDescriptor.
     * Overrides method in UnaryOperatorNode for code generation purposes.
     */
    @Override
    String getReceiverInterfaceName() {
        return ClassName.DataValueDescriptor;
    }
    @Override
    public double selectivity(Optimizable optTable)
    {
        if (notNull) {
            /* IS NOT NULL is like <>, so should have same selectivity */
            return 0.9d;
        } else {
            /* IS NULL is like =, so should have the same selectivity */
            return 0.1d;
        }
    }
    // True for the IS NULL variant, false for IS NOT NULL.
    boolean isNullNode()
    {
        return !notNull;
    }
    /** @see ValueNode#isRelationalOperator */
    @Override
    boolean isRelationalOperator()
    {
        return true;
    }
    /** @see ValueNode#optimizableEqualityNode */
    @Override
    boolean optimizableEqualityNode(Optimizable optTable,
                                    int columnNumber,
                                    boolean isNullOkay)
    {
        // Only IS NULL (and only when NULLs are acceptable) can act as an
        // equality predicate, and only on a column of the given table.
        if (!isNullNode() || !isNullOkay)
            return false;
        ColumnReference cr = getColumnOperand(optTable,
                                              columnNumber);
        if (cr == null)
        {
            return false;
        }
        return true;
    }
}
| |
package com.littlesparkle.growler.core.ui.view;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.MotionEvent;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.RelativeLayout;

import com.littlesparkle.growler.core.R;

import java.util.Locale;
/*
* Copyright (C) 2016-2016, The Little-Sparkle Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
public class ElegantNumberButton extends RelativeLayout implements View.OnFocusChangeListener,
TextWatcher {
private Context context;
private AttributeSet attrs;
private int styleAttr;
private OnClickListener mListener;
private float initialNumber;
private float lastNumber;
private float currentNumber;
private int finalNumber;
private float stepNumber;
private EditText mEditText;
private View view;
private OnValueChangeListener mOnValueChangeListener;
public ElegantNumberButton(Context context) {
super(context);
this.context = context;
initView();
}
public ElegantNumberButton(Context context, AttributeSet attrs) {
super(context, attrs);
this.context = context;
this.attrs = attrs;
initView();
}
public ElegantNumberButton(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
this.context = context;
this.attrs = attrs;
this.styleAttr = defStyleAttr;
initView();
}
private void initView() {
this.view = this;
inflate(context, R.layout.elegant_number_button, this);
final Resources res = getResources();
final int defaultColor = res.getColor(R.color.button_request_enabled);
final int defaultTextColor = res.getColor(R.color.text_color_title);
TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.ElegantNumberButton,
styleAttr, 0);
initialNumber = a.getInt(R.styleable.ElegantNumberButton_initialNumber, 0);
finalNumber = a.getInt(R.styleable.ElegantNumberButton_finalNumber, Integer.MAX_VALUE);
stepNumber = a.getFloat(R.styleable.ElegantNumberButton_stepNumber, 1f);
float textSize = a.getDimension(R.styleable.ElegantNumberButton_textSize, 13);
int color = a.getColor(R.styleable.ElegantNumberButton_backGroundColor, defaultColor);
int textColor = a.getColor(R.styleable.ElegantNumberButton_textColor, defaultTextColor);
Drawable drawable = a.getDrawable(R.styleable.ElegantNumberButton_backgroundDrawable);
Button button1 = (Button) findViewById(R.id.subtract_btn);
Button button2 = (Button) findViewById(R.id.add_btn);
mEditText = (EditText) findViewById(R.id.number_counter);
mEditText.setOnFocusChangeListener(this);
RelativeLayout mLayout = (RelativeLayout) findViewById(R.id.layout);
button1.setTextColor(textColor);
button2.setTextColor(textColor);
mEditText.setTextColor(textColor);
button1.setTextSize(textSize);
button2.setTextSize(textSize);
mEditText.setTextSize(textSize);
assert drawable != null;
if (Build.VERSION.SDK_INT > 16)
mLayout.setBackground(drawable);
else
mLayout.setBackgroundDrawable(drawable);
mEditText.addTextChangedListener(this);
mEditText.setText(String.valueOf(initialNumber));
// mEditText.setEnabled(false);
currentNumber = initialNumber;
lastNumber = initialNumber;
button1.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View mView) {
float num = 0;
try {
num = Float.valueOf(mEditText.getText().toString());
} catch (NumberFormatException e) {
}
setNumber(String.format("%.2f", num - stepNumber), true);
}
});
button2.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View mView) {
float num = 0;
try {
num = Float.valueOf(mEditText.getText().toString());
} catch (NumberFormatException e) {
}
setNumber(String.format("%.2f", num + stepNumber), true);
}
});
a.recycle();
}
private void callListener(View view) {
if (mListener != null) {
mListener.onClick(view);
}
if (mOnValueChangeListener != null) {
if (lastNumber != currentNumber) {
mOnValueChangeListener.onValueChange(this, lastNumber, currentNumber);
}
}
}
public String getNumber() {
checkNumber(mEditText.getText().toString());
return mEditText.getText().toString();
}
// public void unFocus() {
// mEditText.clearFocus();
// mEditText.setFocusable(false);
// }
public void setNumber(String number) {
lastNumber = currentNumber;
this.currentNumber = Float.parseFloat(number);
if (this.currentNumber > finalNumber) {
this.currentNumber = finalNumber;
}
if (this.currentNumber < initialNumber) {
this.currentNumber = initialNumber;
}
mEditText.setText(String.valueOf(currentNumber));
}
public void setNumber(String number, boolean notifyListener) {
setNumber(number);
if (notifyListener) {
callListener(this);
}
}
/**
 * Registers the listener invoked on every +/- button press.
 *
 * @param listener the click listener, or null to clear it
 */
public void setOnClickListener(OnClickListener listener) {
    mListener = listener;
}
/**
 * Registers the listener invoked whenever the value actually changes.
 *
 * @param listener the value-change listener, or null to clear it
 */
public void setOnValueChangeListener(OnValueChangeListener listener) {
    this.mOnValueChangeListener = listener;
}
@Override
public void onFocusChange(View v, boolean hasFocus) {
    // Only act when focus is lost: normalize whatever the user typed.
    if (hasFocus) {
        return;
    }
    checkNumber(mEditText.getText().toString());
}
@Override
public void beforeTextChanged(CharSequence s, int start, int count, int after) {
    // Intentionally empty: validation happens in onTextChanged/checkNumber.
}
@Override
public void onTextChanged(CharSequence s, int start, int before, int count) {
    // Live-validates the user's typing: values above the maximum snap back
    // to finalNumber, and entries with more than two decimal digits are
    // re-formatted. Note: setNumber() calls setText(), which re-enters this
    // watcher; the re-entrant pass sees an already-normalized value.
    if (!"".equals(s.toString())) {
        try {
            Float aFloat = Float.valueOf(s.toString());
            if (aFloat > finalNumber) {
                setNumber(String.format("%.2f", finalNumber * 1f), true);
            }
            // More than two digits after the last '.' — this also triggers
            // for 3+ character input with no '.' at all (lastIndexOf == -1),
            // which forces the "%.2f" two-decimal format onto it.
            if (s.length() - s.toString().lastIndexOf(".") > 3) {
                setNumber(String.format("%.2f", aFloat), true);
            }
        } catch (NumberFormatException e) {
            // Partial input such as "-" or "." is not parseable yet; ignore
            // and wait for further typing (checkNumber handles final state).
        }
    }
}
@Override
public void afterTextChanged(Editable s) {
    // Intentionally empty: validation happens in onTextChanged/checkNumber.
}
/**
 * Callback fired on every increment/decrement button press
 * (regardless of whether the value actually changed).
 */
public interface OnClickListener {
    void onClick(View view);
}
/**
 * Callback fired only when the displayed value actually changes,
 * carrying both the old and the new value.
 */
public interface OnValueChangeListener {
    void onValueChange(ElegantNumberButton view, float oldValue, float newValue);
}
/**
 * Redefines the allowed value range.
 * NOTE(review): the current value is not re-clamped here — it is only
 * clamped on the next setNumber() call. Passing null unboxes to a
 * NullPointerException; verify callers never pass null.
 *
 * @param min new lower bound (inclusive)
 * @param max new upper bound (inclusive)
 */
public void setRange(Integer min, Integer max) {
    initialNumber = min;
    finalNumber = max;
}
@Override
public boolean dispatchTouchEvent(MotionEvent ev) {
    // Any touch inside the widget moves focus to the text field first,
    // then the event is dispatched normally.
    mEditText.requestFocus();
    return super.dispatchTouchEvent(ev);
}
/**
 * Validates the given text and writes a normalized "%.2f" value back to
 * the field. Empty, unparseable, negative, or out-of-range input resets
 * the field to "0.00". A null argument is ignored.
 *
 * NOTE(review): String.format uses the default locale; in locales with a
 * comma decimal separator the produced text ("1,00") would not survive a
 * later Float.parseFloat — confirm the app pins a compatible locale.
 *
 * @param str the text to validate (typically the edit field's content)
 */
private void checkNumber(String str) {
    if (str == null) {
        return;
    }
    if (str.isEmpty()) {
        // Reset once and stop. (Previously this fell through into
        // Float.valueOf(""), whose NumberFormatException triggered a
        // second reset — firing the click listener twice.)
        setNumber("0.00", true);
        return;
    }
    try {
        float value = Float.parseFloat(str);
        if (value >= 0 && value <= finalNumber) {
            setNumber(String.format("%.2f", value), true);
        } else {
            setNumber("0.00", true);
        }
    } catch (NumberFormatException e) {
        // Unparseable input: fall back to zero.
        setNumber("0.00", true);
    }
}
}
| |
/**
* ComplexCurrencyTable.java
* created: 28.08.2005 15:04:10
* (c) 2005 by <a href="http://Wolschon.biz">Wolschon Softwaredesign und Beratung</a>
*/
package biz.wolschon.finance;
//automatically created logger for debug and error -output
import java.io.Serializable;

import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import biz.wolschon.fileformats.gnucash.GnucashFile;
import biz.wolschon.numbers.FixedPointNumber;
/**
* (c) 2005 by <a href="http://Wolschon.biz>Wolschon Softwaredesign und Beratung</a>.<br/>
* Project: gnucashReader<br/>
* ComplexCurrencyTable.java<br/>
* created: 28.08.2005 15:04:10 <br/>
*<br/>
*
* Currency-Table that can work with multiple namespaces.<br/>
* By default "ISO4217"-GnucashFile.getDefaultCurrencyID() is added with the value 1. (to be used as a base.currency)
*
* @author <a href="mailto:Marcus@Wolschon.biz">Marcus Wolschon</a>
* @see GnucashFile#getDefaultCurrencyID()
*/
public class ComplexCurrencyTable extends SimpleCurrencyTable implements Serializable {

    // NOTE(review): no explicit serialVersionUID is declared; the JVM derives
    // one from the class shape. Consider declaring one if serialized
    // instances must stay compatible across releases.

    /**
     * Listener that is informed whenever a conversion-factor changes.
     */
    public interface ComplexCurrencyTableChangeListener {
        void conversionFactorChanged(final String namespace, final String currency, final FixedPointNumber factor);
    }

    /**
     * Registered change-listeners. Lazily created and transient, so
     * listeners are not serialized together with the table.
     * A CopyOnWriteArrayList is used so listeners can be added or removed
     * while fireCurrencyTableChanged() is iterating, without a
     * ConcurrentModificationException.
     */
    private transient volatile List<ComplexCurrencyTableChangeListener> listeners = null;

    /**
     * Register a listener to be informed about changed conversion-factors.
     *
     * @param listener the listener to add
     */
    public void addComplexCurrencyTableChangeListener(final ComplexCurrencyTableChangeListener listener) {
        if (listeners == null) {
            listeners = new CopyOnWriteArrayList<ComplexCurrencyTableChangeListener>();
        }
        listeners.add(listener);
    }

    /**
     * Unregister a previously added listener. Does nothing if the listener
     * was never registered.
     *
     * @param listener the listener to remove
     */
    public void removeComplexCurrencyTableChangeListener(final ComplexCurrencyTableChangeListener listener) {
        // Nothing registered yet -> nothing to remove. (The previous code
        // allocated a fresh empty list here only to remove from it.)
        if (listeners != null) {
            listeners.remove(listener);
        }
    }

    /**
     * Inform all registered listeners about a changed conversion-factor.
     *
     * @param namespace the namespace of the changed currency
     * @param currency the currency whose factor changed
     * @param factor the new conversion-factor
     */
    protected void fireCurrencyTableChanged(final String namespace, final String currency, final FixedPointNumber factor) {
        if (listeners != null) {
            for (ComplexCurrencyTableChangeListener listener : listeners) {
                listener.conversionFactorChanged(namespace, currency, factor);
            }
        }
    }

    /**
     * Maps a namespace-name (e.g. "ISO4217" or "FUND") to the
     * currency-table holding that namespace's conversion-factors.
     * Lazily created.
     */
    private Map<String, SimpleCurrencyTable> namespace2CurrencyTable;

    /**
     * Automatically created logger for debug and error-output.
     */
    private static final Log LOGGER = LogFactory.getLog(ComplexCurrencyTable.class);

    /**
     * Just an overridden ToString to return this classe's name
     * and hashCode.
     *
     * @return className and hashCode
     */
    @Override
    public String toString() {
        return "ComplexCurrencyTable@" + hashCode();
    }

    /**
     * Add a new namespace with no conversion-factors.<br/>
     * Will not overwrite an existing namespace.
     *
     * @param namespace the new namespace to add.
     */
    public void addNameSpace(final String namespace) {
        if (getNamespace(namespace) != null) {
            return;
        }
        SimpleCurrencyTable currencyTable = new SimpleCurrencyTable();
        currencyTable.clear();
        addNameSpace(namespace, currencyTable);
    }

    /**
     * Add a new namespace with an initial set of conversion-factors.
     *
     * @param namespace the new namespace to add.
     * @param values an initial set of conversion-factors.
     */
    public void addNameSpace(final String namespace,
                             final SimpleCurrencyTable values) {
        if (namespace2CurrencyTable == null) {
            namespace2CurrencyTable = new HashMap<String, SimpleCurrencyTable>();
        }
        namespace2CurrencyTable.put(namespace, values);
    }

    /**
     * Forget all conversion-factors in all namespaces.
     *
     * @see biz.wolschon.finance.SimpleCurrencyTable#clear()
     */
    @Override
    public void clear() {
        super.clear();
        if (namespace2CurrencyTable == null) {
            // Typed map instead of the raw "new HashMap()" used before.
            namespace2CurrencyTable = new HashMap<String, SimpleCurrencyTable>();
        }
        namespace2CurrencyTable.clear();
    }

    /**
     * Convert from the base-currency assuming the "ISO4217" namespace.
     *
     * @see biz.wolschon.finance.SimpleCurrencyTable#convertFromBaseCurrency(biz.wolschon.numbers.FixedPointNumber, java.lang.String)
     */
    @Override
    public boolean convertFromBaseCurrency(FixedPointNumber pValue, String pIso4217CurrencyCode) {
        if (pIso4217CurrencyCode == null) {
            throw new IllegalArgumentException("null currency-id given!");
        }
        return convertFromBaseCurrency("ISO4217", pValue, pIso4217CurrencyCode);
    }

    /**
     * Convert the given value (in place) into the base-currency.
     *
     * @param namespace e.g. "ISO4217"
     * @param pValue the value to convert (modified in place)
     * @param pIso4217CurrencyCode the currency within the namespace
     * @return false if the namespace or currency is unknown
     * @see SimpleCurrencyTable#convertToBaseCurrency(FixedPointNumber, String)
     */
    public boolean convertToBaseCurrency(final String namespace,
                                         final FixedPointNumber pValue,
                                         final String pIso4217CurrencyCode) {
        if (namespace == null) {
            throw new IllegalArgumentException("null namepace given!");
        }
        if (pIso4217CurrencyCode == null) {
            throw new IllegalArgumentException("null currency-id given!");
        }
        SimpleCurrencyTable table = getNamespace(namespace);
        if (table == null) {
            return false;
        }
        return table.convertToBaseCurrency(pValue, pIso4217CurrencyCode);
    }

    /**
     * Convert the given value (in place) from the base-currency.
     *
     * @param namespace e.g. "ISO4217"
     * @param pValue the value to convert (modified in place)
     * @param pIso4217CurrencyCode the currency within the namespace
     * @return false if the namespace or currency is unknown
     * @see SimpleCurrencyTable#convertFromBaseCurrency(FixedPointNumber, String)
     */
    public boolean convertFromBaseCurrency(final String namespace,
                                           final FixedPointNumber pValue,
                                           final String pIso4217CurrencyCode) {
        if (namespace == null) {
            throw new IllegalArgumentException("null namepace given!");
        }
        if (pIso4217CurrencyCode == null) {
            throw new IllegalArgumentException("null currency-id given!");
        }
        SimpleCurrencyTable table = getNamespace(namespace);
        if (table == null) {
            return false;
        }
        return table.convertFromBaseCurrency(pValue, pIso4217CurrencyCode);
    }

    /**
     * Convert to the base-currency assuming the "ISO4217" namespace.
     *
     * @see biz.wolschon.finance.SimpleCurrencyTable#convertToBaseCurrency(biz.wolschon.numbers.FixedPointNumber, java.lang.String)
     */
    @Override
    public boolean convertToBaseCurrency(final FixedPointNumber pValue,
                                         final String pIso4217CurrencyCode) {
        if (pIso4217CurrencyCode == null) {
            throw new IllegalArgumentException("null currency-id given!");
        }
        return convertToBaseCurrency("ISO4217", pValue, pIso4217CurrencyCode);
    }

    /**
     * Look up a conversion-factor in the "ISO4217" namespace.
     *
     * @see biz.wolschon.finance.SimpleCurrencyTable#getConversionFactor(java.lang.String)
     */
    @Override
    public FixedPointNumber getConversionFactor(final String pIso4217CurrencyCode) {
        if (pIso4217CurrencyCode == null) {
            throw new IllegalArgumentException("null currency-id given!");
        }
        return getConversionFactor("ISO4217", pIso4217CurrencyCode);
    }

    /**
     * Set a conversion-factor in the "ISO4217" namespace and notify
     * listeners.
     *
     * @see biz.wolschon.finance.SimpleCurrencyTable#setConversionFactor(java.lang.String, biz.wolschon.numbers.FixedPointNumber)
     */
    @Override
    public void setConversionFactor(final String pIso4217CurrencyCode,
                                    final FixedPointNumber pFactor) {
        if (pIso4217CurrencyCode == null) {
            throw new IllegalArgumentException("null currency-id given!");
        }
        if (pFactor == null) {
            throw new IllegalArgumentException("null conversion-factor given!");
        }
        setConversionFactor("ISO4217", pIso4217CurrencyCode, pFactor);
        fireCurrencyTableChanged("ISO4217", pIso4217CurrencyCode, pFactor);
    }

    /**
     * Set a conversion-factor in the given namespace and notify listeners.
     * If the namespace does not exist yet, it is created.
     *
     * @param namespace e.g. "ISO4217"
     * @param pIso4217CurrencyCode the currency within the namespace
     * @param pFactor the new conversion-factor
     * @see biz.wolschon.finance.SimpleCurrencyTable#setConversionFactor(java.lang.String, biz.wolschon.numbers.FixedPointNumber)
     */
    public void setConversionFactor(final String namespace,
                                    final String pIso4217CurrencyCode,
                                    final FixedPointNumber pFactor) {
        if (namespace == null) {
            throw new IllegalArgumentException("null namepace given!");
        }
        if (pIso4217CurrencyCode == null) {
            throw new IllegalArgumentException("null currency-id given!");
        }
        if (pFactor == null) {
            throw new IllegalArgumentException("null conversion-factor given!");
        }
        SimpleCurrencyTable table = getNamespace(namespace);
        if (table == null) {
            addNameSpace(namespace);
            table = getNamespace(namespace);
        }
        table.setConversionFactor(pIso4217CurrencyCode, pFactor);
        fireCurrencyTableChanged(namespace, pIso4217CurrencyCode, pFactor);
    }

    /**
     * Look up a conversion-factor in the given namespace.
     *
     * @param namespace e.g. "ISO4217"
     * @param pIso4217CurrencyCode the currency within the namespace
     * @return the factor, or null if the namespace or currency is unknown
     * @see biz.wolschon.finance.SimpleCurrencyTable#getConversionFactor(java.lang.String)
     */
    public FixedPointNumber getConversionFactor(final String namespace,
                                                final String pIso4217CurrencyCode) {
        if (pIso4217CurrencyCode == null) {
            throw new IllegalArgumentException("null currency-id given!");
        }
        SimpleCurrencyTable table = getNamespace(namespace);
        if (table == null) {
            return null;
        }
        return table.getConversionFactor(pIso4217CurrencyCode);
    }

    /**
     * @return the names of all known namespaces (live key-set view)
     */
    public Collection<String> getNameSpaces() {
        if (namespace2CurrencyTable == null) {
            namespace2CurrencyTable = new HashMap<String, SimpleCurrencyTable>();
        }
        return namespace2CurrencyTable.keySet();
    }

    /**
     * Look up the currency-table for a namespace.
     *
     * @param namespace the namespace-name, must not be null
     * @return the table, or null if the namespace is unknown
     */
    protected SimpleCurrencyTable getNamespace(String namespace) {
        if (namespace == null) {
            throw new IllegalArgumentException("null namepace given!");
        }
        if (namespace2CurrencyTable == null) {
            namespace2CurrencyTable = new HashMap<String, SimpleCurrencyTable>();
        }
        return namespace2CurrencyTable.get(namespace);
    }

    /**
     * Creates a table pre-populated with an empty "ISO4217" namespace.
     */
    public ComplexCurrencyTable() {
        super();
        addNameSpace("ISO4217", new SimpleCurrencyTable());
    }

    /**
     * @param pNamespace the namespace-name to look up
     * @return the currencies known in that namespace (empty set if unknown)
     */
    public Collection<String> getCurrencies(final String pNamespace) {
        SimpleCurrencyTable namespace = getNamespace(pNamespace);
        if (namespace == null) {
            return new HashSet<String>();
        }
        return namespace.getCurrencies();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.rpc.support;
import org.apache.dubbo.common.URL;
import org.apache.dubbo.common.extension.ExtensionFactory;
import org.apache.dubbo.common.extension.ExtensionLoader;
import org.apache.dubbo.common.utils.ArrayUtils;
import org.apache.dubbo.common.utils.ConfigUtils;
import org.apache.dubbo.common.utils.PojoUtils;
import org.apache.dubbo.common.utils.ReflectUtils;
import org.apache.dubbo.common.utils.StringUtils;
import org.apache.dubbo.rpc.AsyncRpcResult;
import org.apache.dubbo.rpc.Invocation;
import org.apache.dubbo.rpc.Invoker;
import org.apache.dubbo.rpc.ProxyFactory;
import org.apache.dubbo.rpc.Result;
import org.apache.dubbo.rpc.RpcException;
import org.apache.dubbo.rpc.RpcInvocation;
import com.alibaba.fastjson.JSON;
import java.lang.reflect.Constructor;
import java.lang.reflect.Type;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import static org.apache.dubbo.rpc.Constants.FAIL_PREFIX;
import static org.apache.dubbo.rpc.Constants.FORCE_PREFIX;
import static org.apache.dubbo.rpc.Constants.MOCK_KEY;
import static org.apache.dubbo.rpc.Constants.RETURN_KEY;
import static org.apache.dubbo.rpc.Constants.RETURN_PREFIX;
import static org.apache.dubbo.rpc.Constants.THROW_PREFIX;
/**
 * Invoker that produces mock results instead of performing a real remote
 * invocation. Behaviour is driven by the method-level "mock" URL parameter:
 * "return ..." yields a parsed value, "throw [ExceptionClass]" raises an
 * exception, anything else is treated as a mock implementation class name
 * (or "&lt;interface&gt;Mock" for "default").
 *
 * @param <T> the service interface type
 */
public final class MockInvoker<T> implements Invoker<T> {
    private static final ProxyFactory PROXY_FACTORY = ExtensionLoader.getExtensionLoader(ProxyFactory.class).getAdaptiveExtension();
    // Bounded caches so mock invokers / throwables are created only once per key.
    private static final Map<String, Invoker<?>> MOCK_MAP = new ConcurrentHashMap<String, Invoker<?>>();
    private static final Map<String, Throwable> THROWABLE_MAP = new ConcurrentHashMap<String, Throwable>();

    private final URL url;
    private final Class<T> type;

    public MockInvoker(URL url, Class<T> type) {
        this.url = url;
        this.type = type;
    }

    /**
     * Parses a mock value expression without return-type information.
     *
     * @see #parseMockValue(String, Type[])
     */
    public static Object parseMockValue(String mock) throws Exception {
        return parseMockValue(mock, null);
    }

    /**
     * Parses a mock value expression into an object. Recognized forms, in
     * order: "empty" (empty instance of the return type), "null", "true",
     * "false", quoted strings, plain strings for String returns, numbers,
     * JSON objects ("{...}"), JSON arrays ("[...]"); anything else is
     * returned as the raw string. The result is realized into the declared
     * return type when one is given.
     *
     * @param mock the mock expression to parse
     * @param returnTypes the invoked method's return types (may be null)
     * @return the parsed (and possibly realized) value
     */
    public static Object parseMockValue(String mock, Type[] returnTypes) throws Exception {
        Object value = null;
        if ("empty".equals(mock)) {
            value = ReflectUtils.getEmptyObject(returnTypes != null && returnTypes.length > 0 ? (Class<?>) returnTypes[0] : null);
        } else if ("null".equals(mock)) {
            value = null;
        } else if ("true".equals(mock)) {
            value = true;
        } else if ("false".equals(mock)) {
            value = false;
        } else if (mock.length() >= 2 && (mock.startsWith("\"") && mock.endsWith("\"")
                || mock.startsWith("'") && mock.endsWith("'"))) {
            // Quoted string -> strip the surrounding quotes.
            value = mock.subSequence(1, mock.length() - 1);
        } else if (returnTypes != null && returnTypes.length > 0 && returnTypes[0] == String.class) {
            value = mock;
        } else if (StringUtils.isNumeric(mock, false)) {
            value = JSON.parse(mock);
        } else if (mock.startsWith("{")) {
            value = JSON.parseObject(mock, Map.class);
        } else if (mock.startsWith("[")) {
            value = JSON.parseObject(mock, List.class);
        } else {
            value = mock;
        }
        if (ArrayUtils.isNotEmpty(returnTypes)) {
            value = PojoUtils.realize(value, (Class<?>) returnTypes[0], returnTypes.length > 1 ? returnTypes[1] : null);
        }
        return value;
    }

    /**
     * Executes the mock behaviour configured for the invoked method.
     *
     * @throws RpcException if no mock is configured, the mock expression is
     *         invalid, or the mock is a "throw" directive
     */
    @Override
    public Result invoke(Invocation invocation) throws RpcException {
        if (invocation instanceof RpcInvocation) {
            ((RpcInvocation) invocation).setInvoker(this);
        }
        String mock = getUrl().getMethodParameter(invocation.getMethodName(), MOCK_KEY);
        if (StringUtils.isBlank(mock)) {
            throw new RpcException(new IllegalAccessException("mock can not be null. url :" + url));
        }
        mock = normalizeMock(URL.decode(mock));
        if (mock.startsWith(RETURN_PREFIX)) {
            mock = mock.substring(RETURN_PREFIX.length()).trim();
            try {
                Type[] returnTypes = RpcUtils.getReturnTypes(invocation);
                Object value = parseMockValue(mock, returnTypes);
                return AsyncRpcResult.newDefaultAsyncResult(value, invocation);
            } catch (Exception ew) {
                throw new RpcException("mock return invoke error. method :" + invocation.getMethodName()
                        + ", mock:" + mock + ", url: " + url, ew);
            }
        } else if (mock.startsWith(THROW_PREFIX)) {
            mock = mock.substring(THROW_PREFIX.length()).trim();
            if (StringUtils.isBlank(mock)) {
                throw new RpcException("mocked exception for service degradation.");
            } else { // user customized class
                Throwable t = getThrowable(mock);
                throw new RpcException(RpcException.BIZ_EXCEPTION, t);
            }
        } else { // impl mock
            try {
                Invoker<T> invoker = getInvoker(mock);
                return invoker.invoke(invocation);
            } catch (Throwable t) {
                throw new RpcException("Failed to create mock implementation class " + mock, t);
            }
        }
    }

    /**
     * Instantiates (and caches) the user-supplied exception class; the class
     * must have a (String) constructor.
     *
     * @param throwstr fully-qualified exception class name
     * @throws RpcException if the class cannot be loaded or instantiated
     */
    public static Throwable getThrowable(String throwstr) {
        Throwable throwable = THROWABLE_MAP.get(throwstr);
        if (throwable != null) {
            return throwable;
        }
        try {
            Throwable t;
            Class<?> bizException = ReflectUtils.forName(throwstr);
            Constructor<?> constructor;
            constructor = ReflectUtils.findConstructor(bizException, String.class);
            t = (Throwable) constructor.newInstance(new Object[]{"mocked exception for service degradation."});
            // Cap the cache so arbitrary mock strings cannot grow it unboundedly.
            if (THROWABLE_MAP.size() < 1000) {
                THROWABLE_MAP.put(throwstr, t);
            }
            return t;
        } catch (Exception e) {
            throw new RpcException("mock throw error :" + throwstr + " argument error.", e);
        }
    }

    /**
     * Builds (and caches) an invoker wrapping the mock implementation class.
     */
    @SuppressWarnings("unchecked")
    private Invoker<T> getInvoker(String mock) {
        Class<T> serviceType = (Class<T>) ReflectUtils.forName(url.getServiceInterface());
        final boolean isDefault = ConfigUtils.isDefault(mock);
        // convert to actual mock service name
        String mockService = isDefault ? serviceType.getName() + "Mock" : mock;
        Invoker<T> invoker = (Invoker<T>) MOCK_MAP.get(mockService);
        if (invoker != null) {
            return invoker;
        }
        T mockObject = (T) getMockObject(mock, serviceType);
        invoker = PROXY_FACTORY.getInvoker(mockObject, serviceType, url);
        if (MOCK_MAP.size() < 10000) {
            MOCK_MAP.put(mockService, invoker);
        }
        return invoker;
    }

    /**
     * Resolves the mock implementation: "default" maps to
     * "&lt;interface&gt;Mock"; otherwise the given class name is loaded, or a
     * matching extension (e.g. Spring bean) is looked up as a fallback.
     *
     * @param mockService mock class name or "default"
     * @param serviceType the service interface the mock must implement
     * @throws IllegalStateException if no suitable mock can be found/created
     */
    @SuppressWarnings("unchecked")
    public static Object getMockObject(String mockService, Class<?> serviceType) {
        boolean isDefault = ConfigUtils.isDefault(mockService);
        if (isDefault) {
            mockService = serviceType.getName() + "Mock";
        }
        Class<?> mockClass;
        try {
            mockClass = ReflectUtils.forName(mockService);
        } catch (Exception e) {
            if (!isDefault) { // does not check Spring bean if it is default config.
                ExtensionFactory extensionFactory =
                        ExtensionLoader.getExtensionLoader(ExtensionFactory.class).getAdaptiveExtension();
                Object obj = extensionFactory.getExtension(serviceType, mockService);
                if (obj != null) {
                    return obj;
                }
            }
            throw new IllegalStateException("Did not find mock class or instance "
                    + mockService
                    + ", please check if there's mock class or instance implementing interface "
                    + serviceType.getName(), e);
        }
        if (mockClass == null || !serviceType.isAssignableFrom(mockClass)) {
            throw new IllegalStateException("The mock class " + mockClass.getName() +
                    " not implement interface " + serviceType.getName());
        }
        try {
            // newInstance() is deprecated but kept so the thrown exception
            // types (and the catches below) stay unchanged.
            return mockClass.newInstance();
        } catch (InstantiationException e) {
            throw new IllegalStateException("No default constructor from mock class " + mockClass.getName(), e);
        } catch (IllegalAccessException e) {
            throw new IllegalStateException(e);
        }
    }

    /**
     * Normalize mock string:
     *
     * <ol>
     * <li>return => return null</li>
     * <li>fail => default</li>
     * <li>force => default</li>
     * <li>fail:throw/return foo => throw/return foo</li>
     * <li>force:throw/return foo => throw/return foo</li>
     * </ol>
     *
     * @param mock mock string
     * @return normalized mock string
     */
    public static String normalizeMock(String mock) {
        if (mock == null) {
            return mock;
        }
        mock = mock.trim();
        if (mock.length() == 0) {
            return mock;
        }
        if (RETURN_KEY.equalsIgnoreCase(mock)) {
            return RETURN_PREFIX + "null";
        }
        if (ConfigUtils.isDefault(mock) || "fail".equalsIgnoreCase(mock) || "force".equalsIgnoreCase(mock)) {
            return "default";
        }
        if (mock.startsWith(FAIL_PREFIX)) {
            mock = mock.substring(FAIL_PREFIX.length()).trim();
        }
        if (mock.startsWith(FORCE_PREFIX)) {
            mock = mock.substring(FORCE_PREFIX.length()).trim();
        }
        if (mock.startsWith(RETURN_PREFIX) || mock.startsWith(THROW_PREFIX)) {
            // Backticks are accepted as quote characters in URL parameters.
            mock = mock.replace('`', '"');
        }
        return mock;
    }

    @Override
    public URL getUrl() {
        return this.url;
    }

    @Override
    public boolean isAvailable() {
        return true;
    }

    @Override
    public void destroy() {
        // do nothing
    }

    @Override
    public Class<T> getInterface() {
        return type;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.store.parquet;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.drill.common.exceptions.ExecutionSetupException;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.common.logical.FormatPluginConfig;
import org.apache.drill.common.logical.StoragePluginConfig;
import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.common.types.Types;
import org.apache.drill.exec.metrics.DrillMetrics;
import org.apache.drill.exec.physical.EndpointAffinity;
import org.apache.drill.exec.physical.PhysicalOperatorSetupException;
import org.apache.drill.exec.physical.base.AbstractFileGroupScan;
import org.apache.drill.exec.physical.base.FileGroupScan;
import org.apache.drill.exec.physical.base.GroupScan;
import org.apache.drill.exec.physical.base.PhysicalOperator;
import org.apache.drill.exec.physical.base.ScanStats;
import org.apache.drill.exec.physical.base.ScanStats.GroupScanProperty;
import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
import org.apache.drill.exec.store.ParquetOutputRecordWriter;
import org.apache.drill.exec.store.StoragePluginRegistry;
import org.apache.drill.exec.store.TimedRunnable;
import org.apache.drill.exec.store.dfs.DrillFileSystem;
import org.apache.drill.exec.store.dfs.DrillPathFilter;
import org.apache.drill.exec.store.dfs.FileSelection;
import org.apache.drill.exec.store.dfs.ReadEntryFromHDFS;
import org.apache.drill.exec.store.dfs.ReadEntryWithPath;
import org.apache.drill.exec.store.dfs.easy.FileWork;
import org.apache.drill.exec.store.parquet.Metadata.ColumnMetadata;
import org.apache.drill.exec.store.parquet.Metadata.ParquetFileMetadata;
import org.apache.drill.exec.store.parquet.Metadata.ParquetTableMetadataBase;
import org.apache.drill.exec.store.parquet.Metadata.RowGroupMetadata;
import org.apache.drill.exec.store.schedule.AffinityCreator;
import org.apache.drill.exec.store.schedule.AssignmentCreator;
import org.apache.drill.exec.store.schedule.BlockMapBuilder;
import org.apache.drill.exec.store.schedule.CompleteWork;
import org.apache.drill.exec.store.schedule.EndpointByteMap;
import org.apache.drill.exec.store.schedule.EndpointByteMapImpl;
import org.apache.drill.exec.util.ImpersonationUtil;
import org.apache.drill.exec.vector.NullableBigIntVector;
import org.apache.drill.exec.vector.NullableDateVector;
import org.apache.drill.exec.vector.NullableDecimal18Vector;
import org.apache.drill.exec.vector.NullableFloat4Vector;
import org.apache.drill.exec.vector.NullableFloat8Vector;
import org.apache.drill.exec.vector.NullableIntVector;
import org.apache.drill.exec.vector.NullableSmallIntVector;
import org.apache.drill.exec.vector.NullableTimeStampVector;
import org.apache.drill.exec.vector.NullableTimeVector;
import org.apache.drill.exec.vector.NullableTinyIntVector;
import org.apache.drill.exec.vector.NullableUInt1Vector;
import org.apache.drill.exec.vector.NullableUInt2Vector;
import org.apache.drill.exec.vector.NullableUInt4Vector;
import org.apache.drill.exec.vector.NullableVarBinaryVector;
import org.apache.drill.exec.vector.NullableVarCharVector;
import org.apache.drill.exec.vector.ValueVector;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.io.api.Binary;
import org.apache.parquet.schema.OriginalType;
import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName;
import org.joda.time.DateTimeUtils;
import com.codahale.metrics.MetricRegistry;
import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.google.common.base.Preconditions;
import com.google.common.base.Stopwatch;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
@JsonTypeName("parquet-scan")
public class ParquetGroupScan extends AbstractFileGroupScan {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ParquetGroupScan.class);
static final MetricRegistry metrics = DrillMetrics.getInstance();
static final String READ_FOOTER_TIMER = MetricRegistry.name(ParquetGroupScan.class, "readFooter");
private final List<ReadEntryWithPath> entries;
private final Stopwatch watch = Stopwatch.createUnstarted();
private final ParquetFormatPlugin formatPlugin;
private final ParquetFormatConfig formatConfig;
private final DrillFileSystem fs;
private String selectionRoot;
private boolean usedMetadataCache = false;
private List<EndpointAffinity> endpointAffinities;
private List<SchemaPath> columns;
private ListMultimap<Integer, RowGroupInfo> mappings;
private List<RowGroupInfo> rowGroupInfos;
/**
* The parquet table metadata may have already been read
* from a metadata cache file earlier; we can re-use during
* the ParquetGroupScan and avoid extra loading time.
*/
private Metadata.ParquetTableMetadataBase parquetTableMetadata = null;
/*
* total number of rows (obtained from parquet footer)
*/
private long rowCount;
/*
* total number of non-null value for each column in parquet files.
*/
private Map<SchemaPath, Long> columnValueCounts;
/**
 * Deserialization constructor used by Jackson when a serialized plan is
 * read back. Resolves the format plugin from the registry, recreates the
 * (impersonated) file system, and re-runs init() to rebuild row-group and
 * column metadata.
 */
@JsonCreator public ParquetGroupScan( //
@JsonProperty("userName") String userName,
@JsonProperty("entries") List<ReadEntryWithPath> entries,//
@JsonProperty("storage") StoragePluginConfig storageConfig, //
@JsonProperty("format") FormatPluginConfig formatConfig, //
@JacksonInject StoragePluginRegistry engineRegistry, //
@JsonProperty("columns") List<SchemaPath> columns, //
@JsonProperty("selectionRoot") String selectionRoot //
) throws IOException, ExecutionSetupException {
super(ImpersonationUtil.resolveUserName(userName));
this.columns = columns;
// A missing format config falls back to the defaults.
if (formatConfig == null) {
formatConfig = new ParquetFormatConfig();
}
Preconditions.checkNotNull(storageConfig);
Preconditions.checkNotNull(formatConfig);
this.formatPlugin = (ParquetFormatPlugin) engineRegistry.getFormatPlugin(storageConfig, formatConfig);
Preconditions.checkNotNull(formatPlugin);
this.fs = ImpersonationUtil.createFileSystem(getUserName(), formatPlugin.getFsConf());
this.formatConfig = formatPlugin.getConfig();
this.entries = entries;
this.selectionRoot = selectionRoot;
init();
}
/**
 * Planning-time constructor from a file selection. If a metadata cache
 * file exists next to the first selected path, the selection is expanded
 * from the cache to avoid reading parquet footers; otherwise init()
 * performs the expansion.
 */
public ParquetGroupScan( //
String userName,
FileSelection selection, //
ParquetFormatPlugin formatPlugin, //
String selectionRoot,
List<SchemaPath> columns) //
throws IOException {
super(userName);
this.formatPlugin = formatPlugin;
this.columns = columns;
this.formatConfig = formatPlugin.getConfig();
this.fs = ImpersonationUtil.createFileSystem(userName, formatPlugin.getFsConf());
this.selectionRoot = selectionRoot;
FileSelection newSelection = null;
if (!selection.isExpanded()) {
// if metadata cache exists, do the expansion of selection using the metadata cache;
// otherwise let init() handle the expansion
FileStatus firstPath = selection.getFirstPath(fs);
Path p = new Path(firstPath.getPath(), Metadata.METADATA_FILENAME);
if (fs.exists(p)) {
newSelection = initFromMetadataCache(fs, selection);
}
}
FileSelection fileSelection = newSelection != null ? newSelection : selection;
this.entries = Lists.newArrayList();
final List<FileStatus> files = fileSelection.getStatuses(fs);
// One read entry per selected parquet file.
for (FileStatus file : files) {
entries.add(new ReadEntryWithPath(file.getPath().toString()));
}
init();
}
/**
 * Copy constructor used to clone another group scan. Mutable collections
 * are defensively copied; the format plugin, file system, and already-read
 * parquet table metadata are shared with the source instance.
 */
private ParquetGroupScan(ParquetGroupScan that) {
super(that);
this.columns = that.columns == null ? null : Lists.newArrayList(that.columns);
this.endpointAffinities = that.endpointAffinities == null ? null : Lists.newArrayList(that.endpointAffinities);
this.entries = that.entries == null ? null : Lists.newArrayList(that.entries);
this.formatConfig = that.formatConfig;
this.formatPlugin = that.formatPlugin;
this.fs = that.fs;
this.mappings = that.mappings == null ? null : ArrayListMultimap.create(that.mappings);
this.rowCount = that.rowCount;
this.rowGroupInfos = that.rowGroupInfos == null ? null : Lists.newArrayList(that.rowGroupInfos);
this.selectionRoot = that.selectionRoot;
this.columnValueCounts = that.columnValueCounts == null ? null : new HashMap<>(that.columnValueCounts);
this.columnTypeMap = that.columnTypeMap == null ? null : new HashMap<>(that.columnTypeMap);
this.partitionValueMap = that.partitionValueMap == null ? null : new HashMap<>(that.partitionValueMap);
this.fileSet = that.fileSet == null ? null : new HashSet<>(that.fileSet);
this.usedMetadataCache = that.usedMetadataCache;
this.parquetTableMetadata = that.parquetTableMetadata;
}
/** @return the read entries (one per parquet file) of this scan. */
public List<ReadEntryWithPath> getEntries() {
return entries;
}
/** @return the parquet format configuration (serialized as "format"). */
@JsonProperty("format")
public ParquetFormatConfig getFormatConfig() {
return this.formatConfig;
}
/** @return the storage plugin configuration (serialized as "storage"). */
@JsonProperty("storage")
public StoragePluginConfig getEngineConfig() {
return this.formatPlugin.getStorageConfig();
}
/** @return the root directory common to all selected files. */
public String getSelectionRoot() {
return selectionRoot;
}
/** @return the set of file paths covered by this scan. */
public Set<String> getFileSet() {
return fileSet;
}
@Override
public boolean hasFiles() {
return true;
}
@Override
public Collection<String> getFiles() {
return fileSet;
}
// Paths of all files covered by this scan; populated during init().
private Set<String> fileSet;
// Candidate partition columns and their Drill types; pruned as more
// footers are examined (see checkForPartitionColumn).
@JsonIgnore
private Map<SchemaPath, MajorType> columnTypeMap = Maps.newHashMap();
/**
 * When reading the very first footer, any column is a potential partition column. So for the first footer, we check
 * every column to see if it is single valued, and if so, add it to the list of potential partition columns. For the
 * remaining footers, we will not find any new partition columns, but we may discover that what was previously a
 * potential partition column now no longer qualifies, so it needs to be removed from the list.
 *
 * @param columnMetadata per-row-group statistics for the column being examined
 * @param first whether this is the very first row group examined
 * @return whether column is a potential partition column
 */
private boolean checkForPartitionColumn(ColumnMetadata columnMetadata, boolean first) {
  SchemaPath schemaPath = SchemaPath.getCompoundPath(columnMetadata.getName());
  final PrimitiveTypeName primitiveType;
  final OriginalType originalType;
  // Prefer table-level column type info when the metadata (cache) carries it.
  if (this.parquetTableMetadata.hasColumnMetadata()) {
    primitiveType = this.parquetTableMetadata.getPrimitiveType(columnMetadata.getName());
    originalType = this.parquetTableMetadata.getOriginalType(columnMetadata.getName());
  } else {
    primitiveType = columnMetadata.getPrimitiveType();
    originalType = columnMetadata.getOriginalType();
  }
  if (first) {
    // First row group: a column qualifies iff it is single-valued within it.
    if (hasSingleValue(columnMetadata)) {
      columnTypeMap.put(schemaPath, getType(primitiveType, originalType));
      return true;
    }
    return false;
  }
  // Subsequent row groups can only disqualify previously discovered candidates.
  // (was: columnTypeMap.keySet().contains(schemaPath) — containsKey is the direct idiom)
  if (!columnTypeMap.containsKey(schemaPath)) {
    return false;
  }
  if (!hasSingleValue(columnMetadata)) {
    columnTypeMap.remove(schemaPath);
    return false;
  }
  if (!getType(primitiveType, originalType).equals(columnTypeMap.get(schemaPath))) {
    // Type differs between row groups; cannot be used as a partition column.
    columnTypeMap.remove(schemaPath);
    return false;
  }
  return true;
}
/**
 * Maps a parquet physical/logical type pair to the engine's MajorType.
 * The logical OriginalType annotation takes precedence when present; otherwise
 * the physical PrimitiveTypeName decides. All returned types are OPTIONAL (nullable).
 */
private MajorType getType(PrimitiveTypeName type, OriginalType originalType) {
if (originalType != null) {
switch (originalType) {
case DECIMAL:
return Types.optional(MinorType.DECIMAL18);
case DATE:
return Types.optional(MinorType.DATE);
case TIME_MILLIS:
return Types.optional(MinorType.TIME);
case TIMESTAMP_MILLIS:
return Types.optional(MinorType.TIMESTAMP);
case UTF8:
return Types.optional(MinorType.VARCHAR);
case UINT_8:
return Types.optional(MinorType.UINT1);
case UINT_16:
return Types.optional(MinorType.UINT2);
case UINT_32:
return Types.optional(MinorType.UINT4);
case UINT_64:
return Types.optional(MinorType.UINT8);
case INT_8:
return Types.optional(MinorType.TINYINT);
case INT_16:
return Types.optional(MinorType.SMALLINT);
}
}
// No (or unhandled) logical annotation: fall back to the physical type.
switch (type) {
case BOOLEAN:
return Types.optional(MinorType.BIT);
case INT32:
return Types.optional(MinorType.INT);
case INT64:
return Types.optional(MinorType.BIGINT);
case FLOAT:
return Types.optional(MinorType.FLOAT4);
case DOUBLE:
return Types.optional(MinorType.FLOAT8);
// BINARY, fixed-length byte arrays and INT96 are all surfaced as VARBINARY.
case BINARY:
case FIXED_LEN_BYTE_ARRAY:
case INT96:
return Types.optional(MinorType.VARBINARY);
default:
// Should never hit this
throw new UnsupportedOperationException("Unsupported type:" + type);
}
}
/**
 * A column chunk is considered single-valued when its metadata is present and its
 * min and max statistics for the row group are equal.
 */
private boolean hasSingleValue(ColumnMetadata columnChunkMetaData) {
  if (columnChunkMetaData == null) {
    return false;
  }
  return columnChunkMetaData.hasSingleValue();
}
/**
 * Narrows this scan to the files in the given selection: rebuilds the read
 * entries and file set, then drops row groups belonging to excluded files.
 */
@Override
public void modifyFileSelection(FileSelection selection) {
  entries.clear();
  fileSet = Sets.newHashSet();
  for (String fileName : selection.getFiles()) {
    entries.add(new ReadEntryWithPath(fileName));
    fileSet.add(fileName);
  }
  List<RowGroupInfo> retained = Lists.newArrayList();
  for (RowGroupInfo info : rowGroupInfos) {
    if (fileSet.contains(info.getPath())) {
      retained.add(info);
    }
  }
  this.rowGroupInfos = retained;
}
/** Returns the inferred type for a partition-column candidate, or null if the column is not one. */
public MajorType getTypeForColumn(SchemaPath schemaPath) {
return columnTypeMap.get(schemaPath);
}
// Per-file partition values: file path -> (column -> the single value observed in that file).
private Map<String, Map<SchemaPath, Object>> partitionValueMap = Maps.newHashMap();
/**
 * Writes the partition value recorded for {@code column} in {@code file} into the
 * pruning vector {@code v} at position {@code index}, converting the cached value
 * to the column's minor type.
 *
 * @throws UnsupportedOperationException for minor types without a conversion
 */
public void populatePruningVector(ValueVector v, int index, SchemaPath column, String file) {
  String f = Path.getPathWithoutSchemeAndAuthority(new Path(file)).toString();
  MinorType type = getTypeForColumn(column).getMinorType();
  // Single lookup shared by all cases (the original repeated it in every branch).
  Object partValue = partitionValueMap.get(f).get(column);
  switch (type) {
    case INT:
      ((NullableIntVector) v).getMutator().setSafe(index, (Integer) partValue);
      return;
    case SMALLINT:
      ((NullableSmallIntVector) v).getMutator().setSafe(index, ((Integer) partValue).shortValue());
      return;
    case TINYINT:
      ((NullableTinyIntVector) v).getMutator().setSafe(index, ((Integer) partValue).byteValue());
      return;
    case UINT1:
      ((NullableUInt1Vector) v).getMutator().setSafe(index, ((Integer) partValue).byteValue());
      return;
    case UINT2:
      ((NullableUInt2Vector) v).getMutator().setSafe(index, (char) ((Integer) partValue).shortValue());
      return;
    case UINT4:
      ((NullableUInt4Vector) v).getMutator().setSafe(index, (Integer) partValue);
      return;
    case BIGINT:
      ((NullableBigIntVector) v).getMutator().setSafe(index, (Long) partValue);
      return;
    case FLOAT4:
      ((NullableFloat4Vector) v).getMutator().setSafe(index, (Float) partValue);
      return;
    case FLOAT8:
      ((NullableFloat8Vector) v).getMutator().setSafe(index, (Double) partValue);
      return;
    case VARBINARY: {
      byte[] bytes = partitionValueToBytes(type, partValue);
      ((NullableVarBinaryVector) v).getMutator().setSafe(index, bytes, 0, bytes.length);
      return;
    }
    case DECIMAL18:
      ((NullableDecimal18Vector) v).getMutator().setSafe(index, (Long) partValue);
      return;
    case DATE:
      // Stored as days since the parquet Julian-day epoch; -0.5 adjusts to midnight.
      ((NullableDateVector) v).getMutator().setSafe(index,
          DateTimeUtils.fromJulianDay((Integer) partValue - ParquetOutputRecordWriter.JULIAN_DAY_EPOC - 0.5));
      return;
    case TIME:
      ((NullableTimeVector) v).getMutator().setSafe(index, (Integer) partValue);
      return;
    case TIMESTAMP:
      ((NullableTimeStampVector) v).getMutator().setSafe(index, (Long) partValue);
      return;
    case VARCHAR: {
      byte[] bytes = partitionValueToBytes(type, partValue);
      ((NullableVarCharVector) v).getMutator().setSafe(index, bytes, 0, bytes.length);
      return;
    }
    default:
      throw new UnsupportedOperationException("Unsupported type: " + type);
  }
}

/**
 * Converts a cached partition value into raw bytes. If the metadata was read from a
 * JSON cache file the value may be a String; otherwise it is a Binary or byte[].
 */
private static byte[] partitionValueToBytes(MinorType type, Object s) {
  if (s instanceof Binary) {
    return ((Binary) s).getBytes();
  } else if (s instanceof String) {
    return ((String) s).getBytes();
  } else if (s instanceof byte[]) {
    return (byte[]) s;
  }
  throw new UnsupportedOperationException("Unable to create column data for type: " + type);
}
/**
 * A single parquet row group as a unit of schedulable work: carries its file path,
 * byte range, index within the file, row count, and endpoint affinity information.
 */
public static class RowGroupInfo extends ReadEntryFromHDFS implements CompleteWork, FileWork {
// Bytes of this row group hosted per endpoint; set after construction.
private EndpointByteMap byteMap;
private int rowGroupIndex;
private String root;
private long rowCount; // rowCount = -1 indicates to include all rows.
@JsonCreator
public RowGroupInfo(@JsonProperty("path") String path, @JsonProperty("start") long start,
@JsonProperty("length") long length, @JsonProperty("rowGroupIndex") int rowGroupIndex, long rowCount) {
super(path, start, length);
this.rowGroupIndex = rowGroupIndex;
this.rowCount = rowCount;
}
/** Converts this work unit into a serializable read entry for a minor fragment. */
public RowGroupReadEntry getRowGroupReadEntry() {
return new RowGroupReadEntry(this.getPath(), this.getStart(), this.getLength(), this.rowGroupIndex);
}
public int getRowGroupIndex() {
return this.rowGroupIndex;
}
/** Orders work units by size (total bytes). */
@Override
public int compareTo(CompleteWork o) {
return Long.compare(getTotalBytes(), o.getTotalBytes());
}
@Override
public long getTotalBytes() {
return this.getLength();
}
@Override
public EndpointByteMap getByteMap() {
return byteMap;
}
public void setEndpointByteMap(EndpointByteMap byteMap) {
this.byteMap = byteMap;
}
public long getRowCount() {
return rowCount;
}
}
// Create and return a new file selection based on reading the metadata cache file.
// This function also initializes a few of ParquetGroupScan's fields as appropriate
// (parquetTableMetadata and selectionRoot).
private FileSelection
initFromMetadataCache(DrillFileSystem fs, FileSelection selection) throws IOException {
FileStatus metaRootDir = selection.getFirstPath(fs);
Path metaFilePath = new Path(metaRootDir.getPath(), Metadata.METADATA_FILENAME);
// get (and set internal field) the metadata for the directory by reading the metadata file
this.parquetTableMetadata = Metadata.readBlockMeta(fs, metaFilePath.toString());
// Collect the parquet file paths recorded in the cache file.
List<String> fileNames = Lists.newArrayList();
for (Metadata.ParquetFileMetadata file : parquetTableMetadata.getFiles()) {
fileNames.add(file.getPath());
}
// when creating the file selection, set the selection root in the form /a/b instead of
// file:/a/b. The reason is that the file names above have been created in the form
// /a/b/c.parquet and the format of the selection root must match that of the file names
// otherwise downstream operations such as partition pruning can break.
final Path metaRootPath = Path.getPathWithoutSchemeAndAuthority(metaRootDir.getPath());
this.selectionRoot = metaRootPath.toString();
// Use the FileSelection constructor directly here instead of the FileSelection.create() method
// because create() changes the root to include the scheme and authority; In future, if create()
// is the preferred way to instantiate a file selection, we may need to do something different...
FileSelection newSelection = new FileSelection(selection.getStatuses(fs), fileNames, metaRootPath.toString());
// Mark expanded so callers don't re-expand the (already concrete) file list.
newSelection.setExpanded();
return newSelection;
}
/**
 * Initializes scan state from parquet metadata: loads table metadata (from the
 * metadata cache file when available, otherwise by reading footers), builds the
 * file set and per-row-group work units with endpoint affinity, and computes
 * per-column value counts plus partition-column candidates.
 *
 * @throws IOException if the file system or footers cannot be read
 */
private void init() throws IOException {
  List<FileStatus> fileStatuses = null;
  if (entries.size() == 1) {
    Path p = Path.getPathWithoutSchemeAndAuthority(new Path(entries.get(0).getPath()));
    Path metaPath = null;
    if (fs.isDirectory(p)) {
      // Using the metadata file makes sense when querying a directory; otherwise
      // if querying a single file we can look up the metadata directly from the file
      metaPath = new Path(p, Metadata.METADATA_FILENAME);
    }
    if (metaPath != null && fs.exists(metaPath)) {
      usedMetadataCache = true;
      if (parquetTableMetadata == null) {
        parquetTableMetadata = Metadata.readBlockMeta(fs, metaPath.toString());
      }
    } else {
      parquetTableMetadata = Metadata.getParquetTableMetadata(fs, p.toString());
    }
  } else {
    Path p = Path.getPathWithoutSchemeAndAuthority(new Path(selectionRoot));
    Path metaPath = new Path(p, Metadata.METADATA_FILENAME);
    if (fs.isDirectory(new Path(selectionRoot)) && fs.exists(metaPath)) {
      usedMetadataCache = true;
      if (fileSet != null) {
        // A file subset was already chosen; keep metadata for only those files.
        if (parquetTableMetadata == null) {
          parquetTableMetadata = removeUnneededRowGroups(Metadata.readBlockMeta(fs, metaPath.toString()));
        } else {
          parquetTableMetadata = removeUnneededRowGroups(parquetTableMetadata);
        }
      } else {
        if (parquetTableMetadata == null) {
          parquetTableMetadata = Metadata.readBlockMeta(fs, metaPath.toString());
        }
      }
    } else {
      // No cache available: expand entries to concrete files and read footers directly.
      fileStatuses = Lists.newArrayList();
      for (ReadEntryWithPath entry : entries) {
        getFiles(entry.getPath(), fileStatuses);
      }
      parquetTableMetadata = Metadata.getParquetTableMetadata(fs, fileStatuses);
    }
  }
  if (fileSet == null) {
    fileSet = Sets.newHashSet();
    for (ParquetFileMetadata file : parquetTableMetadata.getFiles()) {
      fileSet.add(file.getPath());
    }
  }
  Map<String, DrillbitEndpoint> hostEndpointMap = Maps.newHashMap();
  for (DrillbitEndpoint endpoint : formatPlugin.getContext().getBits()) {
    hostEndpointMap.put(endpoint.getAddress(), endpoint);
  }
  // Build one work unit per row group, weighted by the bytes each endpoint hosts.
  rowGroupInfos = Lists.newArrayList();
  for (ParquetFileMetadata file : parquetTableMetadata.getFiles()) {
    int rgIndex = 0;
    for (RowGroupMetadata rg : file.getRowGroups()) {
      RowGroupInfo rowGroupInfo =
          new RowGroupInfo(file.getPath(), rg.getStart(), rg.getLength(), rgIndex, rg.getRowCount());
      EndpointByteMap endpointByteMap = new EndpointByteMapImpl();
      for (String host : rg.getHostAffinity().keySet()) {
        if (hostEndpointMap.containsKey(host)) {
          endpointByteMap
              .add(hostEndpointMap.get(host), (long) (rg.getHostAffinity().get(host) * rg.getLength()));
        }
      }
      rowGroupInfo.setEndpointByteMap(endpointByteMap);
      rgIndex++;
      rowGroupInfos.add(rowGroupInfo);
    }
  }
  this.endpointAffinities = AffinityCreator.getAffinityMap(rowGroupInfos);
  columnValueCounts = Maps.newHashMap();
  this.rowCount = 0;
  boolean first = true;
  for (ParquetFileMetadata file : parquetTableMetadata.getFiles()) {
    for (RowGroupMetadata rowGroup : file.getRowGroups()) {
      long rowCount = rowGroup.getRowCount();
      for (ColumnMetadata column : rowGroup.getColumns()) {
        SchemaPath schemaPath = SchemaPath.getCompoundPath(column.getName());
        Long previousCount = columnValueCounts.get(schemaPath);
        if (previousCount != null) {
          if (previousCount != GroupScan.NO_COLUMN_STATS) {
            if (column.getNulls() != null) {
              Long newCount = rowCount - column.getNulls();
              columnValueCounts.put(schemaPath, columnValueCounts.get(schemaPath) + newCount);
            } else {
              // BUGFIX: null-count unknown for this row group, so the running total is no
              // longer exact. The original left an empty else (stale exact count); mark the
              // column as having no stats, mirroring the first-encounter branch below.
              columnValueCounts.put(schemaPath, GroupScan.NO_COLUMN_STATS);
            }
          }
        } else {
          if (column.getNulls() != null) {
            Long newCount = rowCount - column.getNulls();
            columnValueCounts.put(schemaPath, newCount);
          } else {
            columnValueCounts.put(schemaPath, GroupScan.NO_COLUMN_STATS);
          }
        }
        boolean partitionColumn = checkForPartitionColumn(column, first);
        if (partitionColumn) {
          Map<SchemaPath, Object> map = partitionValueMap.get(file.getPath());
          if (map == null) {
            map = Maps.newHashMap();
            partitionValueMap.put(file.getPath(), map);
          }
          Object value = map.get(schemaPath);
          Object currentValue = column.getMaxValue();
          if (value != null) {
            // BUGFIX: compare by value, not by reference. The original used "value !=
            // currentValue", which wrongly disqualified partition columns whose equal
            // values were distinct boxed/Binary instances.
            if (!value.equals(currentValue)) {
              columnTypeMap.remove(schemaPath);
            }
          } else {
            map.put(schemaPath, currentValue);
          }
        } else {
          columnTypeMap.remove(schemaPath);
        }
      }
      this.rowCount += rowGroup.getRowCount();
      first = false;
    }
  }
}
/**
 * Returns a copy of the given table metadata containing only the files that are
 * present in this scan's file set.
 */
private ParquetTableMetadataBase removeUnneededRowGroups(ParquetTableMetadataBase parquetTableMetadata) {
  List<ParquetFileMetadata> retained = Lists.newArrayList();
  for (ParquetFileMetadata fileMetadata : parquetTableMetadata.getFiles()) {
    if (fileSet.contains(fileMetadata.getPath())) {
      retained.add(fileMetadata);
    }
  }
  ParquetTableMetadataBase pruned = parquetTableMetadata.clone();
  pruned.assignFiles(retained);
  return pruned;
}
/**
 * Calculates the affinity each endpoint has for this scan, by adding up the affinity each endpoint has for each
 * rowGroup. The list is precomputed in init() via AffinityCreator.
 *
 * @return a list of EndpointAffinity objects
 */
@Override
public List<EndpointAffinity> getOperatorAffinity() {
return this.endpointAffinities;
}
/**
 * Recursively expands {@code path} into the concrete file statuses underneath it,
 * appending them to {@code fileStatuses}. Directory children are filtered through
 * DrillPathFilter.
 */
private void getFiles(String path, List<FileStatus> fileStatuses) throws IOException {
  Path p = Path.getPathWithoutSchemeAndAuthority(new Path(path));
  FileStatus status = fs.getFileStatus(p);
  if (!status.isDirectory()) {
    fileStatuses.add(status);
    return;
  }
  for (FileStatus child : fs.listStatus(p, new DrillPathFilter())) {
    getFiles(child.getPath().toString(), fileStatuses);
  }
}
/**
 * Timed task that resolves the endpoint byte map (block locations) for a single
 * row group and stores it back on the RowGroupInfo.
 */
private class BlockMapper extends TimedRunnable<Void> {
  private final BlockMapBuilder bmb;
  private final RowGroupInfo rgi;

  public BlockMapper(BlockMapBuilder bmb, RowGroupInfo rgi) {
    this.bmb = bmb;
    this.rgi = rgi;
  }

  @Override
  protected Void runInner() throws Exception {
    EndpointByteMap ebm = bmb.getEndpointByteMap(rgi);
    rgi.setEndpointByteMap(ebm);
    return null;
  }

  @Override
  protected IOException convertToIOException(Exception e) {
    // BUGFIX: chain the original exception as the cause; the original code dropped
    // it, losing the underlying stack trace.
    return new IOException(String.format(
        "Failure while trying to get block locations for file %s starting at %d.", rgi.getPath(),
        rgi.getStart()), e);
  }
}
@Override
public void applyAssignments(List<DrillbitEndpoint> incomingEndpoints) throws PhysicalOperatorSetupException {
// Distribute row groups across the given endpoints; the result is consumed by getSpecificScan().
this.mappings = AssignmentCreator.getMappings(incomingEndpoints, rowGroupInfos, formatPlugin.getContext());
}
// Builds the per-minor-fragment scan from the row groups assigned in applyAssignments().
@Override public ParquetRowGroupScan getSpecificScan(int minorFragmentId) {
assert minorFragmentId < mappings.size() : String
.format("Mappings length [%d] should be longer than minor fragment id [%d] but it isn't.",
mappings.size(), minorFragmentId);
List<RowGroupInfo> rowGroupsForMinor = mappings.get(minorFragmentId);
// Every minor fragment must have at least one row group to read.
Preconditions.checkArgument(!rowGroupsForMinor.isEmpty(),
String.format("MinorFragmentId %d has no read entries assigned", minorFragmentId));
return new ParquetRowGroupScan(
getUserName(), formatPlugin, convertToReadEntries(rowGroupsForMinor), columns, selectionRoot);
}
/** Converts row-group work units into serializable read entries. */
private List<RowGroupReadEntry> convertToReadEntries(List<RowGroupInfo> rowGroups) {
  List<RowGroupReadEntry> readEntries = Lists.newArrayList();
  for (RowGroupInfo info : rowGroups) {
    readEntries.add(
        new RowGroupReadEntry(info.getPath(), info.getStart(), info.getLength(), info.getRowGroupIndex()));
  }
  return readEntries;
}
// Parallelism is bounded by the number of row groups (one unit of work each).
@Override
public int getMaxParallelizationWidth() {
return rowGroupInfos.size();
}
/** Projected columns, or null when the projection is unknown (select all). */
public List<SchemaPath> getColumns() {
return columns;
}
@Override
public ScanStats getScanStats() {
// When the projection is unknown, assume 20 columns for cost-estimation purposes.
int columnCount = columns == null ? 20 : columns.size();
return new ScanStats(GroupScanProperty.EXACT_ROW_COUNT, rowCount, 1, rowCount * columnCount);
}
// A scan is a leaf operator: it accepts no children and clones itself.
@Override
@JsonIgnore
public PhysicalOperator getNewWithChildren(List<PhysicalOperator> children) {
Preconditions.checkArgument(children.isEmpty());
return new ParquetGroupScan(this);
}
// Digest used for plan comparison/caching; delegates to toString().
@Override
public String getDigest() {
return toString();
}
/** Human-readable summary of the scan; also serves as the plan digest. */
@Override
public String toString() {
  StringBuilder sb = new StringBuilder("ParquetGroupScan [");
  sb.append("entries=").append(entries);
  sb.append(", selectionRoot=").append(selectionRoot);
  sb.append(", numFiles=").append(getEntries().size());
  sb.append(", usedMetadataFile=").append(usedMetadataCache);
  sb.append(", columns=").append(columns).append("]");
  return sb.toString();
}
// Clone with a different projection; row groups and metadata are shared via the copy ctor.
@Override
public GroupScan clone(List<SchemaPath> columns) {
ParquetGroupScan newScan = new ParquetGroupScan(this);
newScan.columns = columns;
return newScan;
}
// Clone narrowed to a file selection; re-runs init() to rebuild stats for the subset.
@Override
public FileGroupScan clone(FileSelection selection) throws IOException {
ParquetGroupScan newScan = new ParquetGroupScan(this);
newScan.modifyFileSelection(selection);
newScan.init();
return newScan;
}
// Enables the planner to call applyLimit() on this scan.
@Override
public boolean supportsLimitPushdown() {
return true;
}
/**
 * Prunes the scan for a LIMIT: selects a prefix of row groups whose cumulative
 * row count covers {@code maxRecords}, then rebuilds the scan over only the
 * files those row groups belong to.
 *
 * @return a pruned copy of this scan, or null when no pruning is possible
 */
@Override
public GroupScan applyLimit(long maxRecords) {
  // (removed a vacuous Preconditions.checkArgument(rowGroupInfos.size() >= 0);
  //  a collection size can never be negative)
  maxRecords = Math.max(maxRecords, 1); // Make sure it request at least 1 row -> 1 rowGroup.
  // further optimization : minimize # of files chosen, or the affinity of files chosen.
  long count = 0;
  int index = 0;
  for (RowGroupInfo rowGroupInfo : rowGroupInfos) {
    if (count >= maxRecords) {
      break;
    }
    count += rowGroupInfo.getRowCount();
    index++;
  }
  Set<String> fileNames = Sets.newHashSet(); // HashSet keeps a fileName unique.
  for (RowGroupInfo rowGroupInfo : rowGroupInfos.subList(0, index)) {
    fileNames.add(rowGroupInfo.getPath());
  }
  if (fileNames.size() == fileSet.size()) {
    // There is no reduction of rowGroups. Return the original groupScan.
    logger.debug("applyLimit() does not apply!");
    return null;
  }
  try {
    FileSelection newSelection = new FileSelection(null, Lists.newArrayList(fileNames), getSelectionRoot());
    logger.debug("applyLimit() reduce parquet file # from {} to {}", fileSet.size(), fileNames.size());
    return this.clone(newSelection);
  } catch (IOException e) {
    // BUGFIX: pass the exception as the final argument (no "{}" placeholder) so
    // SLF4J records the full stack trace instead of just e.toString().
    logger.warn("Could not apply rowcount based prune due to Exception", e);
    return null;
  }
}
// Any projection can be pushed into this scan.
@Override
@JsonIgnore
public boolean canPushdownProjects(List<SchemaPath> columns) {
return true;
}
/**
 * Return column value count for the specified column. If the scan does not contain such a column, return 0.
 * A value of GroupScan.NO_COLUMN_STATS means the count is unknown rather than zero.
 */
@Override
public long getColumnValueCount(SchemaPath column) {
return columnValueCounts.containsKey(column) ? columnValueCounts.get(column) : 0;
}
/** Returns a snapshot of the columns currently qualifying as partition columns. */
@Override
public List<SchemaPath> getPartitionColumns() {
  List<SchemaPath> partitionColumns = new ArrayList<>(columnTypeMap.keySet());
  return partitionColumns;
}
}
| |
/**
* Copyright The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.hadoop.hbase.client.CompactionState;
import org.apache.hadoop.hbase.util.Strings;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
@InterfaceAudience.Private
public final class RegionMetricsBuilder {
/** Converts every region load carried by the response into a {@link RegionMetrics}. */
public static List<RegionMetrics> toRegionMetrics(
    AdminProtos.GetRegionLoadResponse regionLoadResponse) {
  return regionLoadResponse.getRegionLoadsList()
      .stream()
      .map(RegionMetricsBuilder::toRegionMetrics)
      .collect(Collectors.toList());
}
/**
 * Converts a protobuf RegionLoad into a RegionMetrics, mapping each protobuf field
 * onto the corresponding builder setter (KB/MB fields become {@code Size} values).
 * Optional protobuf fields fall back to 0 when absent.
 */
public static RegionMetrics toRegionMetrics(ClusterStatusProtos.RegionLoad regionLoadPB) {
return RegionMetricsBuilder
.newBuilder(regionLoadPB.getRegionSpecifier().getValue().toByteArray())
.setBloomFilterSize(new Size(regionLoadPB.getTotalStaticBloomSizeKB(), Size.Unit.KILOBYTE))
.setCompactedCellCount(regionLoadPB.getCurrentCompactedKVs())
.setCompactingCellCount(regionLoadPB.getTotalCompactingKVs())
.setCompletedSequenceId(regionLoadPB.getCompleteSequenceId())
.setDataLocality(regionLoadPB.hasDataLocality() ? regionLoadPB.getDataLocality() : 0.0f)
.setDataLocalityForSsd(regionLoadPB.hasDataLocalityForSsd() ?
regionLoadPB.getDataLocalityForSsd() : 0.0f)
.setBlocksLocalWeight(regionLoadPB.hasBlocksLocalWeight() ?
regionLoadPB.getBlocksLocalWeight() : 0)
.setBlocksLocalWithSsdWeight(regionLoadPB.hasBlocksLocalWithSsdWeight() ?
regionLoadPB.getBlocksLocalWithSsdWeight() : 0)
.setBlocksTotalWeight(regionLoadPB.getBlocksTotalWeight())
.setCompactionState(ProtobufUtil.createCompactionStateForRegionLoad(
regionLoadPB.getCompactionState()))
.setFilteredReadRequestCount(regionLoadPB.getFilteredReadRequestsCount())
.setStoreFileUncompressedDataIndexSize(new Size(regionLoadPB.getTotalStaticIndexSizeKB(),
Size.Unit.KILOBYTE))
.setLastMajorCompactionTimestamp(regionLoadPB.getLastMajorCompactionTs())
.setMemStoreSize(new Size(regionLoadPB.getMemStoreSizeMB(), Size.Unit.MEGABYTE))
.setReadRequestCount(regionLoadPB.getReadRequestsCount())
.setCpRequestCount(regionLoadPB.getCpRequestsCount())
.setWriteRequestCount(regionLoadPB.getWriteRequestsCount())
.setStoreFileIndexSize(new Size(regionLoadPB.getStorefileIndexSizeKB(),
Size.Unit.KILOBYTE))
.setStoreFileRootLevelIndexSize(new Size(regionLoadPB.getRootIndexSizeKB(),
Size.Unit.KILOBYTE))
.setStoreCount(regionLoadPB.getStores())
.setStoreFileCount(regionLoadPB.getStorefiles())
.setStoreRefCount(regionLoadPB.getStoreRefCount())
.setMaxCompactedStoreFileRefCount(regionLoadPB.getMaxCompactedStoreFileRefCount())
.setStoreFileSize(new Size(regionLoadPB.getStorefileSizeMB(), Size.Unit.MEGABYTE))
.setStoreSequenceIds(regionLoadPB.getStoreCompleteSequenceIdList().stream()
.collect(Collectors.toMap(
(ClusterStatusProtos.StoreSequenceId s) -> s.getFamilyName().toByteArray(),
ClusterStatusProtos.StoreSequenceId::getSequenceId)))
.setUncompressedStoreFileSize(
new Size(regionLoadPB.getStoreUncompressedSizeMB(),Size.Unit.MEGABYTE))
.build();
}
/** Converts family-name → sequence-id entries into their protobuf representation. */
private static List<ClusterStatusProtos.StoreSequenceId> toStoreSequenceId(
    Map<byte[], Long> ids) {
  return ids.entrySet().stream()
      .map(entry -> {
        ClusterStatusProtos.StoreSequenceId.Builder builder =
            ClusterStatusProtos.StoreSequenceId.newBuilder();
        builder.setFamilyName(UnsafeByteOperations.unsafeWrap(entry.getKey()));
        builder.setSequenceId(entry.getValue());
        return builder.build();
      })
      .collect(Collectors.toList());
}
/**
 * Converts a RegionMetrics back into its protobuf RegionLoad representation.
 * Note the narrowing casts: Size values are converted to the int/long KB/MB
 * widths defined by the protobuf fields.
 */
public static ClusterStatusProtos.RegionLoad toRegionLoad(RegionMetrics regionMetrics) {
return ClusterStatusProtos.RegionLoad.newBuilder()
.setRegionSpecifier(HBaseProtos.RegionSpecifier
.newBuilder().setType(HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME)
.setValue(UnsafeByteOperations.unsafeWrap(regionMetrics.getRegionName()))
.build())
.setTotalStaticBloomSizeKB((int) regionMetrics.getBloomFilterSize()
.get(Size.Unit.KILOBYTE))
.setCurrentCompactedKVs(regionMetrics.getCompactedCellCount())
.setTotalCompactingKVs(regionMetrics.getCompactingCellCount())
.setCompleteSequenceId(regionMetrics.getCompletedSequenceId())
.setDataLocality(regionMetrics.getDataLocality())
.setFilteredReadRequestsCount(regionMetrics.getFilteredReadRequestCount())
.setTotalStaticIndexSizeKB((int) regionMetrics.getStoreFileUncompressedDataIndexSize()
.get(Size.Unit.KILOBYTE))
.setLastMajorCompactionTs(regionMetrics.getLastMajorCompactionTimestamp())
.setMemStoreSizeMB((int) regionMetrics.getMemStoreSize().get(Size.Unit.MEGABYTE))
.setReadRequestsCount(regionMetrics.getReadRequestCount())
.setCpRequestsCount(regionMetrics.getCpRequestCount())
.setWriteRequestsCount(regionMetrics.getWriteRequestCount())
.setStorefileIndexSizeKB((long) regionMetrics.getStoreFileIndexSize()
.get(Size.Unit.KILOBYTE))
.setRootIndexSizeKB((int) regionMetrics.getStoreFileRootLevelIndexSize()
.get(Size.Unit.KILOBYTE))
.setStores(regionMetrics.getStoreCount())
.setStorefiles(regionMetrics.getStoreFileCount())
.setStoreRefCount(regionMetrics.getStoreRefCount())
.setMaxCompactedStoreFileRefCount(regionMetrics.getMaxCompactedStoreFileRefCount())
.setStorefileSizeMB((int) regionMetrics.getStoreFileSize().get(Size.Unit.MEGABYTE))
.addAllStoreCompleteSequenceId(toStoreSequenceId(regionMetrics.getStoreSequenceId()))
.setStoreUncompressedSizeMB(
(int) regionMetrics.getUncompressedStoreFileSize().get(Size.Unit.MEGABYTE))
.build();
}
/** Static factory: the only way to obtain a builder (constructor is private). */
public static RegionMetricsBuilder newBuilder(byte[] name) {
return new RegionMetricsBuilder(name);
}
// Region name (encoded bytes); the only mandatory field.
private final byte[] name;
// Store/file counters.
private int storeCount;
private int storeFileCount;
private int storeRefCount;
private int maxCompactedStoreFileRefCount;
// Compaction progress counters.
private long compactingCellCount;
private long compactedCellCount;
// Sizes default to Size.ZERO so build() never receives null Size fields.
private Size storeFileSize = Size.ZERO;
private Size memStoreSize = Size.ZERO;
private Size indexSize = Size.ZERO;
private Size rootLevelIndexSize = Size.ZERO;
private Size uncompressedDataIndexSize = Size.ZERO;
private Size bloomFilterSize = Size.ZERO;
private Size uncompressedStoreFileSize = Size.ZERO;
// Request counters.
private long writeRequestCount;
private long readRequestCount;
private long cpRequestCount;
private long filteredReadRequestCount;
private long completedSequenceId;
private Map<byte[], Long> storeSequenceIds = Collections.emptyMap();
// Locality / block-weight metrics.
private float dataLocality;
private long lastMajorCompactionTimestamp;
private float dataLocalityForSsd;
private long blocksLocalWeight;
private long blocksLocalWithSsdWeight;
private long blocksTotalWeight;
private CompactionState compactionState;
private RegionMetricsBuilder(byte[] name) {
this.name = name;
}
// Fluent setters; parameters are named after the fields they populate.
public RegionMetricsBuilder setStoreCount(int storeCount) {
  this.storeCount = storeCount;
  return this;
}
public RegionMetricsBuilder setStoreFileCount(int storeFileCount) {
  this.storeFileCount = storeFileCount;
  return this;
}
public RegionMetricsBuilder setStoreRefCount(int storeRefCount) {
  this.storeRefCount = storeRefCount;
  return this;
}
public RegionMetricsBuilder setMaxCompactedStoreFileRefCount(int maxCompactedStoreFileRefCount) {
  this.maxCompactedStoreFileRefCount = maxCompactedStoreFileRefCount;
  return this;
}
public RegionMetricsBuilder setCompactingCellCount(long compactingCellCount) {
  this.compactingCellCount = compactingCellCount;
  return this;
}
public RegionMetricsBuilder setCompactedCellCount(long compactedCellCount) {
  this.compactedCellCount = compactedCellCount;
  return this;
}
public RegionMetricsBuilder setStoreFileSize(Size storeFileSize) {
  this.storeFileSize = storeFileSize;
  return this;
}
public RegionMetricsBuilder setMemStoreSize(Size memStoreSize) {
  this.memStoreSize = memStoreSize;
  return this;
}
public RegionMetricsBuilder setStoreFileIndexSize(Size indexSize) {
  this.indexSize = indexSize;
  return this;
}
public RegionMetricsBuilder setStoreFileRootLevelIndexSize(Size rootLevelIndexSize) {
  this.rootLevelIndexSize = rootLevelIndexSize;
  return this;
}
public RegionMetricsBuilder setStoreFileUncompressedDataIndexSize(Size uncompressedDataIndexSize) {
  this.uncompressedDataIndexSize = uncompressedDataIndexSize;
  return this;
}
public RegionMetricsBuilder setBloomFilterSize(Size bloomFilterSize) {
  this.bloomFilterSize = bloomFilterSize;
  return this;
}
public RegionMetricsBuilder setUncompressedStoreFileSize(Size uncompressedStoreFileSize) {
  this.uncompressedStoreFileSize = uncompressedStoreFileSize;
  return this;
}
public RegionMetricsBuilder setWriteRequestCount(long writeRequestCount) {
  this.writeRequestCount = writeRequestCount;
  return this;
}
public RegionMetricsBuilder setReadRequestCount(long readRequestCount) {
  this.readRequestCount = readRequestCount;
  return this;
}
public RegionMetricsBuilder setCpRequestCount(long cpRequestCount) {
  this.cpRequestCount = cpRequestCount;
  return this;
}
public RegionMetricsBuilder setFilteredReadRequestCount(long filteredReadRequestCount) {
  this.filteredReadRequestCount = filteredReadRequestCount;
  return this;
}
public RegionMetricsBuilder setCompletedSequenceId(long completedSequenceId) {
  this.completedSequenceId = completedSequenceId;
  return this;
}
public RegionMetricsBuilder setStoreSequenceIds(Map<byte[], Long> storeSequenceIds) {
  this.storeSequenceIds = storeSequenceIds;
  return this;
}
public RegionMetricsBuilder setDataLocality(float dataLocality) {
  this.dataLocality = dataLocality;
  return this;
}
public RegionMetricsBuilder setLastMajorCompactionTimestamp(long lastMajorCompactionTimestamp) {
  this.lastMajorCompactionTimestamp = lastMajorCompactionTimestamp;
  return this;
}
public RegionMetricsBuilder setDataLocalityForSsd(float dataLocalityForSsd) {
  this.dataLocalityForSsd = dataLocalityForSsd;
  return this;
}
public RegionMetricsBuilder setBlocksLocalWeight(long blocksLocalWeight) {
  this.blocksLocalWeight = blocksLocalWeight;
  return this;
}
public RegionMetricsBuilder setBlocksLocalWithSsdWeight(long blocksLocalWithSsdWeight) {
  this.blocksLocalWithSsdWeight = blocksLocalWithSsdWeight;
  return this;
}
public RegionMetricsBuilder setBlocksTotalWeight(long blocksTotalWeight) {
  this.blocksTotalWeight = blocksTotalWeight;
  return this;
}
public RegionMetricsBuilder setCompactionState(CompactionState compactionState) {
  this.compactionState = compactionState;
  return this;
}
/**
 * Builds the immutable RegionMetrics. Unset numeric fields default to 0, Size
 * fields to Size.ZERO, and storeSequenceIds to an empty map.
 */
public RegionMetrics build() {
return new RegionMetricsImpl(name,
storeCount,
storeFileCount,
storeRefCount,
maxCompactedStoreFileRefCount,
compactingCellCount,
compactedCellCount,
storeFileSize,
memStoreSize,
indexSize,
rootLevelIndexSize,
uncompressedDataIndexSize,
bloomFilterSize,
uncompressedStoreFileSize,
writeRequestCount,
readRequestCount,
cpRequestCount,
filteredReadRequestCount,
completedSequenceId,
storeSequenceIds,
dataLocality,
lastMajorCompactionTimestamp,
dataLocalityForSsd,
blocksLocalWeight,
blocksLocalWithSsdWeight,
blocksTotalWeight,
compactionState);
}
private static class RegionMetricsImpl implements RegionMetrics {
private final byte[] name;
private final int storeCount;
private final int storeFileCount;
private final int storeRefCount;
private final int maxCompactedStoreFileRefCount;
private final long compactingCellCount;
private final long compactedCellCount;
private final Size storeFileSize;
private final Size memStoreSize;
private final Size indexSize;
private final Size rootLevelIndexSize;
private final Size uncompressedDataIndexSize;
private final Size bloomFilterSize;
private final Size uncompressedStoreFileSize;
private final long writeRequestCount;
private final long readRequestCount;
private final long cpRequestCount;
private final long filteredReadRequestCount;
private final long completedSequenceId;
private final Map<byte[], Long> storeSequenceIds;
private final float dataLocality;
private final long lastMajorCompactionTimestamp;
private final float dataLocalityForSsd;
private final long blocksLocalWeight;
private final long blocksLocalWithSsdWeight;
private final long blocksTotalWeight;
private final CompactionState compactionState;
RegionMetricsImpl(byte[] name,
int storeCount,
int storeFileCount,
int storeRefCount,
int maxCompactedStoreFileRefCount,
final long compactingCellCount,
long compactedCellCount,
Size storeFileSize,
Size memStoreSize,
Size indexSize,
Size rootLevelIndexSize,
Size uncompressedDataIndexSize,
Size bloomFilterSize,
Size uncompressedStoreFileSize,
long writeRequestCount,
long readRequestCount,
long cpRequestCount,
long filteredReadRequestCount,
long completedSequenceId,
Map<byte[], Long> storeSequenceIds,
float dataLocality,
long lastMajorCompactionTimestamp,
float dataLocalityForSsd,
long blocksLocalWeight,
long blocksLocalWithSsdWeight,
long blocksTotalWeight,
CompactionState compactionState) {
this.name = Preconditions.checkNotNull(name);
this.storeCount = storeCount;
this.storeFileCount = storeFileCount;
this.storeRefCount = storeRefCount;
this.maxCompactedStoreFileRefCount = maxCompactedStoreFileRefCount;
this.compactingCellCount = compactingCellCount;
this.compactedCellCount = compactedCellCount;
this.storeFileSize = Preconditions.checkNotNull(storeFileSize);
this.memStoreSize = Preconditions.checkNotNull(memStoreSize);
this.indexSize = Preconditions.checkNotNull(indexSize);
this.rootLevelIndexSize = Preconditions.checkNotNull(rootLevelIndexSize);
this.uncompressedDataIndexSize = Preconditions.checkNotNull(uncompressedDataIndexSize);
this.bloomFilterSize = Preconditions.checkNotNull(bloomFilterSize);
this.uncompressedStoreFileSize = Preconditions.checkNotNull(uncompressedStoreFileSize);
this.writeRequestCount = writeRequestCount;
this.readRequestCount = readRequestCount;
this.cpRequestCount = cpRequestCount;
this.filteredReadRequestCount = filteredReadRequestCount;
this.completedSequenceId = completedSequenceId;
this.storeSequenceIds = Preconditions.checkNotNull(storeSequenceIds);
this.dataLocality = dataLocality;
this.lastMajorCompactionTimestamp = lastMajorCompactionTimestamp;
this.dataLocalityForSsd = dataLocalityForSsd;
this.blocksLocalWeight = blocksLocalWeight;
this.blocksLocalWithSsdWeight = blocksLocalWithSsdWeight;
this.blocksTotalWeight = blocksTotalWeight;
this.compactionState = compactionState;
}
@Override
public byte[] getRegionName() {
return name;
}
@Override
public int getStoreCount() {
return storeCount;
}
@Override
public int getStoreFileCount() {
return storeFileCount;
}
@Override
public int getStoreRefCount() {
return storeRefCount;
}
@Override
public int getMaxCompactedStoreFileRefCount() {
return maxCompactedStoreFileRefCount;
}
@Override
public Size getStoreFileSize() {
return storeFileSize;
}
@Override
public Size getMemStoreSize() {
return memStoreSize;
}
@Override
public long getReadRequestCount() {
return readRequestCount;
}
@Override
public long getCpRequestCount() {
return cpRequestCount;
}
@Override
public long getFilteredReadRequestCount() {
return filteredReadRequestCount;
}
@Override
public long getWriteRequestCount() {
return writeRequestCount;
}
@Override
public Size getStoreFileIndexSize() {
return indexSize;
}
@Override
public Size getStoreFileRootLevelIndexSize() {
return rootLevelIndexSize;
}
@Override
public Size getStoreFileUncompressedDataIndexSize() {
return uncompressedDataIndexSize;
}
@Override
public Size getBloomFilterSize() {
return bloomFilterSize;
}
@Override
public long getCompactingCellCount() {
return compactingCellCount;
}
@Override
public long getCompactedCellCount() {
return compactedCellCount;
}
@Override
public long getCompletedSequenceId() {
return completedSequenceId;
}
@Override
public Map<byte[], Long> getStoreSequenceId() {
return Collections.unmodifiableMap(storeSequenceIds);
}
@Override
public Size getUncompressedStoreFileSize() {
return uncompressedStoreFileSize;
}
@Override
public float getDataLocality() {
return dataLocality;
}
@Override
public long getLastMajorCompactionTimestamp() {
return lastMajorCompactionTimestamp;
}
@Override
public float getDataLocalityForSsd() {
return dataLocalityForSsd;
}
@Override
public long getBlocksLocalWeight() {
return blocksLocalWeight;
}
@Override
public long getBlocksLocalWithSsdWeight() {
return blocksLocalWithSsdWeight;
}
@Override
public long getBlocksTotalWeight() {
return blocksTotalWeight;
}
@Override
public CompactionState getCompactionState() {
return compactionState;
}
@Override
public String toString() {
StringBuilder sb = Strings.appendKeyValue(new StringBuilder(), "storeCount",
this.getStoreCount());
Strings.appendKeyValue(sb, "storeFileCount",
this.getStoreFileCount());
Strings.appendKeyValue(sb, "storeRefCount",
this.getStoreRefCount());
Strings.appendKeyValue(sb, "maxCompactedStoreFileRefCount",
this.getMaxCompactedStoreFileRefCount());
Strings.appendKeyValue(sb, "uncompressedStoreFileSize",
this.getUncompressedStoreFileSize());
Strings.appendKeyValue(sb, "lastMajorCompactionTimestamp",
this.getLastMajorCompactionTimestamp());
Strings.appendKeyValue(sb, "storeFileSize",
this.getStoreFileSize());
if (this.getUncompressedStoreFileSize().get() != 0) {
Strings.appendKeyValue(sb, "compressionRatio",
String.format("%.4f",
(float) this.getStoreFileSize().get(Size.Unit.MEGABYTE) /
(float) this.getUncompressedStoreFileSize().get(Size.Unit.MEGABYTE)));
}
Strings.appendKeyValue(sb, "memStoreSize",
this.getMemStoreSize());
Strings.appendKeyValue(sb, "readRequestCount",
this.getReadRequestCount());
Strings.appendKeyValue(sb, "cpRequestCount",
this.getCpRequestCount());
Strings.appendKeyValue(sb, "writeRequestCount",
this.getWriteRequestCount());
Strings.appendKeyValue(sb, "rootLevelIndexSize",
this.getStoreFileRootLevelIndexSize());
Strings.appendKeyValue(sb, "uncompressedDataIndexSize",
this.getStoreFileUncompressedDataIndexSize());
Strings.appendKeyValue(sb, "bloomFilterSize",
this.getBloomFilterSize());
Strings.appendKeyValue(sb, "compactingCellCount",
this.getCompactingCellCount());
Strings.appendKeyValue(sb, "compactedCellCount",
this.getCompactedCellCount());
float compactionProgressPct = Float.NaN;
if (this.getCompactingCellCount() > 0) {
compactionProgressPct = ((float) this.getCompactedCellCount() /
(float) this.getCompactingCellCount());
}
Strings.appendKeyValue(sb, "compactionProgressPct",
compactionProgressPct);
Strings.appendKeyValue(sb, "completedSequenceId",
this.getCompletedSequenceId());
Strings.appendKeyValue(sb, "dataLocality",
this.getDataLocality());
Strings.appendKeyValue(sb, "dataLocalityForSsd",
this.getDataLocalityForSsd());
Strings.appendKeyValue(sb, "blocksLocalWeight",
blocksLocalWeight);
Strings.appendKeyValue(sb, "blocksLocalWithSsdWeight",
blocksLocalWithSsdWeight);
Strings.appendKeyValue(sb, "blocksTotalWeight",
blocksTotalWeight);
Strings.appendKeyValue(sb, "compactionState",
compactionState);
return sb.toString();
}
}
}
| |
/*
* ProGuard -- shrinking, optimization, obfuscation, and preverification
* of Java bytecode.
*
* Copyright (c) 2002-2014 Eric Lafortune (eric@graphics.cornell.edu)
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package proguard.io;
import proguard.classfile.ClassConstants;
import java.io.*;
import java.util.*;
import java.util.jar.*;
import java.util.zip.*;
/**
* This DataEntryWriter sends data entries to a given jar/zip file.
* The manifest and comment properties can optionally be set.
*
* @author Eric Lafortune
*/
public class JarWriter implements DataEntryWriter, Finisher
{
    private final DataEntryWriter dataEntryWriter;
    private final Manifest manifest;
    private final String comment;

    private OutputStream currentParentOutputStream;
    private ZipOutputStream currentJarOutputStream;
    private Finisher currentFinisher;
    private DataEntry currentDataEntry;

    // The names of the jar entries that are already in the jar.
    // Review fix: typed as Set<String> (was a raw Set) so the element type
    // is checked at compile time.
    private final Set<String> jarEntryNames = new HashSet<String>();


    /**
     * Creates a new JarWriter without manifest or comment.
     */
    public JarWriter(DataEntryWriter dataEntryWriter)
    {
        this(dataEntryWriter, null, null);
    }


    /**
     * Creates a new JarWriter.
     */
    public JarWriter(DataEntryWriter dataEntryWriter,
                     Manifest        manifest,
                     String          comment)
    {
        this.dataEntryWriter = dataEntryWriter;
        this.manifest        = manifest;
        this.comment         = comment;
    }


    // Implementations for DataEntryWriter.

    public boolean createDirectory(DataEntry dataEntry) throws IOException
    {
        // Make sure we can start with a new entry.
        if (!prepareEntry(dataEntry))
        {
            return false;
        }

        // Close the previous ZIP entry, if any.
        closeEntry();

        // Get the directory entry name.
        String name = dataEntry.getName() + ClassConstants.PACKAGE_SEPARATOR;

        // We have to check if the name is already used, because
        // ZipOutputStream doesn't handle this case properly (it throws
        // an exception which can be caught, but the ZipDataEntry is
        // remembered anyway).
        if (jarEntryNames.add(name))
        {
            // Create a new directory entry.
            currentJarOutputStream.putNextEntry(new ZipEntry(name));
            currentJarOutputStream.closeEntry();
        }

        // Clear the finisher.
        currentFinisher  = null;
        currentDataEntry = null;

        return true;
    }


    public OutputStream getOutputStream(DataEntry dataEntry) throws IOException
    {
        return getOutputStream(dataEntry, null);
    }


    public OutputStream getOutputStream(DataEntry dataEntry,
                                        Finisher  finisher) throws IOException
    {
        // Make sure we can start with a new entry.
        if (!prepareEntry(dataEntry))
        {
            return null;
        }

        // Do we need a new entry?
        if (!dataEntry.equals(currentDataEntry))
        {
            // Close the previous ZIP entry, if any.
            closeEntry();

            // Get the entry name.
            String name = dataEntry.getName();

            // We have to check if the name is already used, because
            // ZipOutputStream doesn't handle this case properly (it throws
            // an exception which can be caught, but the ZipDataEntry is
            // remembered anyway).
            if (!jarEntryNames.add(name))
            {
                throw new IOException("Duplicate zip entry ["+dataEntry+"]");
            }

            // Create a new entry.
            currentJarOutputStream.putNextEntry(new ZipEntry(name));

            // Set up the finisher for the entry.
            currentFinisher  = finisher;
            currentDataEntry = dataEntry;
        }

        return currentJarOutputStream;
    }


    public void finish() throws IOException
    {
        // Finish the entire ZIP stream, if any.
        if (currentJarOutputStream != null)
        {
            // Close the previous ZIP entry, if any.
            closeEntry();

            // Finish the entire ZIP stream.
            currentJarOutputStream.finish();
            currentJarOutputStream    = null;
            currentParentOutputStream = null;
            jarEntryNames.clear();
        }
    }


    public void close() throws IOException
    {
        // Close the parent stream.
        dataEntryWriter.close();
    }


    // Small utility methods.

    /**
     * Makes sure the current output stream is set up for the given entry.
     */
    private boolean prepareEntry(DataEntry dataEntry) throws IOException
    {
        // Get the parent stream, new or existing.
        // This may finish our own jar output stream.
        OutputStream parentOutputStream =
            dataEntryWriter.getOutputStream(dataEntry.getParent(), this);

        // Did we get a stream?
        if (parentOutputStream == null)
        {
            return false;
        }

        // Do we need a new stream?
        if (currentParentOutputStream == null)
        {
            currentParentOutputStream = parentOutputStream;

            // Create a new jar stream, with a manifest, if set.
            currentJarOutputStream = manifest != null ?
                new JarOutputStream(parentOutputStream, manifest) :
                new ZipOutputStream(parentOutputStream);

            // Add a comment, if set.
            if (comment != null)
            {
                currentJarOutputStream.setComment(comment);
            }
        }

        return true;
    }


    /**
     * Closes the previous ZIP entry, if any.
     */
    private void closeEntry() throws IOException
    {
        if (currentDataEntry != null)
        {
            // Let any finisher finish up first.
            if (currentFinisher != null)
            {
                currentFinisher.finish();
                currentFinisher = null;
            }

            currentJarOutputStream.closeEntry();
            currentDataEntry = null;
        }
    }
}
| |
/*
* Copyright (C) 2006 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.base;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.annotations.GwtCompatible;
import java.io.Serializable;
import javax.annotation.Nullable;
/**
* Utility class for converting between various ASCII case formats. Behavior is undefined for
* non-ASCII input.
*
* @author Mike Bostock
* @since 1.0
*/
@GwtCompatible
public enum CaseFormat {
  /**
   * Hyphenated variable naming convention, e.g., "lower-hyphen".
   */
  LOWER_HYPHEN(CharMatcher.is('-'), "-") {
    @Override
    String normalizeWord(String word) {
      return Ascii.toLowerCase(word);
    }
    @Override
    String convert(CaseFormat format, String s) {
      // Fast paths: hyphen <-> underscore formats differ only by separator
      // character (and casing), so a character replace avoids the generic
      // word-splitting loop.
      if (format == LOWER_UNDERSCORE) {
        return s.replace('-', '_');
      }
      if (format == UPPER_UNDERSCORE) {
        return Ascii.toUpperCase(s.replace('-', '_'));
      }
      return super.convert(format, s);
    }
  },
  /**
   * C++ variable naming convention, e.g., "lower_underscore".
   */
  LOWER_UNDERSCORE(CharMatcher.is('_'), "_") {
    @Override
    String normalizeWord(String word) {
      return Ascii.toLowerCase(word);
    }
    @Override
    String convert(CaseFormat format, String s) {
      // Fast paths mirroring LOWER_HYPHEN's: avoid the generic loop when
      // only the separator character and/or casing differ.
      if (format == LOWER_HYPHEN) {
        return s.replace('_', '-');
      }
      if (format == UPPER_UNDERSCORE) {
        return Ascii.toUpperCase(s);
      }
      return super.convert(format, s);
    }
  },
  /**
   * Java variable naming convention, e.g., "lowerCamel".
   */
  LOWER_CAMEL(CharMatcher.inRange('A', 'Z'), "") {
    @Override
    String normalizeWord(String word) {
      return firstCharOnlyToUpper(word);
    }
  },
  /**
   * Java and C++ class naming convention, e.g., "UpperCamel".
   */
  UPPER_CAMEL(CharMatcher.inRange('A', 'Z'), "") {
    @Override
    String normalizeWord(String word) {
      return firstCharOnlyToUpper(word);
    }
  },
  /**
   * Java and C++ constant naming convention, e.g., "UPPER_UNDERSCORE".
   */
  UPPER_UNDERSCORE(CharMatcher.is('_'), "_") {
    @Override
    String normalizeWord(String word) {
      return Ascii.toUpperCase(word);
    }
    @Override
    String convert(CaseFormat format, String s) {
      // Fast paths: see LOWER_HYPHEN/LOWER_UNDERSCORE above.
      if (format == LOWER_HYPHEN) {
        return Ascii.toLowerCase(s.replace('_', '-'));
      }
      if (format == LOWER_UNDERSCORE) {
        return Ascii.toLowerCase(s);
      }
      return super.convert(format, s);
    }
  };

  // Matches the position where a new word starts in this format (a separator
  // character, or an uppercase letter for the camel formats).
  private final CharMatcher wordBoundary;
  // Separator inserted between words when producing this format ("" for camel).
  private final String wordSeparator;

  CaseFormat(CharMatcher wordBoundary, String wordSeparator) {
    this.wordBoundary = wordBoundary;
    this.wordSeparator = wordSeparator;
  }

  /**
   * Converts the specified {@code String str} from this format to the specified {@code format}. A
   * "best effort" approach is taken; if {@code str} does not conform to the assumed format, then
   * the behavior of this method is undefined but we make a reasonable effort at converting anyway.
   */
  public final String to(CaseFormat format, String str) {
    checkNotNull(format);
    checkNotNull(str);
    return (format == this) ? str : convert(format, str);
  }

  /**
   * Enum values can override for performance reasons.
   */
  String convert(CaseFormat format, String s) {
    // deal with camel conversion
    StringBuilder out = null;
    int i = 0;  // start index of the current word in s
    int j = -1; // index of the next word boundary; ++j skips the boundary just handled
    while ((j = wordBoundary.indexIn(s, ++j)) != -1) {
      if (i == 0) {
        // First word: lazily allocate the output buffer.
        // include some extra space for separators
        out = new StringBuilder(s.length() + 4 * wordSeparator.length());
        out.append(format.normalizeFirstWord(s.substring(i, j)));
      } else {
        out.append(format.normalizeWord(s.substring(i, j)));
      }
      out.append(format.wordSeparator);
      // Skip past this format's separator (length 0 for camel formats, in
      // which case the boundary character itself starts the next word).
      i = j + wordSeparator.length();
    }
    // No boundary found: the whole string is a single word.
    return (i == 0)
        ? format.normalizeFirstWord(s)
        : out.append(format.normalizeWord(s.substring(i))).toString();
  }

  /**
   * Returns a {@code Converter} that converts strings from this format to {@code targetFormat}.
   *
   * @since 16.0
   */
  public Converter<String, String> converterTo(CaseFormat targetFormat) {
    return new StringConverter(this, targetFormat);
  }

  /** Serializable Converter backing {@link #converterTo}. */
  private static final class StringConverter extends Converter<String, String>
      implements Serializable {
    private final CaseFormat sourceFormat;
    private final CaseFormat targetFormat;

    StringConverter(CaseFormat sourceFormat, CaseFormat targetFormat) {
      this.sourceFormat = checkNotNull(sourceFormat);
      this.targetFormat = checkNotNull(targetFormat);
    }
    @Override
    protected String doForward(String s) {
      return sourceFormat.to(targetFormat, s);
    }
    @Override
    protected String doBackward(String s) {
      return targetFormat.to(sourceFormat, s);
    }
    @Override
    public boolean equals(@Nullable Object object) {
      if (object instanceof StringConverter) {
        StringConverter that = (StringConverter) object;
        return sourceFormat.equals(that.sourceFormat) && targetFormat.equals(that.targetFormat);
      }
      return false;
    }
    @Override
    public int hashCode() {
      return sourceFormat.hashCode() ^ targetFormat.hashCode();
    }
    @Override
    public String toString() {
      return sourceFormat + ".converterTo(" + targetFormat + ")";
    }
    private static final long serialVersionUID = 0L;
  }

  // How this format renders a word that is not the first word of the result.
  abstract String normalizeWord(String word);

  // The first word differs only for LOWER_CAMEL, whose leading word is
  // all-lowercase rather than capitalized.
  private String normalizeFirstWord(String word) {
    return (this == LOWER_CAMEL) ? Ascii.toLowerCase(word) : normalizeWord(word);
  }

  // Capitalizes only the first character; the rest is lowercased.
  private static String firstCharOnlyToUpper(String word) {
    return (word.isEmpty())
        ? word
        : Ascii.toUpperCase(word.charAt(0)) + Ascii.toLowerCase(word.substring(1));
  }
}
| |
/**
*/
package gluemodel.substationStandard.Dataclasses.impl;
import gluemodel.substationStandard.Dataclasses.DataclassesPackage;
import gluemodel.substationStandard.Dataclasses.DetailQual;
import gluemodel.substationStandard.Dataclasses.Quality;
import gluemodel.substationStandard.Enumerations.SourceKind;
import gluemodel.substationStandard.Enumerations.ValidityKind;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Quality</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link gluemodel.substationStandard.Dataclasses.impl.QualityImpl#isTest <em>Test</em>}</li>
* <li>{@link gluemodel.substationStandard.Dataclasses.impl.QualityImpl#isOperatorBlocked <em>Operator Blocked</em>}</li>
* <li>{@link gluemodel.substationStandard.Dataclasses.impl.QualityImpl#getValidity <em>Validity</em>}</li>
* <li>{@link gluemodel.substationStandard.Dataclasses.impl.QualityImpl#getSource <em>Source</em>}</li>
* <li>{@link gluemodel.substationStandard.Dataclasses.impl.QualityImpl#getDetailQual <em>Detail Qual</em>}</li>
* </ul>
*
* @generated
*/
// NOTE(review): EMF-generated class — regenerating the model will overwrite
// manual edits unless the affected members are marked "@generated NOT".
public class QualityImpl extends MinimalEObjectImpl.Container implements Quality {
    /**
     * The default value of the '{@link #isTest() <em>Test</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #isTest()
     * @generated
     * @ordered
     */
    protected static final boolean TEST_EDEFAULT = false;
    /**
     * The cached value of the '{@link #isTest() <em>Test</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #isTest()
     * @generated
     * @ordered
     */
    protected boolean test = TEST_EDEFAULT;
    /**
     * The default value of the '{@link #isOperatorBlocked() <em>Operator Blocked</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #isOperatorBlocked()
     * @generated
     * @ordered
     */
    protected static final boolean OPERATOR_BLOCKED_EDEFAULT = false;
    /**
     * The cached value of the '{@link #isOperatorBlocked() <em>Operator Blocked</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #isOperatorBlocked()
     * @generated
     * @ordered
     */
    protected boolean operatorBlocked = OPERATOR_BLOCKED_EDEFAULT;
    /**
     * The default value of the '{@link #getValidity() <em>Validity</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getValidity()
     * @generated
     * @ordered
     */
    protected static final ValidityKind VALIDITY_EDEFAULT = ValidityKind.GOOD;
    /**
     * The cached value of the '{@link #getValidity() <em>Validity</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getValidity()
     * @generated
     * @ordered
     */
    protected ValidityKind validity = VALIDITY_EDEFAULT;
    /**
     * The default value of the '{@link #getSource() <em>Source</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getSource()
     * @generated
     * @ordered
     */
    protected static final SourceKind SOURCE_EDEFAULT = SourceKind.PROCESS;
    /**
     * The cached value of the '{@link #getSource() <em>Source</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getSource()
     * @generated
     * @ordered
     */
    protected SourceKind source = SOURCE_EDEFAULT;
    /**
     * The cached value of the '{@link #getDetailQual() <em>Detail Qual</em>}' reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getDetailQual()
     * @generated
     * @ordered
     */
    protected DetailQual detailQual;
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected QualityImpl() {
        super();
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return DataclassesPackage.Literals.QUALITY;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public boolean isTest() {
        return test;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setTest(boolean newTest) {
        boolean oldTest = test;
        test = newTest;
        // Notify EMF adapters/observers of the attribute change.
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, DataclassesPackage.QUALITY__TEST, oldTest, test));
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public boolean isOperatorBlocked() {
        return operatorBlocked;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setOperatorBlocked(boolean newOperatorBlocked) {
        boolean oldOperatorBlocked = operatorBlocked;
        operatorBlocked = newOperatorBlocked;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, DataclassesPackage.QUALITY__OPERATOR_BLOCKED, oldOperatorBlocked, operatorBlocked));
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public ValidityKind getValidity() {
        return validity;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setValidity(ValidityKind newValidity) {
        ValidityKind oldValidity = validity;
        // null is coerced to the default so the attribute is never null.
        validity = newValidity == null ? VALIDITY_EDEFAULT : newValidity;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, DataclassesPackage.QUALITY__VALIDITY, oldValidity, validity));
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public SourceKind getSource() {
        return source;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setSource(SourceKind newSource) {
        SourceKind oldSource = source;
        // null is coerced to the default so the attribute is never null.
        source = newSource == null ? SOURCE_EDEFAULT : newSource;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, DataclassesPackage.QUALITY__SOURCE, oldSource, source));
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public DetailQual getDetailQual() {
        // Lazily resolve a cross-resource proxy on first access.
        if (detailQual != null && detailQual.eIsProxy()) {
            InternalEObject oldDetailQual = (InternalEObject)detailQual;
            detailQual = (DetailQual)eResolveProxy(oldDetailQual);
            if (detailQual != oldDetailQual) {
                if (eNotificationRequired())
                    eNotify(new ENotificationImpl(this, Notification.RESOLVE, DataclassesPackage.QUALITY__DETAIL_QUAL, oldDetailQual, detailQual));
            }
        }
        return detailQual;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public DetailQual basicGetDetailQual() {
        // Accessor without proxy resolution, used internally by eGet(resolve=false).
        return detailQual;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setDetailQual(DetailQual newDetailQual) {
        DetailQual oldDetailQual = detailQual;
        detailQual = newDetailQual;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, DataclassesPackage.QUALITY__DETAIL_QUAL, oldDetailQual, detailQual));
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        // Reflective feature read used by the EMF framework.
        switch (featureID) {
            case DataclassesPackage.QUALITY__TEST:
                return isTest();
            case DataclassesPackage.QUALITY__OPERATOR_BLOCKED:
                return isOperatorBlocked();
            case DataclassesPackage.QUALITY__VALIDITY:
                return getValidity();
            case DataclassesPackage.QUALITY__SOURCE:
                return getSource();
            case DataclassesPackage.QUALITY__DETAIL_QUAL:
                if (resolve) return getDetailQual();
                return basicGetDetailQual();
        }
        return super.eGet(featureID, resolve, coreType);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eSet(int featureID, Object newValue) {
        // Reflective feature write used by the EMF framework.
        switch (featureID) {
            case DataclassesPackage.QUALITY__TEST:
                setTest((Boolean)newValue);
                return;
            case DataclassesPackage.QUALITY__OPERATOR_BLOCKED:
                setOperatorBlocked((Boolean)newValue);
                return;
            case DataclassesPackage.QUALITY__VALIDITY:
                setValidity((ValidityKind)newValue);
                return;
            case DataclassesPackage.QUALITY__SOURCE:
                setSource((SourceKind)newValue);
                return;
            case DataclassesPackage.QUALITY__DETAIL_QUAL:
                setDetailQual((DetailQual)newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        // Reflectively restores a feature to its default value.
        switch (featureID) {
            case DataclassesPackage.QUALITY__TEST:
                setTest(TEST_EDEFAULT);
                return;
            case DataclassesPackage.QUALITY__OPERATOR_BLOCKED:
                setOperatorBlocked(OPERATOR_BLOCKED_EDEFAULT);
                return;
            case DataclassesPackage.QUALITY__VALIDITY:
                setValidity(VALIDITY_EDEFAULT);
                return;
            case DataclassesPackage.QUALITY__SOURCE:
                setSource(SOURCE_EDEFAULT);
                return;
            case DataclassesPackage.QUALITY__DETAIL_QUAL:
                setDetailQual((DetailQual)null);
                return;
        }
        super.eUnset(featureID);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        // True when the feature currently differs from its default value.
        switch (featureID) {
            case DataclassesPackage.QUALITY__TEST:
                return test != TEST_EDEFAULT;
            case DataclassesPackage.QUALITY__OPERATOR_BLOCKED:
                return operatorBlocked != OPERATOR_BLOCKED_EDEFAULT;
            case DataclassesPackage.QUALITY__VALIDITY:
                return validity != VALIDITY_EDEFAULT;
            case DataclassesPackage.QUALITY__SOURCE:
                return source != SOURCE_EDEFAULT;
            case DataclassesPackage.QUALITY__DETAIL_QUAL:
                return detailQual != null;
        }
        return super.eIsSet(featureID);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String toString() {
        // Only attribute values are printed; the detailQual reference is not
        // included (standard EMF-generated behavior).
        if (eIsProxy()) return super.toString();
        StringBuffer result = new StringBuffer(super.toString());
        result.append(" (test: ");
        result.append(test);
        result.append(", operatorBlocked: ");
        result.append(operatorBlocked);
        result.append(", validity: ");
        result.append(validity);
        result.append(", source: ");
        result.append(source);
        result.append(')');
        return result.toString();
    }
} //QualityImpl
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.printer;
import java.io.FileInputStream;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Map;

import javax.print.Doc;
import javax.print.DocFlavor;
import javax.print.DocPrintJob;
import javax.print.PrintService;
import javax.print.PrintServiceLookup;
import javax.print.attribute.Attribute;
import javax.print.attribute.PrintRequestAttributeSet;
import javax.print.attribute.standard.Media;
import javax.print.attribute.standard.MediaSizeName;
import javax.print.attribute.standard.MediaTray;
import javax.print.attribute.standard.OrientationRequested;
import javax.print.attribute.standard.Sides;

import org.apache.camel.Endpoint;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.apache.camel.util.IOHelper;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class PrinterPrintTest extends CamelTestSupport {
@Before
public void setup() {
    // Prepare the Java print environment before each test
    // (setupJavaPrint is defined elsewhere in this class).
    setupJavaPrint();
}
@Override
public boolean isUseRouteBuilder() {
    // Each test method adds its own routes via context.addRoutes(), so the
    // default route builder is disabled.
    return false;
}
// Returns true only when the "java.awt.headless" system property is
// explicitly set to "true"; used to skip tests in headless environments.
// NOTE(review): despite the name, this reads the system property rather than
// calling GraphicsEnvironment.isHeadless() — confirm that is intended.
private boolean isAwtHeadless() {
    return Boolean.getBoolean("java.awt.headless");
}
/**
 * Sends the contents of src/test/resources/test.txt to the direct:start
 * endpoint as a byte[] body, with the UTF-8 charset property set on the
 * exchange.
 */
private void sendFile() throws Exception {
    template.send("direct:start", new Processor() {
        public void process(Exchange exchange) throws Exception {
            // Read the whole file reliably. The previous implementation sized
            // the buffer with InputStream.available(), which is only an
            // estimate of the bytes readable without blocking and may
            // truncate the payload; it also read one byte per call.
            byte[] buffer = Files.readAllBytes(Paths.get("src/test/resources/test.txt"));
            // Set the property of the charset encoding
            exchange.setProperty(Exchange.CHARSET_NAME, "UTF-8");
            Message in = exchange.getIn();
            in.setBody(buffer);
        }
    });
}
/**
 * Sends the contents of src/test/resources/asf-logo.gif to the direct:start
 * endpoint as a byte[] body, with the UTF-8 charset property set on the
 * exchange.
 */
private void sendGIF() throws Exception {
    template.send("direct:start", new Processor() {
        public void process(Exchange exchange) throws Exception {
            // Read the whole file reliably; InputStream.available() is only
            // an estimate and must not be used to size a full-file buffer.
            byte[] buffer = Files.readAllBytes(Paths.get("src/test/resources/asf-logo.gif"));
            // Set the property of the charset encoding
            exchange.setProperty(Exchange.CHARSET_NAME, "UTF-8");
            Message in = exchange.getIn();
            in.setBody(buffer);
        }
    });
}
/**
 * Sends the contents of src/test/resources/asf-logo.JPG to the direct:start
 * endpoint as a byte[] body, with the UTF-8 charset property set on the
 * exchange.
 */
private void sendJPEG() throws Exception {
    template.send("direct:start", new Processor() {
        public void process(Exchange exchange) throws Exception {
            // Read the whole file reliably; InputStream.available() is only
            // an estimate and must not be used to size a full-file buffer.
            byte[] buffer = Files.readAllBytes(Paths.get("src/test/resources/asf-logo.JPG"));
            // Set the property of the charset encoding
            exchange.setProperty(Exchange.CHARSET_NAME, "UTF-8");
            Message in = exchange.getIn();
            in.setBody(buffer);
        }
    });
}
@Test
@Ignore
public void testSendingFileToPrinter() throws Exception {
if (isAwtHeadless()) {
return;
}
context.addRoutes(new RouteBuilder() {
public void configure() {
from("direct:start").
to("lpr://localhost/default?copies=1&flavor=DocFlavor.BYTE_ARRAY&mimeType=AUTOSENSE&mediaSize=na-letter&sides=one-sided&sendToPrinter=false");
}
});
context.start();
sendFile();
}
@Test
@Ignore
public void testSendingGIFToPrinter() throws Exception {
if (isAwtHeadless()) {
return;
}
context.addRoutes(new RouteBuilder() {
public void configure() {
from("direct:start").
to("lpr://localhost/default?flavor=DocFlavor.INPUT_STREAM&mimeType=GIF&mediaSize=na-letter&sides=one-sided&sendToPrinter=false");
}
});
context.start();
sendGIF();
}
@Test
@Ignore
public void testSendingJPEGToPrinter() throws Exception {
if (isAwtHeadless()) {
return;
}
context.addRoutes(new RouteBuilder() {
public void configure() {
from("direct:start").to("lpr://localhost/default?copies=2&flavor=DocFlavor.INPUT_STREAM"
+ "&mimeType=JPEG&mediaSize=na-letter&sides=one-sided&sendToPrinter=false");
}
});
context.start();
sendJPEG();
}
@Test
@Ignore
public void testSendingJPEGToPrinterWithLandscapePageOrientation() throws Exception {
if (isAwtHeadless()) {
return;
}
context.addRoutes(new RouteBuilder() {
public void configure() {
from("direct:start").to("lpr://localhost/default?flavor=DocFlavor.INPUT_STREAM"
+ "&mimeType=JPEG&sendToPrinter=false&orientation=landscape");
}
});
context.start();
sendJPEG();
}
/**
* Test for resolution of bug CAMEL-3446.
* Not specifying mediaSize nor sides attributes make it use
* default values when starting the route.
*/
@Test
@Ignore
public void testDefaultPrinterConfiguration() throws Exception {
if (isAwtHeadless()) {
return;
}
context.addRoutes(new RouteBuilder() {
public void configure() {
from("direct:start").to("lpr://localhost/default?sendToPrinter=false");
}
});
context.start();
}
@Test
public void moreThanOneLprEndpoint() throws Exception {
if (isAwtHeadless()) {
return;
}
int numberOfPrintservicesBefore = PrintServiceLookup.lookupPrintServices(null, null).length;
// setup javax.print
PrintService ps1 = mock(PrintService.class);
when(ps1.getName()).thenReturn("printer1");
when(ps1.isDocFlavorSupported(any(DocFlavor.class))).thenReturn(Boolean.TRUE);
PrintService ps2 = mock(PrintService.class);
when(ps2.getName()).thenReturn("printer2");
boolean res1 = PrintServiceLookup.registerService(ps1);
assertTrue("PrintService #1 should be registered.", res1);
boolean res2 = PrintServiceLookup.registerService(ps2);
assertTrue("PrintService #2 should be registered.", res2);
PrintService[] pss = PrintServiceLookup.lookupPrintServices(null, null);
assertEquals("lookup should report two PrintServices.", numberOfPrintservicesBefore + 2, pss.length);
DocPrintJob job1 = mock(DocPrintJob.class);
when(ps1.createPrintJob()).thenReturn(job1);
context.addRoutes(new RouteBuilder() {
public void configure() {
from("direct:start1").to("lpr://localhost/printer1?sendToPrinter=true");
from("direct:start2").to("lpr://localhost/printer2?sendToPrinter=false");
}
});
context.start();
// Are there two different PrintConfigurations?
Map<String, Endpoint> epm = context().getEndpointMap();
assertEquals("Four endpoints", 4, epm.size());
Endpoint lp1 = null;
Endpoint lp2 = null;
for (Map.Entry<String, Endpoint> ep : epm.entrySet()) {
if (ep.getKey().contains("printer1")) {
lp1 = ep.getValue();
}
if (ep.getKey().contains("printer2")) {
lp2 = ep.getValue();
}
}
assertNotNull(lp1);
assertNotNull(lp2);
assertEquals("printer1", ((PrinterEndpoint) lp1).getConfig().getPrintername());
assertEquals("printer2", ((PrinterEndpoint) lp2).getConfig().getPrintername());
template.sendBody("direct:start1", "Hello Printer 1");
context.stop();
verify(job1, times(1)).print(any(Doc.class), any(PrintRequestAttributeSet.class));
}
@Test
public void printerNameTest() throws Exception {
if (isAwtHeadless()) {
return;
}
// setup javax.print
PrintService ps1 = mock(PrintService.class);
when(ps1.getName()).thenReturn("MyPrinter\\\\remote\\printer1");
when(ps1.isDocFlavorSupported(any(DocFlavor.class))).thenReturn(Boolean.TRUE);
boolean res1 = PrintServiceLookup.registerService(ps1);
assertTrue("The Remote PrintService #1 should be registered.", res1);
DocPrintJob job1 = mock(DocPrintJob.class);
when(ps1.createPrintJob()).thenReturn(job1);
context.addRoutes(new RouteBuilder() {
public void configure() {
from("direct:start1").to("lpr://remote/printer1?sendToPrinter=true");
}
});
context.start();
template.sendBody("direct:start1", "Hello Printer 1");
context.stop();
verify(job1, times(1)).print(any(Doc.class), any(PrintRequestAttributeSet.class));
}
@Test
public void setJobName() throws Exception {
if (isAwtHeadless()) {
return;
}
getMockEndpoint("mock:output").setExpectedMessageCount(1);
context.addRoutes(new RouteBuilder() {
public void configure() {
from("direct:start").to("lpr://localhost/default").to("mock:output");
}
});
context.start();
template.sendBodyAndHeader("direct:start", "Hello Printer", PrinterEndpoint.JOB_NAME, "Test-Job-Name");
context.stop();
assertMockEndpointsSatisfied();
}
@Test
public void printToMiddleTray() throws Exception {
PrinterEndpoint endpoint = new PrinterEndpoint();
PrinterConfiguration configuration = new PrinterConfiguration();
configuration.setHostname("localhost");
configuration.setPort(631);
configuration.setPrintername("DefaultPrinter");
configuration.setMediaSizeName(MediaSizeName.ISO_A4);
configuration.setInternalSides(Sides.ONE_SIDED);
configuration.setInternalOrientation(OrientationRequested.PORTRAIT);
configuration.setMediaTray("middle");
PrinterProducer producer = new PrinterProducer(endpoint, configuration);
producer.start();
PrinterOperations printerOperations = producer.getPrinterOperations();
PrintRequestAttributeSet attributeSet = printerOperations.getPrintRequestAttributeSet();
Attribute attribute = attributeSet.get(javax.print.attribute.standard.Media.class);
assertNotNull(attribute);
assertTrue(attribute instanceof MediaTray);
MediaTray mediaTray = (MediaTray) attribute;
assertEquals("middle", mediaTray.toString());
}
@Test
public void printsWithLandscapeOrientation() throws Exception {
PrinterEndpoint endpoint = new PrinterEndpoint();
PrinterConfiguration configuration = new PrinterConfiguration();
configuration.setHostname("localhost");
configuration.setPort(631);
configuration.setPrintername("DefaultPrinter");
configuration.setMediaSizeName(MediaSizeName.ISO_A4);
configuration.setInternalSides(Sides.ONE_SIDED);
configuration.setInternalOrientation(OrientationRequested.REVERSE_LANDSCAPE);
configuration.setMediaTray("middle");
configuration.setSendToPrinter(false);
PrinterProducer producer = new PrinterProducer(endpoint, configuration);
producer.start();
PrinterOperations printerOperations = producer.getPrinterOperations();
PrintRequestAttributeSet attributeSet = printerOperations.getPrintRequestAttributeSet();
Attribute attribute = attributeSet.get(OrientationRequested.class);
assertNotNull(attribute);
assertEquals("reverse-landscape", attribute.toString());
}
protected void setupJavaPrint() {
// "install" another default printer
PrintService psDefault = mock(PrintService.class);
when(psDefault.getName()).thenReturn("DefaultPrinter");
when(psDefault.isDocFlavorSupported(any(DocFlavor.class))).thenReturn(Boolean.TRUE);
PrintServiceLookup psLookup = mock(PrintServiceLookup.class);
when(psLookup.getPrintServices()).thenReturn(new PrintService[]{psDefault});
when(psLookup.getDefaultPrintService()).thenReturn(psDefault);
DocPrintJob docPrintJob = mock(DocPrintJob.class);
when(psDefault.createPrintJob()).thenReturn(docPrintJob);
MediaTray[] trays = new MediaTray[]{
MediaTray.TOP,
MediaTray.MIDDLE,
MediaTray.BOTTOM
};
when(psDefault.getSupportedAttributeValues(Media.class, null, null)).thenReturn(trays);
PrintServiceLookup.registerServiceProvider(psLookup);
}
}
| |
/*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.lib.security.http;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableMap;
import com.streamsets.datacollector.util.Configuration;
import com.streamsets.pipeline.api.impl.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
/**
 * SSO service implementation that delegates user/app auth-token validation and
 * component registration to a remote DPM security service over HTTP.
 */
public class RemoteSSOService extends AbstractSSOService {
    private static final Logger LOG = LoggerFactory.getLogger(RemoteSSOService.class);

    public static final String DPM_BASE_URL_CONFIG = "dpm.base.url";
    public static final String DPM_BASE_URL_DEFAULT = "http://localhost:18631";
    public static final String SECURITY_SERVICE_APP_AUTH_TOKEN_CONFIG = CONFIG_PREFIX + "appAuthToken";
    public static final String SECURITY_SERVICE_COMPONENT_ID_CONFIG = CONFIG_PREFIX + "componentId";
    public static final String SECURITY_SERVICE_CONNECTION_TIMEOUT_CONFIG = CONFIG_PREFIX + "connectionTimeout.millis";
    public static final int DEFAULT_SECURITY_SERVICE_CONNECTION_TIMEOUT = 10000;
    public static final String DPM_ENABLED = CONFIG_PREFIX + "enabled";
    public static final boolean DPM_ENABLED_DEFAULT = false;
    public static final String DPM_REGISTRATION_RETRY_ATTEMPTS = "registration.retry.attempts";
    public static final int DPM_REGISTRATION_RETRY_ATTEMPTS_DEFAULT = 5;

    // package-private so tests can swap builders in
    RestClient.Builder registerClientBuilder;
    RestClient.Builder userAuthClientBuilder;
    RestClient.Builder appAuthClientBuilder;
    private String appToken;
    private String componentId;
    // volatile: updated from response headers while readers poll it
    private volatile int connTimeout;
    private int dpmRegistrationMaxRetryAttempts;
    private volatile boolean serviceActive;

    /**
     * Reads DPM connection settings from the configuration and prepares the
     * REST client builders for registration and token validation calls.
     */
    @Override
    public void setConfiguration(Configuration conf) {
        super.setConfiguration(conf);
        String dpmBaseUrl = getValidURL(conf.get(DPM_BASE_URL_CONFIG, DPM_BASE_URL_DEFAULT));
        String baseUrl = dpmBaseUrl + "security";
        Utils.checkArgument(
            baseUrl.toLowerCase().startsWith("http:") || baseUrl.toLowerCase().startsWith("https:"),
            Utils.formatL("Security service base URL must be HTTP/HTTPS '{}'", baseUrl)
        );
        if (baseUrl.toLowerCase().startsWith("http://")) {
            LOG.warn("Security service base URL is not secure '{}'", baseUrl);
        }
        setLoginPageUrl(baseUrl + "/login");
        setLogoutUrl(baseUrl + "/_logout");
        // both of these may legitimately be null at this point; register() guards for it
        componentId = conf.get(SECURITY_SERVICE_COMPONENT_ID_CONFIG, null);
        appToken = conf.get(SECURITY_SERVICE_APP_AUTH_TOKEN_CONFIG, null);
        connTimeout = conf.get(SECURITY_SERVICE_CONNECTION_TIMEOUT_CONFIG, DEFAULT_SECURITY_SERVICE_CONNECTION_TIMEOUT);
        dpmRegistrationMaxRetryAttempts = conf.get(DPM_REGISTRATION_RETRY_ATTEMPTS,
            DPM_REGISTRATION_RETRY_ATTEMPTS_DEFAULT);
        registerClientBuilder = RestClient.builder(baseUrl)
            .csrf(true)
            .json(true)
            .path("public-rest/v1/components/registration")
            .timeout(connTimeout);
        userAuthClientBuilder = RestClient.builder(baseUrl)
            .csrf(true)
            .json(true)
            .path("rest/v1/validateAuthToken/user")
            .timeout(connTimeout);
        appAuthClientBuilder = RestClient.builder(baseUrl)
            .csrf(true)
            .json(true)
            .path("rest/v1/validateAuthToken/component")
            .timeout(connTimeout);
    }

    @VisibleForTesting
    public RestClient.Builder getRegisterClientBuilder() {
        return registerClientBuilder;
    }

    @VisibleForTesting
    public RestClient.Builder getUserAuthClientBuilder() {
        return userAuthClientBuilder;
    }

    @VisibleForTesting
    public RestClient.Builder getAppAuthClientBuilder() {
        return appAuthClientBuilder;
    }

    /**
     * Sleeps for the given number of seconds between registration retries.
     *
     * @throws RuntimeException if the thread is interrupted; the interrupt
     *     flag is restored before throwing so callers can still observe it.
     */
    @VisibleForTesting
    void sleep(int secs) {
        try {
            Thread.sleep(secs * 1000L);
        } catch (InterruptedException ex) {
            // restore the interrupt status; swallowing it would hide the
            // interruption from code further up the stack
            Thread.currentThread().interrupt();
            String msg = "Interrupted while attempting DPM registration";
            LOG.error(msg);
            throw new RuntimeException(msg, ex);
        }
    }

    /**
     * Updates the connection timeout from the response header, if the server
     * sent one.
     */
    void updateConnectionTimeout(RestClient.Response response) {
        String timeout = response.getHeader(SSOConstants.X_APP_CONNECTION_TIMEOUT);
        connTimeout = (timeout == null) ? connTimeout : Integer.parseInt(timeout);
    }

    /**
     * Probes the DPM login page and reports whether the service answers with
     * HTTP 200.
     */
    boolean checkServiceActive() {
        boolean active;
        try {
            URL url = new URL(getLoginPageUrl());
            int status = ((HttpURLConnection) url.openConnection()).getResponseCode();
            active = status == HttpURLConnection.HTTP_OK;
            if (!active) {
                LOG.warn("DPM reachable but returning '{}' HTTP status on login", status);
            }
        } catch (IOException ex) {
            LOG.warn("DPM not reachable: {}", ex.toString());
            active = false;
        }
        LOG.debug("DPM current status '{}'", (active) ? "ACTIVE" : "NON ACTIVE");
        return active;
    }

    /**
     * Returns the cached service-active flag, optionally refreshing it with a
     * live probe first.
     */
    public boolean isServiceActive(boolean checkNow) {
        if (checkNow) {
            serviceActive = checkServiceActive();
        }
        return serviceActive;
    }

    /**
     * Registers this component with DPM, retrying with exponential backoff
     * (capped at 16s) to ride out load-balancer warm-up.
     *
     * @throws RuntimeException if component ID or app auth token is missing,
     *     or if DPM rejects the registration with HTTP 403.
     */
    @Override
    public void register(Map<String, String> attributes) {
        // appToken/componentId come from conf.get(..., null) and may be null,
        // not just empty — guard both to avoid an NPE before the error below
        boolean missingToken = appToken == null || appToken.isEmpty();
        boolean missingComponentId = componentId == null || componentId.isEmpty();
        if (missingToken || missingComponentId) {
            if (missingToken) {
                LOG.warn("Skipping component registration to DPM, application auth token is not set");
            }
            if (missingComponentId) {
                LOG.warn("Skipping component registration to DPM, component ID is not set");
            }
            throw new RuntimeException("Registration to DPM not done, missing component ID or app auth token");
        } else {
            LOG.debug("Doing component ID '{}' registration with DPM", componentId);
            Map<String, Object> registrationData = new HashMap<>();
            registrationData.put("authToken", appToken);
            registrationData.put("componentId", componentId);
            registrationData.put("attributes", attributes);
            int delaySecs = 1;
            int attempts = 0;
            boolean registered = false;
            //When Load Balancer(HAProxy or ELB) is used, it will take couple of seconds for load balancer to access
            //security service. So we are retrying registration couple of times until server is accessible via load balancer.
            while (attempts < dpmRegistrationMaxRetryAttempts) {
                if (attempts > 0) {
                    delaySecs = delaySecs * 2;
                    delaySecs = Math.min(delaySecs, 16);
                    LOG.warn("DPM registration attempt '{}', waiting for '{}' seconds before retrying ...", attempts, delaySecs);
                    sleep(delaySecs);
                }
                attempts++;
                try {
                    RestClient restClient = getRegisterClientBuilder().build();
                    RestClient.Response response = restClient.post(registrationData);
                    if (response.getStatus() == HttpURLConnection.HTTP_OK) {
                        updateConnectionTimeout(response);
                        LOG.info("Registered with DPM");
                        registered = true;
                        break;
                    } else if (response.getStatus() == HttpURLConnection.HTTP_UNAVAILABLE) {
                        // service behind load balancer not ready yet; retry
                        LOG.warn("DPM Registration unavailable");
                    } else if (response.getStatus() == HttpURLConnection.HTTP_FORBIDDEN) {
                        throw new RuntimeException(Utils.format(
                            "Failed registration for component ID '{}': {}",
                            componentId,
                            response.getError()
                        ));
                    } else {
                        // any other status is treated as non-retryable
                        LOG.warn("Failed to register with DPM, HTTP status '{}': {}", response.getStatus(), response.getError());
                        break;
                    }
                } catch (IOException ex) {
                    LOG.warn("DPM Registration failed: {}", ex.toString());
                }
            }
            if (registered) {
                clearCaches();
                serviceActive = true;
            } else {
                LOG.warn("DPM registration failed after '{}' attempts", attempts);
            }
        }
    }

    /**
     * Sets the component ID and propagates it to all REST client builders.
     */
    public void setComponentId(String componentId) {
        componentId = (componentId != null) ? componentId.trim() : null;
        Utils.checkArgument(componentId != null && !componentId.isEmpty(), "Component ID cannot be NULL or empty");
        this.componentId = componentId;
        registerClientBuilder.componentId(componentId);
        userAuthClientBuilder.componentId(componentId);
        appAuthClientBuilder.componentId(componentId);
    }

    /**
     * Sets the application auth token and propagates it to all REST client
     * builders.
     */
    public void setApplicationAuthToken(String appToken) {
        appToken = (appToken != null) ? appToken.trim() : null;
        this.appToken = appToken;
        registerClientBuilder.appAuthToken(appToken);
        userAuthClientBuilder.appAuthToken(appToken);
        appAuthClientBuilder.appAuthToken(appToken);
    }

    // probe the service only when the cached flag says it is inactive
    private boolean checkServiceActiveIfInActive() {
        if (!serviceActive) {
            serviceActive = checkServiceActive();
        }
        return serviceActive;
    }

    /**
     * Validates a user auth token against the remote security service.
     *
     * @return the resolved principal, locked; may be null if the service
     *     returned no data.
     * @throws ForbiddenException if the token is rejected or DPM is unreachable.
     */
    protected SSOPrincipal validateUserTokenWithSecurityService(String userAuthToken)
        throws ForbiddenException {
        Utils.checkState(checkServiceActiveIfInActive(), "Security service not active");
        ValidateUserAuthTokenJson authTokenJson = new ValidateUserAuthTokenJson();
        authTokenJson.setAuthToken(userAuthToken);
        SSOPrincipalJson principal;
        try {
            RestClient restClient = getUserAuthClientBuilder().build();
            RestClient.Response response = restClient.post(authTokenJson);
            if (response.getStatus() == HttpURLConnection.HTTP_OK) {
                updateConnectionTimeout(response);
                principal = response.getData(SSOPrincipalJson.class);
            } else if (response.getStatus() == HttpURLConnection.HTTP_FORBIDDEN) {
                throw new ForbiddenException(response.getError());
            } else {
                // token value intentionally not included in the message
                throw new RuntimeException(Utils.format(
                    "Could not validate user token '{}', HTTP status '{}' message: {}",
                    null,
                    response.getStatus(),
                    response.getError()
                ));
            }
        } catch (IOException ex) {
            LOG.warn("Could not do user token validation, going inactive: {}", ex.toString());
            serviceActive = false;
            Map<String, String> error = ImmutableMap.of("message", "Could not connect to security service: " + ex.toString());
            throw new ForbiddenException(error);
        }
        if (principal != null) {
            principal.setTokenStr(userAuthToken);
            principal.lock();
            LOG.debug("Validated user auth token for '{}'", principal.getPrincipalId());
        }
        return principal;
    }

    /**
     * Validates a component (app) auth token against the remote security
     * service.
     *
     * @return the resolved principal, locked; may be null if the service
     *     returned no data.
     * @throws ForbiddenException if the token is rejected or DPM is unreachable.
     */
    protected SSOPrincipal validateAppTokenWithSecurityService(String authToken, String componentId)
        throws ForbiddenException {
        Utils.checkState(checkServiceActiveIfInActive(), "Security service not active");
        ValidateComponentAuthTokenJson authTokenJson = new ValidateComponentAuthTokenJson();
        authTokenJson.setComponentId(componentId);
        authTokenJson.setAuthToken(authToken);
        SSOPrincipalJson principal;
        try {
            RestClient restClient = getAppAuthClientBuilder().build();
            RestClient.Response response = restClient.post(authTokenJson);
            if (response.getStatus() == HttpURLConnection.HTTP_OK) {
                updateConnectionTimeout(response);
                principal = response.getData(SSOPrincipalJson.class);
            } else if (response.getStatus() == HttpURLConnection.HTTP_FORBIDDEN) {
                throw new ForbiddenException(response.getError());
            } else {
                throw new RuntimeException(Utils.format(
                    "Could not validate app token for component ID '{}', HTTP status '{}' message: {}",
                    componentId,
                    response.getStatus(),
                    response.getError()
                ));
            }
        } catch (IOException ex) {
            LOG.warn("Could not do app token validation, going inactive: {}", ex.toString());
            serviceActive = false;
            // fixed typo: "seucirty" -> "security"
            Map<String, String> error = ImmutableMap.of("message", "Could not connect to security service: " + ex.toString());
            throw new ForbiddenException(error);
        }
        if (principal != null) {
            principal.setTokenStr(authToken);
            principal.lock();
            LOG.debug("Validated app auth token for '{}'", principal.getPrincipalId());
        }
        return principal;
    }

    /**
     * Ensures the URL ends with a trailing slash so paths can be appended
     * safely.
     */
    public static String getValidURL(String url) {
        if (!url.endsWith("/")) {
            url += "/";
        }
        return url;
    }

    @VisibleForTesting
    int getConnectionTimeout() {
        return connTimeout;
    }
}
| |
package mil.nga.giat.geowave.test.service;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.concurrent.TimeUnit;
import org.mortbay.jetty.Connector;
import org.mortbay.jetty.Server;
import org.mortbay.jetty.bio.SocketConnector;
import org.mortbay.jetty.webapp.WebAppClassLoader;
import org.mortbay.jetty.webapp.WebAppContext;
import org.mortbay.xml.XmlConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import mil.nga.giat.geowave.test.GeoWaveITRunner;
import mil.nga.giat.geowave.test.TestEnvironment;
import mil.nga.giat.geowave.test.TestUtils;
import mil.nga.giat.geowave.test.mapreduce.MapReduceTestEnvironment;
/**
 * Test environment that boots an embedded Jetty server hosting both a
 * GeoServer webapp and the GeoWave services webapp for integration tests.
 * Singleton; setup/teardown are serialized on GeoWaveITRunner.MUTEX.
 */
public class ServicesTestEnvironment implements
        TestEnvironment
{
    private static final Logger LOGGER = LoggerFactory.getLogger(ServicesTestEnvironment.class);
    // lazily created singleton; access is synchronized via getInstance()
    private static ServicesTestEnvironment singletonInstance = null;

    /**
     * Returns the singleton instance, creating it on first call.
     */
    public static synchronized ServicesTestEnvironment getInstance() {
        if (singletonInstance == null) {
            singletonInstance = new ServicesTestEnvironment();
        }
        return singletonInstance;
    }

    // library name fragments whose jars must stay on the webapp's PARENT
    // classloader (see the classpath-partitioning comment in doSetup)
    private static String[] PARENT_CLASSLOADER_LIBRARIES = new String[] {
        "hbase",
        "hadoop",
        "protobuf"
    };
    protected static final int JETTY_PORT = 9011;
    protected static final String JETTY_BASE_URL = "http://localhost:" + JETTY_PORT;
    protected static final int ACCEPT_QUEUE_SIZE = 100;
    protected static final int MAX_IDLE_TIME = (int) TimeUnit.HOURS.toMillis(1);
    // -1 disables SO_LINGER on the connector socket
    protected static final int SO_LINGER_TIME = -1;
    // 2 MiB form limit; large SLDs are posted through the GeoServer styles form
    protected static final int MAX_FORM_CONTENT_SIZE = 1024 * 1024 * 2;
    protected static final String GEOSERVER_USER = "admin";
    protected static final String GEOSERVER_PASS = "geoserver";
    protected static final String TEST_WORKSPACE = "geowave_test";
    protected static final String GEOSERVER_WAR_DIR = "target/geoserver";
    protected static final String GEOSERVER_CONTEXT_PATH = "/geoserver";
    protected static final String GEOSERVER_BASE_URL = JETTY_BASE_URL + GEOSERVER_CONTEXT_PATH;
    protected static final String GEOSERVER_REST_PATH = GEOSERVER_BASE_URL + "/rest";
    protected static final String GEOWAVE_WAR_DIR = "target/geowave-services";
    protected static final String GEOWAVE_CONTEXT_PATH = "/geowave-services";
    protected static final String GEOWAVE_BASE_URL = JETTY_BASE_URL + GEOWAVE_CONTEXT_PATH;
    protected static final String GEOWAVE_WORKSPACE_PATH = GEOSERVER_WAR_DIR + "/data/workspaces/" + TEST_WORKSPACE;
    // style names/files used by the subsampling and distributed-render tests
    protected static final String TEST_STYLE_NAME_NO_DIFFERENCE = "SubsamplePoints-2px";
    protected static final String TEST_STYLE_NAME_MINOR_SUBSAMPLE = "SubsamplePoints-10px";
    protected static final String TEST_STYLE_NAME_MAJOR_SUBSAMPLE = "SubsamplePoints-100px";
    protected static final String TEST_STYLE_NAME_DISTRIBUTED_RENDER = "DistributedRender";
    protected static final String TEST_STYLE_PATH = "src/test/resources/sld/";
    protected static final String TEST_SLD_NO_DIFFERENCE_FILE = TEST_STYLE_PATH + TEST_STYLE_NAME_NO_DIFFERENCE
        + ".sld";
    protected static final String TEST_SLD_MINOR_SUBSAMPLE_FILE = TEST_STYLE_PATH + TEST_STYLE_NAME_MINOR_SUBSAMPLE
        + ".sld";
    protected static final String TEST_SLD_MAJOR_SUBSAMPLE_FILE = TEST_STYLE_PATH + TEST_STYLE_NAME_MAJOR_SUBSAMPLE
        + ".sld";
    protected static final String TEST_SLD_DISTRIBUTED_RENDER_FILE = TEST_STYLE_PATH
        + TEST_STYLE_NAME_DISTRIBUTED_RENDER + ".sld";

    // non-null while the embedded server is (or is being) started
    private Server jettyServer;

    @SuppressFBWarnings(value = {
        "SWL_SLEEP_WITH_LOCK_HELD"
    }, justification = "Jetty must be started before releasing the lock")
    @Override
    public void setup()
        throws Exception {
        synchronized (GeoWaveITRunner.MUTEX) {
            // Setup activities delegated to private function
            // to satisfy HP Fortify
            doSetup();
        }
    }

    /**
     * Starts the embedded Jetty server (idempotent: no-op if already started)
     * hosting the GeoServer and GeoWave webapps. Failures other than
     * RuntimeException are logged and swallowed deliberately (best-effort
     * startup); the server is stopped again if it got partially running.
     */
    private void doSetup() {
        if (jettyServer == null) {
            try {
                // Prevent "Unauthorized class found" error
                System.setProperty(
                    "GEOSERVER_XSTREAM_WHITELIST",
                    "org.geoserver.wfs.**;org.geoserver.wms.**");
                // delete old workspace configuration if it's still there
                jettyServer = new Server();
                final SocketConnector conn = new SocketConnector();
                conn.setPort(JETTY_PORT);
                conn.setAcceptQueueSize(ACCEPT_QUEUE_SIZE);
                conn.setMaxIdleTime(MAX_IDLE_TIME);
                conn.setSoLingerTime(SO_LINGER_TIME);
                jettyServer.setConnectors(new Connector[] {
                    conn
                });
                final WebAppContext gsWebapp = new WebAppContext();
                gsWebapp.setContextPath(GEOSERVER_CONTEXT_PATH);
                gsWebapp.setWar(GEOSERVER_WAR_DIR);
                // classloader creation is wrapped in doPrivileged for security
                // managers that restrict classloader creation
                final WebAppClassLoader classLoader = AccessController
                    .doPrivileged(new PrivilegedAction<WebAppClassLoader>() {
                        @Override
                        public WebAppClassLoader run() {
                            try {
                                return new WebAppClassLoader(
                                    gsWebapp);
                            }
                            catch (final IOException e) {
                                LOGGER.error(
                                    "Unable to create new classloader",
                                    e);
                                return null;
                            }
                        }
                    });
                if (classLoader == null) {
                    throw new IOException(
                        "Unable to create classloader");
                }
                // normalize the path separator so the classpath can be split
                // uniformly on ';' below (handles both ':' and ';' separated input)
                final String classpath = System.getProperty(
                    "java.class.path").replace(
                    ":",
                    ";");
                final String[] individualEntries = classpath.split(";");
                final StringBuffer str = new StringBuffer();
                for (final String e : individualEntries) {
                    // HBase has certain static initializers that use reflection
                    // to get annotated values
                    // because Class instances are not equal if they are loaded
                    // by different class loaders this HBase initialization
                    // fails
                    // furthermore HBase's runtime dependencies need to
                    // be loaded by the same classloader, the webapp's parent
                    // class loader
                    // but geowave hbase datastore implementation must be loaded
                    // by the same classloader as geotools or the SPI loader
                    // won't work
                    boolean addLibraryToWebappContext = true;
                    if (!e.contains("geowave")) {
                        for (final String parentLoaderLibrary : PARENT_CLASSLOADER_LIBRARIES) {
                            if (e.contains(parentLoaderLibrary)) {
                                addLibraryToWebappContext = false;
                                break;
                            }
                        }
                    }
                    if (addLibraryToWebappContext) {
                        str.append(
                            e).append(
                            ";");
                    }
                }
                classLoader.addClassPath(str.toString());
                gsWebapp.setClassLoader(classLoader);
                // this has to be false for geoserver to load the correct guava
                // classes (until hadoop updates guava support to a later
                // version, slated for hadoop 3.x)
                gsWebapp.setParentLoaderPriority(false);
                final File warDir = new File(
                    GEOWAVE_WAR_DIR);
                // update the config file
                ServicesTestUtils.writeConfigFile(new File(
                    warDir,
                    "/WEB-INF/config.properties"));
                final WebAppContext gwWebapp = new WebAppContext();
                gwWebapp.setContextPath(GEOWAVE_CONTEXT_PATH);
                gwWebapp.setWar(warDir.getAbsolutePath());
                jettyServer.setHandlers(new WebAppContext[] {
                    gsWebapp,
                    gwWebapp
                });
                gsWebapp.setTempDirectory(TestUtils.TEMP_DIR);
                // this allows to send large SLD's from the styles form
                gsWebapp.getServletContext().getContextHandler().setMaxFormContentSize(
                    MAX_FORM_CONTENT_SIZE);
                // optional external Jetty XML config, applied before start
                final String jettyConfigFile = System.getProperty("jetty.config.file");
                if (jettyConfigFile != null) {
                    LOGGER.info("Loading Jetty config from file: " + jettyConfigFile);
                    (new XmlConfiguration(
                        new FileInputStream(
                            jettyConfigFile))).configure(jettyServer);
                }
                jettyServer.start();
                // busy-wait until the server reports started
                while (!jettyServer.isRunning() && !jettyServer.isStarted()) {
                    Thread.sleep(1000);
                }
            }
            catch (final RuntimeException e) {
                // programming errors propagate; only checked failures are
                // treated as best-effort below
                throw e;
            }
            catch (final Exception e) {
                LOGGER.error(
                    "Could not start the Jetty server: " + e.getMessage(),
                    e);
                // NOTE(review): jettyServer stays non-null here, so a later
                // setup() call will not retry startup — presumed intentional
                if (jettyServer.isRunning()) {
                    try {
                        jettyServer.stop();
                    }
                    catch (final Exception e1) {
                        LOGGER.error(
                            "Unable to stop the Jetty server",
                            e1);
                    }
                }
            }
        }
    }

    /**
     * Stops the embedded Jetty server unless cleanup is deferred by the
     * runner; stop failures are logged and swallowed.
     */
    @Override
    public void tearDown()
        throws Exception {
        synchronized (GeoWaveITRunner.MUTEX) {
            if (!GeoWaveITRunner.DEFER_CLEANUP.get()) {
                if (jettyServer != null) {
                    try {
                        jettyServer.stop();
                        jettyServer = null;
                    }
                    catch (final Exception e) {
                        LOGGER.error(
                            "Unable to stop the Jetty server",
                            e);
                    }
                }
            }
        }
    }

    /**
     * This environment depends on the MapReduce test environment being set up
     * first.
     */
    @Override
    public TestEnvironment[] getDependentEnvironments() {
        return new TestEnvironment[] {
            MapReduceTestEnvironment.getInstance()
        };
    }
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.query.select;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.google.common.base.Preconditions;
import io.druid.java.util.common.granularity.Granularity;
import io.druid.query.BaseQuery;
import io.druid.query.DataSource;
import io.druid.query.Query;
import io.druid.query.Result;
import io.druid.query.dimension.DimensionSpec;
import io.druid.query.filter.DimFilter;
import io.druid.query.spec.QuerySegmentSpec;
import io.druid.segment.VirtualColumns;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
*/
@JsonTypeName("select")
public class SelectQuery extends BaseQuery<Result<SelectResultValue>>
{
private final DimFilter dimFilter;
private final Granularity granularity;
private final List<DimensionSpec> dimensions;
private final List<String> metrics;
private final VirtualColumns virtualColumns;
private final PagingSpec pagingSpec;
@JsonCreator
public SelectQuery(
@JsonProperty("dataSource") DataSource dataSource,
@JsonProperty("intervals") QuerySegmentSpec querySegmentSpec,
@JsonProperty("descending") boolean descending,
@JsonProperty("filter") DimFilter dimFilter,
@JsonProperty("granularity") Granularity granularity,
@JsonProperty("dimensions") List<DimensionSpec> dimensions,
@JsonProperty("metrics") List<String> metrics,
@JsonProperty("virtualColumns") VirtualColumns virtualColumns,
@JsonProperty("pagingSpec") PagingSpec pagingSpec,
@JsonProperty("context") Map<String, Object> context
)
{
super(dataSource, querySegmentSpec, descending, context);
this.dimFilter = dimFilter;
this.granularity = granularity;
this.dimensions = dimensions;
this.virtualColumns = VirtualColumns.nullToEmpty(virtualColumns);
this.metrics = metrics;
this.pagingSpec = pagingSpec;
Preconditions.checkNotNull(pagingSpec, "must specify a pagingSpec");
Preconditions.checkArgument(checkPagingSpec(pagingSpec, descending), "invalid pagingSpec");
}
private boolean checkPagingSpec(PagingSpec pagingSpec, boolean descending)
{
for (Integer value : pagingSpec.getPagingIdentifiers().values()) {
if (descending ^ (value < 0)) {
return false;
}
}
return pagingSpec.getThreshold() >= 0;
}
@Override
public boolean hasFilters()
{
return dimFilter != null;
}
@Override
public DimFilter getFilter()
{
return dimFilter;
}
@Override
public String getType()
{
return Query.SELECT;
}
@JsonProperty("filter")
public DimFilter getDimensionsFilter()
{
return dimFilter;
}
@JsonProperty
public Granularity getGranularity()
{
return granularity;
}
@JsonProperty
public List<DimensionSpec> getDimensions()
{
return dimensions;
}
@JsonProperty
public PagingSpec getPagingSpec()
{
return pagingSpec;
}
@JsonProperty
public List<String> getMetrics()
{
return metrics;
}
@JsonProperty
public VirtualColumns getVirtualColumns()
{
return virtualColumns;
}
public PagingOffset getPagingOffset(String identifier)
{
return pagingSpec.getOffset(identifier, isDescending());
}
public SelectQuery withQuerySegmentSpec(QuerySegmentSpec querySegmentSpec)
{
return new SelectQuery(
getDataSource(),
querySegmentSpec,
isDescending(),
dimFilter,
granularity,
dimensions,
metrics,
virtualColumns,
pagingSpec,
getContext()
);
}
@Override
public Query<Result<SelectResultValue>> withDataSource(DataSource dataSource)
{
return new SelectQuery(
dataSource,
getQuerySegmentSpec(),
isDescending(),
dimFilter,
granularity,
dimensions,
metrics,
virtualColumns,
pagingSpec,
getContext()
);
}
public SelectQuery withOverriddenContext(Map<String, Object> contextOverrides)
{
return new SelectQuery(
getDataSource(),
getQuerySegmentSpec(),
isDescending(),
dimFilter,
granularity,
dimensions,
metrics,
virtualColumns,
pagingSpec,
computeOverridenContext(contextOverrides)
);
}
public SelectQuery withPagingSpec(PagingSpec pagingSpec)
{
return new SelectQuery(
getDataSource(),
getQuerySegmentSpec(),
isDescending(),
dimFilter,
granularity,
dimensions,
metrics,
virtualColumns,
pagingSpec,
getContext()
);
}
public SelectQuery withDimFilter(DimFilter dimFilter)
{
return new SelectQuery(
getDataSource(),
getQuerySegmentSpec(),
isDescending(),
dimFilter,
granularity,
dimensions,
metrics,
virtualColumns,
pagingSpec,
getContext()
);
}
/**
 * Human-readable dump of all query parameters. The output format is
 * identical to the previous concatenation-based implementation.
 */
@Override
public String toString()
{
  return new StringBuilder("SelectQuery{")
      .append("dataSource='").append(getDataSource()).append('\'')
      .append(", querySegmentSpec=").append(getQuerySegmentSpec())
      .append(", descending=").append(isDescending())
      .append(", dimFilter=").append(dimFilter)
      .append(", granularity=").append(granularity)
      .append(", dimensions=").append(dimensions)
      .append(", metrics=").append(metrics)
      .append(", virtualColumns=").append(virtualColumns)
      .append(", pagingSpec=").append(pagingSpec)
      .append('}')
      .toString();
}
/**
 * Value equality: equal when the base query state (super.equals) and all
 * select-specific fields match. The check order matches the original:
 * identity, null/class, super, then field-by-field.
 */
@Override
public boolean equals(Object o)
{
  if (this == o) {
    return true;
  }
  if (o == null || getClass() != o.getClass() || !super.equals(o)) {
    return false;
  }
  final SelectQuery other = (SelectQuery) o;
  return Objects.equals(dimFilter, other.dimFilter)
         && Objects.equals(granularity, other.granularity)
         && Objects.equals(dimensions, other.dimensions)
         && Objects.equals(metrics, other.metrics)
         && Objects.equals(virtualColumns, other.virtualColumns)
         && Objects.equals(pagingSpec, other.pagingSpec);
}
/**
 * Hash code consistent with equals(). Uses the same 31-multiplier
 * accumulation (Objects.hashCode(x) == x == null ? 0 : x.hashCode()),
 * so produced values are identical to the previous implementation.
 */
@Override
public int hashCode()
{
  int result = super.hashCode();
  result = 31 * result + Objects.hashCode(dimFilter);
  result = 31 * result + Objects.hashCode(granularity);
  result = 31 * result + Objects.hashCode(dimensions);
  result = 31 * result + Objects.hashCode(metrics);
  result = 31 * result + Objects.hashCode(virtualColumns);
  result = 31 * result + Objects.hashCode(pagingSpec);
  return result;
}
}
| |
/*
* krypt-core API - Java version
*
* Copyright (c) 2011-2013
* Hiroshi Nakamura <nahi@ruby-lang.org>
* Martin Bosslet <martin.bosslet@gmail.com>
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.jruby.ext.krypt.asn1;
import impl.krypt.asn1.Length;
import impl.krypt.asn1.ParseException;
import impl.krypt.asn1.ParsedHeader;
import impl.krypt.asn1.SerializeException;
import impl.krypt.asn1.Tag;
import impl.krypt.asn1.TagClass;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import org.jruby.Ruby;
import org.jruby.RubyBoolean;
import org.jruby.RubyClass;
import org.jruby.RubyEncoding;
import org.jruby.RubyIO;
import org.jruby.RubyModule;
import org.jruby.RubyObject;
import org.jruby.RubySymbol;
import org.jruby.anno.JRubyMethod;
import org.jruby.ext.krypt.Errors;
import org.jruby.ext.krypt.Streams;
import org.jruby.runtime.ObjectAllocator;
import org.jruby.runtime.ThreadContext;
import org.jruby.runtime.builtin.IRubyObject;
import org.jruby.util.ByteList;
/**
*
* @author <a href="mailto:Martin.Bosslet@gmail.com">Martin Bosslet</a>
*/
public class RubyHeader extends RubyObject {

    /**
     * Registers the Krypt::ASN1 Header class under the given module and binds
     * the annotated Java methods. The class cannot be allocated from Ruby
     * code; instances are only created internally while parsing.
     */
    public static void createHeader(Ruby runtime, RubyModule mASN1) {
        mASN1.defineClassUnder("Header", runtime.getObject(), ObjectAllocator.NOT_ALLOCATABLE_ALLOCATOR)
            .defineAnnotatedMethods(RubyHeader.class);
    }

    /** The parsed ASN.1 header this object wraps. Never null. */
    private final ParsedHeader h;

    // Ruby views of the header fields, computed once in the constructor.
    private final IRubyObject tag;
    private final IRubyObject tagClass;
    private final IRubyObject isConstructed;
    private final IRubyObject isInfLen;
    private final IRubyObject len;
    private final IRubyObject hlen;

    /** Lazily materialized value; guarded by synchronized (see value()). */
    private IRubyObject cachedValue;

    /**
     * Wraps a parsed header. All scalar header properties are converted to
     * their Ruby representations eagerly.
     *
     * @throws NullPointerException if h is null
     */
    public RubyHeader(Ruby runtime, RubyClass type, impl.krypt.asn1.ParsedHeader h) {
        super(runtime, type);
        if (h == null) throw new NullPointerException("ParsedHeader must not be null");
        this.h = h;
        Tag t = h.getTag();
        Length l = h.getLength();
        this.tag = runtime.newFixnum(t.getTag());
        this.tagClass = tagClassFor(runtime, t.getTagClass());
        this.isConstructed = runtime.newBoolean(t.isConstructed());
        this.isInfLen = runtime.newBoolean(l.isInfiniteLength());
        this.len = runtime.newFixnum(l.getLength());
        this.hlen = runtime.newFixnum(h.getHeaderLength());
    }

    /**
     * Maps a TagClass constant to the Ruby symbol of the same name
     * (:UNIVERSAL, :CONTEXT_SPECIFIC, :APPLICATION or :PRIVATE).
     */
    static IRubyObject tagClassFor(Ruby runtime, TagClass tc) {
        switch(tc) {
            case UNIVERSAL:
                return RubySymbol.newSymbol(runtime, TagClass.UNIVERSAL.name());
            case CONTEXT_SPECIFIC:
                return RubySymbol.newSymbol(runtime, TagClass.CONTEXT_SPECIFIC.name());
            case APPLICATION:
                return RubySymbol.newSymbol(runtime, TagClass.APPLICATION.name());
            case PRIVATE:
                return RubySymbol.newSymbol(runtime, TagClass.PRIVATE.name());
            default:
                // Fixed typo in the error message ("Unkown" -> "Unknown").
                throw runtime.newRuntimeError("Unknown TagClass " + tc);
        }
    }

    /** @return the tag number as a Fixnum */
    @JRubyMethod
    public IRubyObject tag() {
        return tag;
    }

    /** @return the tag class as a Symbol */
    @JRubyMethod
    public IRubyObject tag_class() {
        return tagClass;
    }

    /** @return true if the encoding is constructed */
    @JRubyMethod(name="constructed?")
    public IRubyObject is_constructed() {
        return isConstructed;
    }

    /** @return true if the length encoding is the infinite (indefinite) form */
    @JRubyMethod(name="infinite?")
    public IRubyObject is_infinite() {
        return isInfLen;
    }

    /** @return the value length in bytes */
    @JRubyMethod(name={"size","length"})
    public IRubyObject size() {
        return len;
    }

    /** @return the header length in bytes */
    @JRubyMethod(name={"header_size","header_length"})
    public IRubyObject header_size() {
        return hlen;
    }

    /**
     * Encodes the header bytes to the given Ruby IO-like object.
     * Raises Krypt's serialize error on failure.
     */
    @JRubyMethod
    public IRubyObject encode_to(ThreadContext ctx, IRubyObject io) {
        Ruby runtime = ctx.getRuntime();
        // NOTE: "tryWrapAsOuputStream" (sic) is the actual API name in Streams.
        OutputStream out = Streams.tryWrapAsOuputStream(runtime, io);
        try {
            h.encodeTo(out);
            return this;
        }
        catch (SerializeException ex) {
            throw Errors.newSerializeError(runtime, ex.getMessage());
        }
    }

    /**
     * @return the raw header bytes as a binary Ruby String
     */
    @JRubyMethod
    public IRubyObject bytes(ThreadContext ctx) {
        Ruby runtime = ctx.getRuntime();
        try {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            h.encodeTo(baos);
            return runtime.newString(new ByteList(baos.toByteArray(), false));
        }
        catch (SerializeException ex) {
            throw Errors.newSerializeError(runtime, ex.getMessage());
        }
    }

    /**
     * Skips over the value bytes in the underlying stream without parsing them.
     */
    @JRubyMethod
    public synchronized IRubyObject skip_value() {
        h.skipValue();
        return this;
    }

    /**
     * Returns the parsed value, reading and caching it on first access.
     * Synchronized so concurrent callers see a single consistent read.
     */
    @JRubyMethod
    public synchronized IRubyObject value(ThreadContext ctx) {
        if (cachedValue == null) {
            cachedValue = readValue(ctx);
        }
        return cachedValue;
    }

    /**
     * Reads the value bytes; nil for an absent/empty value, otherwise a
     * binary Ruby String. Raises Krypt's parse error on failure.
     */
    private IRubyObject readValue(ThreadContext ctx) {
        Ruby runtime = ctx.getRuntime();
        try {
            byte[] value = h.getValue();
            if (value == null || value.length == 0)
                return runtime.getNil();
            else
                return runtime.newString(new ByteList(value, false));
        }
        catch (ParseException ex) {
            throw Errors.newParseError(runtime, ex.getMessage());
        }
    }

    /**
     * Returns an IO over the value bytes, encoded as BINARY (ASCII-8BIT).
     * The optional boolean argument (default true) selects whether only the
     * value bytes are exposed; it is forwarded to ParsedHeader.getValueStream.
     */
    @JRubyMethod(optional=1)
    public synchronized IRubyObject value_io(ThreadContext ctx, IRubyObject[] args) {
        Ruby runtime = ctx.getRuntime();
        IRubyObject valuesOnly = args.length > 0 ? args[0] : RubyBoolean.newBoolean(runtime, true);
        try {
            InputStream valueStream = h.getValueStream(valuesOnly.isTrue());
            RubyIO io = new RubyIO(runtime, valueStream);
            IRubyObject binaryEncoding = RubyEncoding.newEncoding(runtime,
                    runtime.getEncodingService().getAscii8bitEncoding());
            io.set_encoding(ctx, binaryEncoding);
            return io;
        }
        catch (ParseException ex) {
            throw Errors.newParseError(runtime, ex.getMessage());
        }
    }

    /**
     * @return a human-readable summary of the header fields
     */
    @JRubyMethod
    public IRubyObject to_s(ThreadContext ctx) {
        Tag t = h.getTag();
        Length l = h.getLength();
        String s = new StringBuilder()
                .append("Tag: ").append(t.getTag())
                .append(" Tag Class: ").append(t.getTagClass().name())
                .append(" Length: ").append(l.getLength())
                .append(" Header Length: ").append(h.getHeaderLength())
                .append(" Constructed: ").append(t.isConstructed())
                .append(" Infinite Length: ").append(l.isInfiniteLength())
                .toString();
        return ctx.getRuntime().newString(s);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.ldap.client.api;
import java.io.IOException;
import org.apache.directory.api.ldap.model.entry.Entry;
import org.apache.directory.api.ldap.model.exception.LdapException;
import org.apache.directory.api.ldap.model.message.AddRequest;
import org.apache.directory.api.ldap.model.message.BindRequest;
import org.apache.directory.api.ldap.model.message.CompareRequest;
import org.apache.directory.api.ldap.model.message.DeleteRequest;
import org.apache.directory.api.ldap.model.message.ExtendedRequest;
import org.apache.directory.api.ldap.model.message.ModifyDnRequest;
import org.apache.directory.api.ldap.model.message.ModifyRequest;
import org.apache.directory.api.ldap.model.message.SearchRequest;
import org.apache.directory.api.ldap.model.message.SearchScope;
import org.apache.directory.api.ldap.model.name.Dn;
import org.apache.directory.ldap.client.api.future.AddFuture;
import org.apache.directory.ldap.client.api.future.BindFuture;
import org.apache.directory.ldap.client.api.future.CompareFuture;
import org.apache.directory.ldap.client.api.future.DeleteFuture;
import org.apache.directory.ldap.client.api.future.ExtendedFuture;
import org.apache.directory.ldap.client.api.future.ModifyDnFuture;
import org.apache.directory.ldap.client.api.future.ModifyFuture;
import org.apache.directory.ldap.client.api.future.SearchFuture;
/**
* Root interface for all asynchronous LDAP connections.
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
public interface LdapAsyncConnection extends LdapConnection
{
    /**
     * Add an entry to the server asynchronously. This is a non blocking add :
     * the caller has to wait for the response from the returned Future.
     *
     * @param entry The entry to add
     * @return the add operation's future
     * @throws LdapException if some error occurred
     */
    AddFuture addAsync( Entry entry ) throws LdapException;

    /**
     * Add an entry present in the AddRequest to the server.
     *
     * @param addRequest the request object containing an entry and controls(if any)
     * @return the add operation's future
     * @throws LdapException if some error occurred
     */
    AddFuture addAsync( AddRequest addRequest ) throws LdapException;

    /**
     * Asynchronous Bind on a server, using the LdapConnectionConfig information.
     *
     * @return the bind operation's future
     * @throws LdapException if some error occurred
     * @throws IOException if some IO error occurred
     */
    BindFuture bindAsync() throws LdapException, IOException;

    /**
     * Anonymous asynchronous Bind on a server.
     *
     * @return the bind operation's future
     * @throws LdapException if some error occurred
     * @throws IOException if some IO error occurred
     */
    BindFuture anonymousBindAsync() throws LdapException, IOException;

    /**
     * Simple asynchronous Bind on a server.
     *
     * @param name The name we use to authenticate the user, it must be a valid Dn
     * @param credentials The password, it can't be null
     * @return the bind operation's future
     * @throws LdapException if some error occurred
     * @throws IOException if some IO error occurred
     */
    BindFuture bindAsync( String name, String credentials ) throws LdapException, IOException;

    /**
     * Simple asynchronous Bind on a server.
     *
     * @param name The name we use to authenticate the user, it must be a valid Dn
     * @param credentials The password, it can't be null
     * @return the bind operation's future
     * @throws LdapException if some error occurred
     * @throws IOException if some IO error occurred
     */
    BindFuture bindAsync( Dn name, String credentials ) throws LdapException, IOException;

    /**
     * Do an asynchronous bind, based on a BindRequest.
     *
     * @param bindRequest The BindRequest to send
     * @return the bind operation's future
     * @throws LdapException if some error occurred
     * @throws IOException if some IO error occurred
     */
    BindFuture bindAsync( BindRequest bindRequest ) throws LdapException, IOException;

    /**
     * Do an asynchronous search, on the base object, using the given filter. The
     * SearchRequest parameters default to :
     * <pre>
     * Scope : ONE
     * DerefAlias : ALWAYS
     * SizeLimit : none
     * TimeLimit : none
     * TypesOnly : false
     * Attributes : all the user's attributes.
     * This method is non-blocking.
     * </pre>
     *
     * @param baseDn The base for the search, it must be a valid Dn, and can't be empty
     * @param filter The filter to use for this search, it can't be empty
     * @param scope The search scope : OBJECT, ONELEVEL or SUBTREE
     * @param attributes The attributes for this search
     * @return the search operation's future
     * @throws org.apache.directory.api.ldap.model.exception.LdapException if some error occurred
     */
    SearchFuture searchAsync( String baseDn, String filter, SearchScope scope, String... attributes )
        throws LdapException;

    /**
     * Do an asynchronous search, on the base object, using the given filter. The
     * SearchRequest parameters default to :
     * <pre>
     * Scope : ONE
     * DerefAlias : ALWAYS
     * SizeLimit : none
     * TimeLimit : none
     * TypesOnly : false
     * Attributes : all the user's attributes.
     * This method is non-blocking.
     * </pre>
     *
     * @param baseDn The base for the search, it must be a valid Dn, and can't be empty
     * @param filter The filter to use for this search, it can't be empty
     * @param scope The search scope : OBJECT, ONELEVEL or SUBTREE
     * @param attributes The attributes for this search
     * @return the search operation's future
     * @throws LdapException if some error occurred
     */
    SearchFuture searchAsync( Dn baseDn, String filter, SearchScope scope, String... attributes )
        throws LdapException;

    /**
     * Do an asynchronous search, based on the given SearchRequest. The
     * SearchRequest parameters default to :
     * <pre>
     * Scope : ONE
     * DerefAlias : ALWAYS
     * SizeLimit : none
     * TimeLimit : none
     * TypesOnly : false
     * Attributes : all the user's attributes.
     * This method is non-blocking.
     * </pre>
     *
     * @param searchRequest The search request to send to the server
     * @return the search operation's future
     * @throws LdapException if some error occurred
     */
    SearchFuture searchAsync( SearchRequest searchRequest ) throws LdapException;

    /**
     * Performs an asynchronous modify operation based on the modifications present in
     * the ModifyRequest.
     *
     * @param modRequest the request for modify operation
     * @return the modify operation's future
     * @throws LdapException in case of modify operation failure or timeout happens
     */
    ModifyFuture modifyAsync( ModifyRequest modRequest ) throws LdapException;

    /**
     * Performs the modifyDn operation based on the given ModifyDnRequest.
     *
     * @param modDnRequest the request
     * @return modifyDn operation's future
     * @throws LdapException if some error occurred
     */
    ModifyDnFuture modifyDnAsync( ModifyDnRequest modDnRequest ) throws LdapException;

    /**
     * Performs an asynchronous delete operation based on the delete request object.
     *
     * @param delRequest the delete operation's request
     * @return delete operation's future
     * @throws LdapException If the Dn is not valid or if the deletion failed
     */
    DeleteFuture deleteAsync( DeleteRequest delRequest ) throws LdapException;

    /**
     * Asynchronously compares an entry's attribute's value with that of the given value
     *
     * @param compareRequest the CompareRequest which contains the target Dn, attribute name and value
     * @return compare operation's future
     * @throws LdapException if some error occurred
     */
    CompareFuture compareAsync( CompareRequest compareRequest ) throws LdapException;

    /**
     * Asynchronously requests the server to perform an extended operation based on the given request.
     *
     * @param extendedRequest the object containing the details of the extended operation to be performed
     * @return extended operation's Future
     * @throws LdapException if some error occurred
     */
    ExtendedFuture extendedAsync( ExtendedRequest extendedRequest ) throws LdapException;

    /**
     * Configuration of LdapNetworkConnection
     *
     * @return the configuration of the LDAP connection
     */
    LdapConnectionConfig getConfig();
}
| |
package ca.uhn.fhir.jpa.dao.dstu2;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.AfterClass;
import org.junit.Test;
import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl.Suggestion;
import ca.uhn.fhir.jpa.dao.SearchParameterMap;
import ca.uhn.fhir.model.dstu2.resource.Device;
import ca.uhn.fhir.model.dstu2.resource.Media;
import ca.uhn.fhir.model.dstu2.resource.Observation;
import ca.uhn.fhir.model.dstu2.resource.Patient;
import ca.uhn.fhir.model.primitive.Base64BinaryDt;
import ca.uhn.fhir.model.primitive.StringDt;
import ca.uhn.fhir.rest.param.StringAndListParam;
import ca.uhn.fhir.rest.param.StringOrListParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.Constants;
import ca.uhn.fhir.util.TestUtil;
public class FhirResourceDaoDstu2SearchFtTest extends BaseJpaDstu2Test {
// SLF4J logger for this test class.
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu2SearchFtTest.class);
@AfterClass
public static void afterClassClearContext() {
    // Release static test state so the Spring context/classloader can be reclaimed.
    TestUtil.clearAllStaticFieldsForUnitTest();
}
@Test
public void testSuggestIgnoresBase64Content() {
    // A patient and an attached Media resource whose content carries base64 binary data.
    // Keyword suggestion must index the textual fields but never the base64 payload.
    Patient patient = new Patient();
    patient.addName().addFamily("testSuggest");
    IIdType ptId = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
    Media med = new Media();
    med.getSubject().setReference(ptId);
    med.getSubtype().setText("Systolic Blood Pressure");
    // Content type deliberately looks like a base64 fragment ("LCws") to probe indexing.
    med.getContent().setContentType("LCws");
    med.getContent().setData(new Base64BinaryDt(new byte[] { 44, 44, 44, 44, 44, 44, 44, 44 }));
    med.getContent().setTitle("bbbb syst");
    myMediaDao.create(med, mySrd);
    ourLog.info(myFhirCtx.newJsonParser().encodeResourceToString(med));
    // Exact prefix match on an indexed text field.
    List<Suggestion> output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "press");
    ourLog.info("Found: " + output);
    assertEquals(2, output.size());
    assertEquals("Pressure", output.get(0).getTerm());
    assertEquals("Systolic Blood Pressure", output.get(1).getTerm());
    // Fuzzy match ("prezure") should still find the same terms.
    output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "prezure");
    ourLog.info("Found: " + output);
    assertEquals(2, output.size());
    assertEquals("Pressure", output.get(0).getTerm());
    assertEquals("Systolic Blood Pressure", output.get(1).getTerm());
    // Prefix matching both the Media title ("bbbb syst") and the subtype text.
    output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "syst");
    ourLog.info("Found: " + output);
    assertEquals(4, output.size());
    assertEquals("syst", output.get(0).getTerm());
    assertEquals("bbbb syst", output.get(1).getTerm());
    assertEquals("Systolic", output.get(2).getTerm());
    assertEquals("Systolic Blood Pressure", output.get(3).getTerm());
    // The base64-looking content type must yield no suggestions at all.
    output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "LCws");
    ourLog.info("Found: " + output);
    assertEquals(0, output.size());
}
@Test
public void testSuggest() {
    // Observations for the target patient; their code text feeds the suggester.
    Patient patient = new Patient();
    patient.addName().addFamily("testSuggest");
    IIdType ptId = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
    Observation obs = new Observation();
    obs.getSubject().setReference(ptId);
    obs.getCode().setText("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL");
    myObservationDao.create(obs, mySrd);
    obs = new Observation();
    obs.getSubject().setReference(ptId);
    obs.getCode().setText("MNBVCXZ");
    myObservationDao.create(obs, mySrd);
    obs = new Observation();
    obs.getSubject().setReference(ptId);
    obs.getCode().setText("ZXC HELLO");
    obs.addComponent().getCode().setText("HHHHHHHHHH");
    myObservationDao.create(obs, mySrd);
    /*
     * These shouldn't match since they're for another patient
     */
    patient = new Patient();
    patient.addName().addFamily("testSuggest2");
    IIdType ptId2 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
    Observation obs2 = new Observation();
    obs2.getSubject().setReference(ptId2);
    obs2.getCode().setText("ZXCVBNMZZ");
    myObservationDao.create(obs2, mySrd);
    // Exact token match plus related terms; ZXCVBNMZZ (other patient) must not appear.
    List<Suggestion> output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "ZXCVBNM");
    ourLog.info("Found: " + output);
    assertEquals(4, output.size());
    assertEquals("ZXCVBNM", output.get(0).getTerm());
    assertEquals("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL", output.get(1).getTerm());
    assertEquals("ZXC", output.get(2).getTerm());
    assertEquals("ZXC HELLO", output.get(3).getTerm());
    // Prefix match ordering: exact token first, then phrases.
    output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "ZXC");
    ourLog.info("Found: " + output);
    assertEquals(4, output.size());
    assertEquals("ZXC", output.get(0).getTerm());
    assertEquals("ZXC HELLO", output.get(1).getTerm());
    assertEquals("ZXCVBNM", output.get(2).getTerm());
    assertEquals("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL", output.get(3).getTerm());
    // Fuzzy match ("HELO" -> "HELLO").
    output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "HELO");
    ourLog.info("Found: " + output);
    assertEquals(2, output.size());
    assertEquals("HELLO", output.get(0).getTerm());
    assertEquals("ZXC HELLO", output.get(1).getTerm());
    // A single character is below the minimum query length: no suggestions.
    output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "Z");
    ourLog.info("Found: " + output);
    assertEquals(0, output.size());
    // Two characters is enough to suggest.
    output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "ZX");
    ourLog.info("Found: " + output);
    assertEquals(2, output.size());
    assertEquals("ZXC", output.get(0).getTerm());
    assertEquals("ZXC HELLO", output.get(1).getTerm());
}
@Test
public void testSearchAndReindex() {
    // Verify that _content/_text fulltext indexes follow resource updates.
    Patient patient;
    SearchParameterMap map;
    patient = new Patient();
    patient.getText().setDiv("<div>DIVAAA</div>");
    patient.addName().addGiven("NAMEAAA");
    IIdType pId1 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
    // Initial content is searchable via _content (name) and _text (narrative div).
    map = new SearchParameterMap();
    map.add(Constants.PARAM_CONTENT, new StringParam("NAMEAAA"));
    assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), contains(pId1));
    map = new SearchParameterMap();
    map.add(Constants.PARAM_TEXT, new StringParam("DIVAAA"));
    assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), contains(pId1));
    /*
     * Reindex
     */
    patient = new Patient();
    patient.setId(pId1);
    patient.getText().setDiv("<div>DIVBBB</div>");
    patient.addName().addGiven("NAMEBBB");
    myPatientDao.update(patient, mySrd);
    // After the update, the old content must no longer match...
    map = new SearchParameterMap();
    map.add(Constants.PARAM_CONTENT, new StringParam("NAMEAAA"));
    assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), empty());
    // ...and the new content must match via regular and fulltext params.
    map = new SearchParameterMap();
    map.add(Patient.SP_NAME, new StringParam("NAMEBBB"));
    assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), contains(pId1));
    map = new SearchParameterMap();
    map.add(Constants.PARAM_CONTENT, new StringParam("NAMEBBB"));
    assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), contains(pId1));
    map = new SearchParameterMap();
    map.add(Constants.PARAM_TEXT, new StringParam("DIVBBB"));
    assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), contains(pId1));
}
/**
 * $everything on a patient instance, filtered with _content/_text: only the
 * target patient's compartment (plus referenced devices) should be returned,
 * and fulltext filters must follow updates to the indexed resources.
 */
@Test
public void testEverythingInstanceWithContentFilter() {
    // Two patients so we can verify the operation stays scoped to the target instance.
    Patient pt1 = new Patient();
    pt1.addName().addFamily("Everything").addGiven("Arthur");
    IIdType ptId1 = myPatientDao.create(pt1, mySrd).getId().toUnqualifiedVersionless();
    Patient pt2 = new Patient();
    pt2.addName().addFamily("Everything").addGiven("Arthur");
    IIdType ptId2 = myPatientDao.create(pt2, mySrd).getId().toUnqualifiedVersionless();
    Device dev1 = new Device();
    dev1.setManufacturer("Some Manufacturer");
    IIdType devId1 = myDeviceDao.create(dev1, mySrd).getId().toUnqualifiedVersionless();
    // dev2 is never referenced by an observation, so it must never be returned.
    Device dev2 = new Device();
    dev2.setManufacturer("Some Manufacturer 2");
    myDeviceDao.create(dev2, mySrd).getId().toUnqualifiedVersionless();
    Observation obs1 = new Observation();
    obs1.getText().setDiv("<div>OBSTEXT1</div>");
    obs1.getSubject().setReference(ptId1);
    obs1.getCode().addCoding().setCode("CODE1");
    obs1.setValue(new StringDt("obsvalue1"));
    obs1.getDevice().setReference(devId1);
    IIdType obsId1 = myObservationDao.create(obs1, mySrd).getId().toUnqualifiedVersionless();
    Observation obs2 = new Observation();
    obs2.getSubject().setReference(ptId1);
    obs2.getCode().addCoding().setCode("CODE2");
    obs2.setValue(new StringDt("obsvalue2"));
    IIdType obsId2 = myObservationDao.create(obs2, mySrd).getId().toUnqualifiedVersionless();
    // obs3 belongs to the other patient and must be excluded from pt1's $everything.
    Observation obs3 = new Observation();
    obs3.getSubject().setReference(ptId2);
    obs3.getCode().addCoding().setCode("CODE3");
    obs3.setValue(new StringDt("obsvalue3"));
    IIdType obsId3 = myObservationDao.create(obs3, mySrd).getId().toUnqualifiedVersionless();
    HttpServletRequest request;
    List<IIdType> actual;
    request = mock(HttpServletRequest.class);
    StringAndListParam param;
    ourLog.info("Pt1:{} Pt2:{} Obs1:{} Obs2:{} Obs3:{}", new Object[] { ptId1.getIdPart(), ptId2.getIdPart(), obsId1.getIdPart(), obsId2.getIdPart(), obsId3.getIdPart() });
    // _content filter: matches obs1's value, pulls in the patient and referenced device.
    param = new StringAndListParam();
    param.addAnd(new StringOrListParam().addOr(new StringParam("obsvalue1")));
    actual = toUnqualifiedVersionlessIds(myPatientDao.patientInstanceEverything(request, ptId1, null, null, null, param, null, mySrd));
    assertThat(actual, containsInAnyOrder(ptId1, obsId1, devId1));
    // _text filter: matches obs1's narrative div.
    param = new StringAndListParam();
    param.addAnd(new StringOrListParam().addOr(new StringParam("obstext1")));
    actual = toUnqualifiedVersionlessIds(myPatientDao.patientInstanceEverything(request, ptId1, null, null, null, null, param, mySrd));
    assertThat(actual, containsInAnyOrder(ptId1, obsId1, devId1));
    // No filter: the whole pt1 compartment, but nothing from pt2.
    request = mock(HttpServletRequest.class);
    actual = toUnqualifiedVersionlessIds(myPatientDao.patientInstanceEverything(request, ptId1, null, null, null, null, null, mySrd));
    assertThat(actual, containsInAnyOrder(ptId1, obsId1, obsId2, devId1));
    /*
     * Add another match
     */
    Observation obs4 = new Observation();
    obs4.getSubject().setReference(ptId1);
    obs4.getCode().addCoding().setCode("CODE1");
    obs4.setValue(new StringDt("obsvalue1"));
    IIdType obsId4 = myObservationDao.create(obs4, mySrd).getId().toUnqualifiedVersionless();
    // FIX: the original call assertNotEquals(obsId1.getIdPart(), obsId4.getIdPart(), devId1)
    // bound to the (String message, Object unexpected, Object actual) overload, so it
    // compared obsId4's id against devId1 — a vacuous check. Compare the two observation ids.
    assertNotEquals(obsId1.getIdPart(), obsId4.getIdPart());
    param = new StringAndListParam();
    param.addAnd(new StringOrListParam().addOr(new StringParam("obsvalue1")));
    actual = toUnqualifiedVersionlessIds(myPatientDao.patientInstanceEverything(request, ptId1, null, null, null, param, null, mySrd));
    assertThat(actual, containsInAnyOrder(ptId1, obsId1, obsId4, devId1));
    /*
     * Make one previous match no longer match
     */
    obs1 = new Observation();
    obs1.setId(obsId1);
    obs1.getSubject().setReference(ptId1);
    obs1.getCode().addCoding().setCode("CODE2");
    obs1.setValue(new StringDt("obsvalue2"));
    myObservationDao.update(obs1, mySrd);
    param = new StringAndListParam();
    param.addAnd(new StringOrListParam().addOr(new StringParam("obsvalue1")));
    actual = toUnqualifiedVersionlessIds(myPatientDao.patientInstanceEverything(request, ptId1, null, null, null, param, null, mySrd));
    assertThat(actual, containsInAnyOrder(ptId1, obsId4));
}
/**
 * $everything at the Patient type level, filtered with _content: all patients'
 * compartments are candidates, and the fulltext filter must follow updates.
 */
@Test
public void testEverythingTypeWithContentFilter() {
    Patient pt1 = new Patient();
    pt1.addName().addFamily("Everything").addGiven("Arthur");
    IIdType ptId1 = myPatientDao.create(pt1, mySrd).getId().toUnqualifiedVersionless();
    Patient pt2 = new Patient();
    pt2.addName().addFamily("Everything").addGiven("Arthur");
    IIdType ptId2 = myPatientDao.create(pt2, mySrd).getId().toUnqualifiedVersionless();
    Device dev1 = new Device();
    dev1.setManufacturer("Some Manufacturer");
    IIdType devId1 = myDeviceDao.create(dev1, mySrd).getId().toUnqualifiedVersionless();
    // dev2 is never referenced by an observation, so it must never be returned.
    // (The unused local that previously held its id has been removed.)
    Device dev2 = new Device();
    dev2.setManufacturer("Some Manufacturer 2");
    myDeviceDao.create(dev2, mySrd).getId().toUnqualifiedVersionless();
    Observation obs1 = new Observation();
    obs1.getSubject().setReference(ptId1);
    obs1.getCode().addCoding().setCode("CODE1");
    obs1.setValue(new StringDt("obsvalue1"));
    obs1.getDevice().setReference(devId1);
    IIdType obsId1 = myObservationDao.create(obs1, mySrd).getId().toUnqualifiedVersionless();
    Observation obs2 = new Observation();
    obs2.getSubject().setReference(ptId1);
    obs2.getCode().addCoding().setCode("CODE2");
    obs2.setValue(new StringDt("obsvalue2"));
    IIdType obsId2 = myObservationDao.create(obs2, mySrd).getId().toUnqualifiedVersionless();
    Observation obs3 = new Observation();
    obs3.getSubject().setReference(ptId2);
    obs3.getCode().addCoding().setCode("CODE3");
    obs3.setValue(new StringDt("obsvalue3"));
    IIdType obsId3 = myObservationDao.create(obs3, mySrd).getId().toUnqualifiedVersionless();
    HttpServletRequest request;
    List<IIdType> actual;
    request = mock(HttpServletRequest.class);
    StringAndListParam param;
    ourLog.info("Pt1:{} Pt2:{} Obs1:{} Obs2:{} Obs3:{}", new Object[] { ptId1.getIdPart(), ptId2.getIdPart(), obsId1.getIdPart(), obsId2.getIdPart(), obsId3.getIdPart() });
    // _content filter at the type level still narrows to matching resources only.
    param = new StringAndListParam();
    param.addAnd(new StringOrListParam().addOr(new StringParam("obsvalue1")));
    actual = toUnqualifiedVersionlessIds(myPatientDao.patientTypeEverything(request, null, null, null, param, null, mySrd));
    assertThat(actual, containsInAnyOrder(ptId1, obsId1, devId1));
    // No filter: both patients' compartments are returned.
    request = mock(HttpServletRequest.class);
    actual = toUnqualifiedVersionlessIds(myPatientDao.patientTypeEverything(request, null, null, null, null, null, mySrd));
    assertThat(actual, containsInAnyOrder(ptId1, obsId1, obsId2, devId1, ptId2, obsId3));
    /*
     * Add another match
     */
    Observation obs4 = new Observation();
    obs4.getSubject().setReference(ptId1);
    obs4.getCode().addCoding().setCode("CODE1");
    obs4.setValue(new StringDt("obsvalue1"));
    IIdType obsId4 = myObservationDao.create(obs4, mySrd).getId().toUnqualifiedVersionless();
    // FIX: the original call assertNotEquals(obsId1.getIdPart(), obsId4.getIdPart(), devId1)
    // bound to the (String message, Object unexpected, Object actual) overload, so it
    // compared obsId4's id against devId1 — a vacuous check. Compare the two observation ids.
    assertNotEquals(obsId1.getIdPart(), obsId4.getIdPart());
    param = new StringAndListParam();
    param.addAnd(new StringOrListParam().addOr(new StringParam("obsvalue1")));
    actual = toUnqualifiedVersionlessIds(myPatientDao.patientTypeEverything(request, null, null, null, param, null, mySrd));
    assertThat(actual, containsInAnyOrder(ptId1, obsId1, obsId4, devId1));
    /*
     * Make one previous match no longer match
     */
    obs1 = new Observation();
    obs1.setId(obsId1);
    obs1.getSubject().setReference(ptId1);
    obs1.getCode().addCoding().setCode("CODE2");
    obs1.setValue(new StringDt("obsvalue2"));
    myObservationDao.update(obs1, mySrd);
    param = new StringAndListParam();
    param.addAnd(new StringOrListParam().addOr(new StringParam("obsvalue1")));
    actual = toUnqualifiedVersionlessIds(myPatientDao.patientTypeEverything(request, null, null, null, param, null, mySrd));
    assertThat(actual, containsInAnyOrder(ptId1, obsId4));
}
/**
 * When processing transactions, we do two passes. Make sure we don't update the lucene index twice since that would be inefficient
 */
@Test
public void testSearchDontReindexForUpdateWithIndexDisabled() {
    Patient patient;
    SearchParameterMap map;
    patient = new Patient();
    patient.getText().setDiv("<div>DIVAAA</div>");
    patient.addName().addGiven("NAMEAAA");
    IIdType pId1 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
    // Initial create is indexed: _content and _text both match.
    map = new SearchParameterMap();
    map.add(Constants.PARAM_CONTENT, new StringParam("NAMEAAA"));
    assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), contains(pId1));
    map = new SearchParameterMap();
    map.add(Constants.PARAM_TEXT, new StringParam("DIVAAA"));
    assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), contains(pId1));
    /*
     * Update but don't reindex
     */
    patient = new Patient();
    patient.setId(pId1);
    patient.getText().setDiv("<div>DIVBBB</div>");
    patient.addName().addGiven("NAMEBBB");
    // update(..., thePerformIndexing=false): the lucene index must keep the OLD content.
    myPatientDao.update(patient, null, false, mySrd);
    map = new SearchParameterMap();
    map.add(Constants.PARAM_CONTENT, new StringParam("NAMEAAA"));
    assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), contains(pId1));
    map = new SearchParameterMap();
    map.add(Constants.PARAM_CONTENT, new StringParam("NAMEBBB"));
    assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), not(contains(pId1)));
    // Second pass with indexing enabled: now the index must reflect the NEW content.
    myPatientDao.update(patient, null, true, mySrd);
    map = new SearchParameterMap();
    map.add(Constants.PARAM_CONTENT, new StringParam("NAMEAAA"));
    assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), empty());
    map = new SearchParameterMap();
    map.add(Patient.SP_NAME, new StringParam("NAMEBBB"));
    assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), contains(pId1));
    map = new SearchParameterMap();
    map.add(Constants.PARAM_CONTENT, new StringParam("NAMEBBB"));
    assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), contains(pId1));
    map = new SearchParameterMap();
    map.add(Constants.PARAM_TEXT, new StringParam("DIVBBB"));
    assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), contains(pId1));
}
@Test
public void testSearchWithChainedParams() {
	String methodName = "testSearchWithChainedParams";

	// One patient whose given name and address line both carry fulltext content.
	Patient patient = new Patient();
	patient.addName().addGiven(methodName);
	patient.addAddress().addLine("My fulltext address");
	IIdType patientId = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();

	// Two observations referencing that patient, each with a string value
	// containing the word "fulltext" in differing letter cases.
	Observation observation = new Observation();
	observation.getSubject().setReference(patientId);
	observation.setValue(new StringDt("This is the FULLtext of the observation"));
	IIdType firstObsId = myObservationDao.create(observation, mySrd).getId().toUnqualifiedVersionless();

	observation = new Observation();
	observation.getSubject().setReference(patientId);
	observation.setValue(new StringDt("Another fullText"));
	IIdType secondObsId = myObservationDao.create(observation, mySrd).getId().toUnqualifiedVersionless();

	// A _content search on the Patient DAO matches only the patient.
	SearchParameterMap searchMap = new SearchParameterMap();
	searchMap.add(Constants.PARAM_CONTENT, new StringParam("fulltext"));
	List<IIdType> resultIds = toUnqualifiedVersionlessIds(myPatientDao.search(searchMap));
	assertThat(resultIds, containsInAnyOrder(patientId));

	// A _content search on the Observation DAO matches both observations; the
	// upper-case query matching mixed-case values shows the match is case-insensitive.
	searchMap = new SearchParameterMap();
	searchMap.add(Constants.PARAM_CONTENT, new StringParam("FULLTEXT"));
	resultIds = toUnqualifiedVersionlessIds(myObservationDao.search(searchMap));
	assertThat(resultIds, containsInAnyOrder(firstObsId, secondObsId));
}
}
| |
package edu.ucsf.lava.core.action;
import static edu.ucsf.lava.core.session.CoreSessionUtils.CURRENT_ACTION;
import java.io.IOException;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.core.io.DefaultResourceLoader;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
import edu.ucsf.lava.core.action.model.Action;
import edu.ucsf.lava.core.environment.EnvironmentManager;
import edu.ucsf.lava.core.manager.LavaManager;
import edu.ucsf.lava.core.manager.CoreManagerUtils;
import edu.ucsf.lava.core.manager.Managers;
import edu.ucsf.lava.core.scope.ScopeActionDelegate;
import edu.ucsf.lava.core.scope.ScopeManager;
import edu.ucsf.lava.core.session.CoreSessionUtils;
import edu.ucsf.lava.core.session.SessionManager;
public class ActionManager extends LavaManager {
public static String ACTION_MANAGER_NAME="actionManager";
protected ActionDefinitions actionDefinitions;
protected ActionRegistry actionRegistry = new ActionRegistry();
protected ScopeManager scopeManager;
protected SessionManager sessionManager;
protected EnvironmentManager environmentManager;
/** Logger for this class and subclasses */
protected final Log logger = LogFactory.getLog(getClass());
public ActionManager(){
super(ACTION_MANAGER_NAME);
}
public void updateManagers(Managers managers) {
super.updateManagers(managers);
scopeManager = CoreManagerUtils.getScopeManager(managers);
sessionManager = CoreManagerUtils.getSessionManager(managers);
environmentManager = CoreManagerUtils.getEnvironmentManager(managers);
}
/**
* create a registry (map) of action lookup keys to actions from the
* actionDefinitions. Encapsulates the logic for generating the registry
* to the action scope delegates. This facilitates abstracting knowledge of
* scope specific default action rules out of the core.
*/
public void reloadActionDefinitions(){
//Copy all action definitions into temp action registry
ActionRegistry registry = new ActionRegistry();
registry.setActions(actionDefinitions.getDefinitionsCopy());
//pass the registry to each scope delegate (in order)
for(ScopeActionDelegate delegate : scopeManager.getActionDelegates().getOrderedDelegates()){
registry = delegate.onReloadActionDefinitions(this,registry);
}
//set the generated registry as the current actionRegistry
this.actionRegistry = registry;
}
public Action getCurrentAction(HttpServletRequest request){
return (Action)request.getSession().getAttribute(CURRENT_ACTION);
}
public void setCurrentAction(HttpServletRequest request, Action action){
request.getSession().setAttribute(CURRENT_ACTION,action);
}
public ActionRegistry getActionRegistry() {
return actionRegistry;
}
/**
* Get the action using the id
* would be loaded by getEffectiveRuntimeAction()
* @param actionId
* @return
*/
public Action getAction(String actionId){
if(actionRegistry.containsAction(actionId)){
return (Action) actionRegistry.getAction(actionId);
}
return null;
}
/**
* Determine the effective "runtime" action based on the actionId passed in and the
* current runtime conditions (e.g. what instance is running).
*
* @param request
* @param actionId
* @return
*/
public Action getEffectiveAction(HttpServletRequest request, String actionId)
{
ScopeActionDelegate delegate = scopeManager.getActionDelegate(ActionUtils.getScope(actionId));
if(delegate==null){return null;}
return delegate.resolveEffectiveAction(this,request,actionId);
}
/**
* Convenience method that determines the effective action (in the absense
* of an active request)
*
* @param actionId
* @return
*/
public Action getEffectiveAction(String actionId)
{
return getEffectiveAction(null,actionId);
}
/**
* Determines the flow id based on the request.
* @return
*/
public String extractFlowIdFromRequest(HttpServletRequest request){
Action action = this.getEffectiveAction(request, ActionUtils.getActionId(request));
if(action==null){return null;}
ScopeActionDelegate delegate = scopeManager.getActionDelegate(action);
if(delegate==null){return null;}
return delegate.extractFlowIdFromAction(this, request, action);
}
/*
* Given an actionId, determine if there is a corresponding instance specific actionId
* for which flows will be built (in which case these instance specific flows will always
* be used instead of the corresponding core flows) or whether the core actionId should
* be used to build the flows.
*
* @return effective actionId used to construct the flow id
*/
public boolean shouldBuildFlowsForAction(String actionId) {
ScopeActionDelegate delegate = scopeManager.getActionDelegate(ActionUtils.getScope(actionId));
if(delegate==null){return false;}
return delegate.shouldBuildFlowsForAction(this,actionId);
}
public String getWebAppInstanceName(){
if(environmentManager==null){ return "";}
return environmentManager.getInstanceName();
}
public Action getDefaultAction(HttpServletRequest request,Action actionIn){
ScopeActionDelegate delegate = scopeManager.getActionDelegate(actionIn);
if(delegate==null){return null;}
return delegate.getDefaultAction(this,request, actionIn);
}
public Action getDefaultAction(HttpServletRequest request)
{
return getDefaultAction(request,CoreSessionUtils.getCurrentAction(sessionManager, request));
}
public ActionDefinitions getActionDefinitions() {
return actionDefinitions;
}
public void setActionDefinitions(ActionDefinitions actionDefinitions) {
this.actionDefinitions = actionDefinitions;
}
}
| |
/*
Copyright (c) 2001, Dr Martin Porter
Copyright (c) 2002, Richard Boulton
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* Neither the name of the copyright holders nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package opennlp.tools.stemmer.snowball;
import java.lang.reflect.InvocationTargetException;
class SnowballProgram {
// Base runtime for generated Snowball stemmers. Holds the string being
// processed ('current') plus the cursor/limit state the generated stemmer
// code manipulates. All positions below are character indexes into 'current'.
protected SnowballProgram()
{
current = new StringBuffer();
setCurrent("");
}
/**
* Set the current string, resetting all state: cursor at the start, limit
* at the end, limit_backward at 0, and [bra, ket] spanning the whole string.
*/
public void setCurrent(String value)
{
current.replace(0, current.length(), value);
cursor = 0;
limit = current.length();
limit_backward = 0;
bra = cursor;
ket = limit;
}
/**
* Get the current string.
*/
public String getCurrent()
{
String result = current.toString();
// Make a new StringBuffer. If we reuse the old one, and a user of
// the library keeps a reference to the buffer returned (for example,
// by converting it to a String in a way which doesn't force a copy),
// the buffer size will not decrease, and we will risk wasting a large
// amount of memory.
// Thanks to Wolfram Esser for spotting this problem.
current = new StringBuffer();
return result;
}
// current string
protected StringBuffer current;
// position of the next character to examine (forward direction)
protected int cursor;
// exclusive upper bound for forward operations
protected int limit;
// inclusive lower bound for backward (suffix) operations
protected int limit_backward;
// left and right markers of the region targeted by the slice_* operations
protected int bra;
protected int ket;
// Shallow state copy: note the StringBuffer itself is shared with 'other',
// not cloned.
protected void copy_from(SnowballProgram other)
{
current = other.current;
cursor = other.cursor;
limit = other.limit;
limit_backward = other.limit_backward;
bra = other.bra;
ket = other.ket;
}
// Tests whether the char at the cursor belongs to the grouping described by
// bit-set s: bit (ch - min) set means member. On success consumes the char
// by advancing the cursor; on failure the cursor is unchanged.
protected boolean in_grouping(char [] s, int min, int max)
{
if (cursor >= limit) return false;
char ch = current.charAt(cursor);
if (ch > max || ch < min) return false;
ch -= min;
if ((s[ch >> 3] & (0X1 << (ch & 0X7))) == 0) return false;
cursor++;
return true;
}
// Backward variant of in_grouping: examines the char just before the cursor
// and moves the cursor one position left on success.
protected boolean in_grouping_b(char [] s, int min, int max)
{
if (cursor <= limit_backward) return false;
char ch = current.charAt(cursor - 1);
if (ch > max || ch < min) return false;
ch -= min;
if ((s[ch >> 3] & (0X1 << (ch & 0X7))) == 0) return false;
cursor--;
return true;
}
// Tests whether the char at the cursor is OUTSIDE the grouping (out of the
// [min, max] window, or in-window with its membership bit clear); consumes
// it on success.
protected boolean out_grouping(char [] s, int min, int max)
{
if (cursor >= limit) return false;
char ch = current.charAt(cursor);
if (ch > max || ch < min) {
cursor++;
return true;
}
ch -= min;
if ((s[ch >> 3] & (0X1 << (ch & 0X7))) == 0) {
cursor ++;
return true;
}
return false;
}
// Backward variant of out_grouping.
protected boolean out_grouping_b(char [] s, int min, int max)
{
if (cursor <= limit_backward) return false;
char ch = current.charAt(cursor - 1);
if (ch > max || ch < min) {
cursor--;
return true;
}
ch -= min;
if ((s[ch >> 3] & (0X1 << (ch & 0X7))) == 0) {
cursor--;
return true;
}
return false;
}
// Tests whether the char at the cursor lies within [min, max]; consumes it
// on success. (Parameters are ints; chars promote for the comparison.)
protected boolean in_range(int min, int max)
{
if (cursor >= limit) return false;
char ch = current.charAt(cursor);
if (ch > max || ch < min) return false;
cursor++;
return true;
}
// Backward variant of in_range.
protected boolean in_range_b(int min, int max)
{
if (cursor <= limit_backward) return false;
char ch = current.charAt(cursor - 1);
if (ch > max || ch < min) return false;
cursor--;
return true;
}
// Tests whether the char at the cursor lies OUTSIDE [min, max]; consumes it
// on success.
protected boolean out_range(int min, int max)
{
if (cursor >= limit) return false;
char ch = current.charAt(cursor);
if (!(ch > max || ch < min)) return false;
cursor++;
return true;
}
// Backward variant of out_range.
protected boolean out_range_b(int min, int max)
{
if (cursor <= limit_backward) return false;
char ch = current.charAt(cursor - 1);
if(!(ch > max || ch < min)) return false;
cursor--;
return true;
}
// Tests whether the s_size chars starting at the cursor equal s; advances
// the cursor past them on success, leaves it unchanged on failure.
protected boolean eq_s(int s_size, String s)
{
if (limit - cursor < s_size) return false;
int i;
for (i = 0; i != s_size; i++) {
if (current.charAt(cursor + i) != s.charAt(i)) return false;
}
cursor += s_size;
return true;
}
// Backward variant of eq_s: tests the s_size chars ending at the cursor and
// moves the cursor back past them on success.
protected boolean eq_s_b(int s_size, String s)
{
if (cursor - limit_backward < s_size) return false;
int i;
for (i = 0; i != s_size; i++) {
if (current.charAt(cursor - s_size + i) != s.charAt(i)) return false;
}
cursor -= s_size;
return true;
}
// CharSequence convenience wrappers over eq_s / eq_s_b.
protected boolean eq_v(CharSequence s)
{
return eq_s(s.length(), s.toString());
}
protected boolean eq_v_b(CharSequence s)
{ return eq_s_b(s.length(), s.toString());
}
// Finds the longest entry of v matching the text at the cursor (forward).
// Phase 1 binary-searches [i, j) over the (sorted) among array, tracking
// common_i/common_j — the prefix lengths already known equal at the lower
// and upper ends — so characters are never re-compared. Phase 2 walks the
// substring_i chain from the surviving candidate, accepting the first
// entry that fully matched and whose optional condition method (invoked
// via reflection; reflection failures count as "false") returns true.
// Returns that entry's result code, or 0 if nothing matched. On a match
// the cursor is advanced past the matched text.
protected int find_among(Among v[], int v_size)
{
int i = 0;
int j = v_size;
int c = cursor;
int l = limit;
int common_i = 0;
int common_j = 0;
boolean first_key_inspected = false;
while(true) {
int k = i + ((j - i) >> 1);
int diff = 0;
int common = common_i < common_j ? common_i : common_j; // smaller
Among w = v[k];
int i2;
for (i2 = common; i2 < w.s_size; i2++) {
if (c + common == l) {
diff = -1;
break;
}
diff = current.charAt(c + common) - w.s[i2];
if (diff != 0) break;
common++;
}
if (diff < 0) {
j = k;
common_j = common;
} else {
i = k;
common_i = common;
}
if (j - i <= 1) {
if (i > 0) break; // v->s has been inspected
if (j == i) break; // only one item in v
// - but now we need to go round once more to get
// v->s inspected. This looks messy, but is actually
// the optimal approach.
if (first_key_inspected) break;
first_key_inspected = true;
}
}
while(true) {
Among w = v[i];
if (common_i >= w.s_size) {
cursor = c + w.s_size;
if (w.method == null) return w.result;
boolean res;
try {
Object resobj = w.method.invoke(w.methodobject,
new Object[0]);
res = resobj.toString().equals("true");
} catch (InvocationTargetException e) {
res = false;
// FIXME - debug message
} catch (IllegalAccessException e) {
res = false;
// FIXME - debug message
}
cursor = c + w.s_size;
if (res) return w.result;
}
i = w.substring_i;
if (i < 0) return 0;
}
}
// find_among_b is for backwards processing. Same comments apply
// (matches the text ENDING at the cursor; chars are compared right-to-left
// and the cursor moves left past the match on success).
protected int find_among_b(Among v[], int v_size)
{
int i = 0;
int j = v_size;
int c = cursor;
int lb = limit_backward;
int common_i = 0;
int common_j = 0;
boolean first_key_inspected = false;
while(true) {
int k = i + ((j - i) >> 1);
int diff = 0;
int common = common_i < common_j ? common_i : common_j;
Among w = v[k];
int i2;
for (i2 = w.s_size - 1 - common; i2 >= 0; i2--) {
if (c - common == lb) {
diff = -1;
break;
}
diff = current.charAt(c - 1 - common) - w.s[i2];
if (diff != 0) break;
common++;
}
if (diff < 0) {
j = k;
common_j = common;
} else {
i = k;
common_i = common;
}
if (j - i <= 1) {
if (i > 0) break;
if (j == i) break;
if (first_key_inspected) break;
first_key_inspected = true;
}
}
while(true) {
Among w = v[i];
if (common_i >= w.s_size) {
cursor = c - w.s_size;
if (w.method == null) return w.result;
boolean res;
try {
Object resobj = w.method.invoke(w.methodobject,
new Object[0]);
res = resobj.toString().equals("true");
} catch (InvocationTargetException e) {
res = false;
// FIXME - debug message
} catch (IllegalAccessException e) {
res = false;
// FIXME - debug message
}
cursor = c - w.s_size;
if (res) return w.result;
}
i = w.substring_i;
if (i < 0) return 0;
}
}
/* to replace chars between c_bra and c_ket in current by the
* chars in s.
* Returns the length adjustment (new length minus replaced length), and
* shifts 'limit' and 'cursor' so they keep pointing at the same logical
* positions.
*/
protected int replace_s(int c_bra, int c_ket, String s)
{
int adjustment = s.length() - (c_ket - c_bra);
current.replace(c_bra, c_ket, s);
limit += adjustment;
if (cursor >= c_ket) cursor += adjustment;
else if (cursor > c_bra) cursor = c_bra;
return adjustment;
}
// Sanity check that 0 <= bra <= ket <= limit <= length before a slice
// operation. Note it only reports to stderr — it does not abort.
protected void slice_check()
{
if (bra < 0 ||
bra > ket ||
ket > limit ||
limit > current.length()) // this line could be removed
{
System.err.println("faulty slice operation");
// FIXME: report error somehow.
/*
fprintf(stderr, "faulty slice operation:\n");
debug(z, -1, 0);
exit(1);
*/
}
}
// Replaces the [bra, ket) slice with s.
protected void slice_from(String s)
{
slice_check();
replace_s(bra, ket, s);
}
protected void slice_from(CharSequence s)
{
slice_from(s.toString());
}
// Deletes the [bra, ket) slice.
protected void slice_del()
{
slice_from("");
}
// Replaces [c_bra, c_ket) with s and shifts bra/ket right when the edit
// happens at or before them, so they keep marking the same text.
protected void insert(int c_bra, int c_ket, String s)
{
int adjustment = replace_s(c_bra, c_ket, s);
if (c_bra <= bra) bra += adjustment;
if (c_bra <= ket) ket += adjustment;
}
protected void insert(int c_bra, int c_ket, CharSequence s)
{
insert(c_bra, c_ket, s.toString());
}
/* Copy the slice into the supplied StringBuffer */
protected StringBuffer slice_to(StringBuffer s)
{
slice_check();
int len = ket - bra; // NOTE(review): computed but unused
s.replace(0, s.length(), current.substring(bra, ket));
return s;
}
/* Copy the slice into the supplied StringBuilder */
protected StringBuilder slice_to(StringBuilder s)
{
slice_check();
int len = ket - bra; // NOTE(review): computed but unused
s.replace(0, s.length(), current.substring(bra, ket));
return s;
}
// Copies current[0, limit) into the supplied buffer, replacing its contents.
protected StringBuffer assign_to(StringBuffer s)
{
s.replace(0, s.length(), current.substring(0, limit));
return s;
}
protected StringBuilder assign_to(StringBuilder s)
{
s.replace(0, s.length(), current.substring(0, limit));
return s;
}
/*
extern void debug(struct SN_env * z, int number, int line_count)
{ int i;
int limit = SIZE(z->p);
//if (number >= 0) printf("%3d (line %4d): '", number, line_count);
if (number >= 0) printf("%3d (line %4d): [%d]'", number, line_count,limit);
for (i = 0; i <= limit; i++)
{ if (z->lb == i) printf("{");
if (z->bra == i) printf("[");
if (z->c == i) printf("|");
if (z->ket == i) printf("]");
if (z->l == i) printf("}");
if (i < limit)
{ int ch = z->p[i];
if (ch == 0) ch = '#';
printf("%c", ch);
}
}
printf("'\n");
}
*/
};
| |
/*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.batik.apps.svgbrowser;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.event.ActionEvent;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileWriter;
import java.io.InputStreamReader;
import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import java.net.Authenticator;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.ResourceBundle;
import java.util.StringTokenizer;
import java.util.Vector;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.ImageIcon;
import javax.swing.JOptionPane;
import javax.swing.JProgressBar;
import javax.swing.UIManager;
import javax.swing.plaf.FontUIResource;
import org.apache.batik.swing.JSVGCanvas;
import org.apache.batik.swing.gvt.GVTTreeRendererAdapter;
import org.apache.batik.swing.gvt.GVTTreeRendererEvent;
import org.apache.batik.swing.svg.GVTTreeBuilderAdapter;
import org.apache.batik.swing.svg.GVTTreeBuilderEvent;
import org.apache.batik.swing.svg.SVGDocumentLoaderAdapter;
import org.apache.batik.swing.svg.SVGDocumentLoaderEvent;
import org.apache.batik.util.ApplicationSecurityEnforcer;
import org.apache.batik.util.Platform;
import org.apache.batik.util.ParsedURL;
import org.apache.batik.util.SVGConstants;
import org.apache.batik.util.XMLResourceDescriptor;
import org.apache.batik.util.resources.ResourceManager;
/**
* This class contains the main method of an SVG viewer.
*
* @author <a href="mailto:stephane@hillion.org">Stephane Hillion</a>
* @version $Id$
*/
public class Main implements Application {
/**
* Extension used in addition to the scriptType value
* to read from the PreferenceManager whether or not the
* scriptType can be loaded.
*/
public static final String UNKNOWN_SCRIPT_TYPE_LOAD_KEY_EXTENSION
= ".load";
/**
* User home property
*/
public static final String PROPERTY_USER_HOME = "user.home";
/**
* System property for specifying an additional policy file.
*/
public static final String PROPERTY_JAVA_SECURITY_POLICY
= "java.security.policy";
/**
* Batik configuration sub-directory
*/
public static final String BATIK_CONFIGURATION_SUBDIRECTORY = ".batik";
/**
* Name of the Squiggle configuration file
*/
public static final String SQUIGGLE_CONFIGURATION_FILE = "preferences.xml";
/**
* Name of the Squiggle policy file
*/
public static final String SQUIGGLE_POLICY_FILE = "__svgbrowser.policy";
/**
* Entry for granting network access to scripts
*/
public static final String POLICY_GRANT_SCRIPT_NETWORK_ACCESS
= "grant {\n permission java.net.SocketPermission \"*\", \"listen, connect, resolve, accept\";\n};\n\n";
/**
* Entry for granting file system access to scripts
*/
public static final String POLICY_GRANT_SCRIPT_FILE_ACCESS
= "grant {\n permission java.io.FilePermission \"<<ALL FILES>>\", \"read\";\n};\n\n";
/**
* Entry for the list of recently visited URI
*/
public static final String PREFERENCE_KEY_VISITED_URI_LIST
= "preference.key.visited.uri.list";
/**
* Entry for the maximum number of last visited URIs
*/
public static final String PREFERENCE_KEY_VISITED_URI_LIST_LENGTH
= "preference.key.visited.uri.list.length";
/**
* List of separators between URI values in the preference
* file
*/
public static final String URI_SEPARATOR = " ";
/**
* Default font-family value.
*/
public static final String DEFAULT_DEFAULT_FONT_FAMILY
= "Arial, Helvetica, sans-serif";
/**
* SVG initialization file, used to trigger loading of most of
* the Batik classes
*/
public static final String SVG_INITIALIZATION = "resources/init.svg";
/**
* Stores the initialization file URI
*/
protected String svgInitializationURI;
/**
* Creates a viewer frame and shows it..
* @param args The command-line arguments.
*/
public static void main(String[] args) {
new Main(args);
}
/**
* The gui resources file name
*/
public static final String RESOURCES =
"org.apache.batik.apps.svgbrowser.resources.Main";
/**
* URL for Squiggle's security policy file
*/
public static final String SQUIGGLE_SECURITY_POLICY
= "org/apache/batik/apps/svgbrowser/resources/svgbrowser.policy";
/**
* The resource bundle
*/
protected static ResourceBundle bundle;
/**
* The resource manager
*/
protected static ResourceManager resources;
static {
bundle = ResourceBundle.getBundle(RESOURCES, Locale.getDefault());
resources = new ResourceManager(bundle);
}
/**
* The frame's icon.
*/
protected static ImageIcon frameIcon = new ImageIcon
(Main.class.getResource(resources.getString("Frame.icon")));
/**
* The preference manager.
*/
protected XMLPreferenceManager preferenceManager;
/**
* Maximum number of recently visited URIs
*/
public static final int MAX_VISITED_URIS = 10;
/**
* The array of last visited URIs
*/
protected Vector lastVisited = new Vector();
/**
* The actual allowed maximum number of last visited URIs
*/
protected int maxVisitedURIs = MAX_VISITED_URIS;
/**
* The arguments.
*/
protected String[] arguments;
/**
* Controls whether the application can override the
* system security policy property. This is done when there
* was no initial security policy specified when the application
* started, in which case Batik will use that property.
*/
protected boolean overrideSecurityPolicy = false;
/**
* Script security enforcement is delegated to the
* security utility
*/
protected ApplicationSecurityEnforcer securityEnforcer;
/**
* The option handlers.
*/
protected Map handlers = new HashMap();
{
handlers.put("-font-size", new FontSizeHandler());
}
/**
* The viewer frames.
*/
protected List viewerFrames = new LinkedList();
/**
* The preference dialog.
*/
protected PreferenceDialog preferenceDialog;
/**
* The UI specialization to use in the JSVGViewerFrames.
*/
protected String uiSpecialization;
/**
* Creates a new application.
* @param args The command-line arguments.
*/
public Main(String[] args) {
arguments = args;
if (Platform.isOSX) {
uiSpecialization = "OSX";
// Move the menu bars to the top of the screen.
System.setProperty("apple.laf.useScreenMenuBar", "true");
// Register listeners for the About and Preferences menu items
// in the application menu (using reflection).
try {
Class Application = Class.forName("com.apple.eawt.Application");
Class ApplicationListener =
Class.forName("com.apple.eawt.ApplicationListener");
Class ApplicationEvent =
Class.forName("com.apple.eawt.ApplicationEvent");
Method getApplication = Application.getMethod("getApplication",
new Class[0]);
Method addApplicationListener =
Application.getMethod("addApplicationListener",
new Class[] { ApplicationListener });
final Method setHandled =
ApplicationEvent.getMethod("setHandled",
new Class[] { Boolean.TYPE });
Method setEnabledPreferencesMenu =
Application.getMethod("setEnabledPreferencesMenu",
new Class[] { Boolean.TYPE });
InvocationHandler listenerHandler = new InvocationHandler() {
public Object invoke(Object proxy, Method method,
Object[] args) {
String name = method.getName();
if (name.equals("handleAbout")) {
JSVGViewerFrame relativeTo =
(JSVGViewerFrame) viewerFrames.get(0);
AboutDialog dlg = new AboutDialog(relativeTo);
// Work around pack() bug on some platforms
dlg.setSize(dlg.getPreferredSize());
dlg.setLocationRelativeTo(relativeTo);
dlg.setVisible(true);
dlg.toFront();
} else if (name.equals("handlePreferences")) {
JSVGViewerFrame relativeTo =
(JSVGViewerFrame) viewerFrames.get(0);
showPreferenceDialog(relativeTo);
} else if (name.equals("handleQuit")) {
// Do nothing, let the OS quit the app.
} else {
return null;
}
try {
setHandled.invoke(args[0],
new Object[] { Boolean.TRUE });
} catch (Exception e) {
}
return null;
}
};
Object application = getApplication.invoke(null, (Object[]) null);
setEnabledPreferencesMenu.invoke(application,
new Object[] { Boolean.TRUE });
Object listener =
Proxy.newProxyInstance(Main.class.getClassLoader(),
new Class[] { ApplicationListener },
listenerHandler);
addApplicationListener.invoke(application,
new Object[] { listener });
} catch (Exception ex) {
ex.printStackTrace();
uiSpecialization = null;
}
}
//
// Preferences
//
Map defaults = new HashMap(11);
defaults.put(PreferenceDialog.PREFERENCE_KEY_LANGUAGES,
Locale.getDefault().getLanguage());
defaults.put(PreferenceDialog.PREFERENCE_KEY_SHOW_RENDERING,
Boolean.FALSE);
defaults.put(PreferenceDialog.PREFERENCE_KEY_AUTO_ADJUST_WINDOW,
Boolean.TRUE);
defaults.put(PreferenceDialog.PREFERENCE_KEY_SELECTION_XOR_MODE,
Boolean.FALSE);
defaults.put(PreferenceDialog.PREFERENCE_KEY_ENABLE_DOUBLE_BUFFERING,
Boolean.TRUE);
defaults.put(PreferenceDialog.PREFERENCE_KEY_SHOW_DEBUG_TRACE,
Boolean.FALSE);
defaults.put(PreferenceDialog.PREFERENCE_KEY_PROXY_HOST,
"");
defaults.put(PreferenceDialog.PREFERENCE_KEY_PROXY_PORT,
"");
defaults.put(PreferenceDialog.PREFERENCE_KEY_CSS_MEDIA,
"screen");
defaults.put(PreferenceDialog.PREFERENCE_KEY_DEFAULT_FONT_FAMILY,
DEFAULT_DEFAULT_FONT_FAMILY);
defaults.put(PreferenceDialog.PREFERENCE_KEY_IS_XML_PARSER_VALIDATING,
Boolean.FALSE);
defaults.put(PreferenceDialog.PREFERENCE_KEY_ENFORCE_SECURE_SCRIPTING,
Boolean.TRUE);
defaults.put(PreferenceDialog.PREFERENCE_KEY_GRANT_SCRIPT_FILE_ACCESS,
Boolean.FALSE);
defaults.put(PreferenceDialog.PREFERENCE_KEY_GRANT_SCRIPT_NETWORK_ACCESS,
Boolean.FALSE);
defaults.put(PreferenceDialog.PREFERENCE_KEY_LOAD_JAVA,
Boolean.TRUE);
defaults.put(PreferenceDialog.PREFERENCE_KEY_LOAD_ECMASCRIPT,
Boolean.TRUE);
defaults.put(PreferenceDialog.PREFERENCE_KEY_ALLOWED_SCRIPT_ORIGIN,
new Integer(ResourceOrigin.DOCUMENT));
defaults.put(PreferenceDialog.PREFERENCE_KEY_ALLOWED_EXTERNAL_RESOURCE_ORIGIN,
new Integer(ResourceOrigin.ANY));
defaults.put(PREFERENCE_KEY_VISITED_URI_LIST,
"");
defaults.put(PREFERENCE_KEY_VISITED_URI_LIST_LENGTH,
new Integer(MAX_VISITED_URIS));
defaults.put(PreferenceDialog.PREFERENCE_KEY_ANIMATION_RATE_LIMITING_MODE,
new Integer(1));
defaults.put(PreferenceDialog.PREFERENCE_KEY_ANIMATION_RATE_LIMITING_CPU,
new Float(0.75f));
defaults.put(PreferenceDialog.PREFERENCE_KEY_ANIMATION_RATE_LIMITING_FPS,
new Float(10));
defaults.put(PreferenceDialog.PREFERENCE_KEY_USER_STYLESHEET_ENABLED,
Boolean.TRUE);
securityEnforcer
= new ApplicationSecurityEnforcer(this.getClass(),
SQUIGGLE_SECURITY_POLICY);
try {
preferenceManager = new XMLPreferenceManager(SQUIGGLE_CONFIGURATION_FILE,
defaults);
String dir = System.getProperty(PROPERTY_USER_HOME);
File f = new File(dir, BATIK_CONFIGURATION_SUBDIRECTORY);
f.mkdir();
XMLPreferenceManager.setPreferenceDirectory(f.getCanonicalPath());
preferenceManager.load();
setPreferences();
initializeLastVisited();
Authenticator.setDefault(new JAuthenticator());
} catch (Exception e) {
e.printStackTrace();
}
//
// Initialization
//
final AboutDialog initDialog = new AboutDialog();
((BorderLayout) initDialog.getContentPane().getLayout()).setVgap(8);
final JProgressBar pb = new JProgressBar(0, 3);
initDialog.getContentPane().add(pb, BorderLayout.SOUTH);
// Work around pack() bug on some platforms
Dimension ss = initDialog.getToolkit().getScreenSize();
Dimension ds = initDialog.getPreferredSize();
initDialog.setLocation((ss.width - ds.width) / 2,
(ss.height - ds.height) / 2);
initDialog.setSize(ds);
initDialog.setVisible(true);
final JSVGViewerFrame v = new JSVGViewerFrame(this);
JSVGCanvas c = v.getJSVGCanvas();
c.addSVGDocumentLoaderListener(new SVGDocumentLoaderAdapter() {
public void documentLoadingStarted(SVGDocumentLoaderEvent e) {
pb.setValue(1);
}
public void documentLoadingCompleted(SVGDocumentLoaderEvent e) {
pb.setValue(2);
}
});
c.addGVTTreeBuilderListener(new GVTTreeBuilderAdapter() {
public void gvtBuildCompleted(GVTTreeBuilderEvent e) {
pb.setValue(3);
}
});
c.addGVTTreeRendererListener(new GVTTreeRendererAdapter() {
public void gvtRenderingCompleted(GVTTreeRendererEvent e) {
initDialog.dispose();
v.dispose();
System.gc();
run();
}
});
c.setSize(100, 100);
svgInitializationURI = Main.class.getResource(SVG_INITIALIZATION).toString();
c.loadSVGDocument(svgInitializationURI);
}
/**
 * Installs a custom policy file in the '.batik' directory. This is initialized
 * with the content of the policy file coming with the distribution, then
 * extended with additional grants according to the current script-security
 * preferences. The resulting file is installed as the JVM security policy.
 *
 * @throws IOException if the policy file cannot be read or written
 */
public void installCustomPolicyFile() throws IOException {
    String securityPolicyProperty
        = System.getProperty(PROPERTY_JAVA_SECURITY_POLICY);

    if (overrideSecurityPolicy
        ||
        securityPolicyProperty == null
        ||
        "".equals(securityPolicyProperty)) {
        // Access default policy file
        ParsedURL policyURL = new ParsedURL(securityEnforcer.getPolicyURL());

        // Override the user policy
        String dir = System.getProperty(PROPERTY_USER_HOME);
        File batikConfigDir = new File(dir, BATIK_CONFIGURATION_SUBDIRECTORY);
        File policyFile = new File(batikConfigDir, SQUIGGLE_POLICY_FILE);

        // Copy original policy file into local policy file.
        // FIX: close both streams even when copying fails — the original
        // code leaked the Writer (and the Reader) if read/write threw.
        Reader r = new BufferedReader(new InputStreamReader(policyURL.openStream()));
        try {
            Writer w = new FileWriter(policyFile);
            try {
                char[] buf = new char[1024];
                int n = 0;
                while ( (n=r.read(buf, 0, buf.length)) != -1 ) {
                    w.write(buf, 0, n);
                }

                // Now, append additional grants depending on the security
                // settings
                boolean grantScriptNetworkAccess
                    = preferenceManager.getBoolean
                    (PreferenceDialog.PREFERENCE_KEY_GRANT_SCRIPT_NETWORK_ACCESS);
                boolean grantScriptFileAccess
                    = preferenceManager.getBoolean
                    (PreferenceDialog.PREFERENCE_KEY_GRANT_SCRIPT_FILE_ACCESS);

                if (grantScriptNetworkAccess) {
                    w.write(POLICY_GRANT_SCRIPT_NETWORK_ACCESS);
                }
                if (grantScriptFileAccess) {
                    w.write(POLICY_GRANT_SCRIPT_FILE_ACCESS);
                }
            } finally {
                w.close();
            }
        } finally {
            r.close();
        }

        // We now use the JAVA_SECURITY_POLICY property, so
        // we allow override on subsequent calls.
        overrideSecurityPolicy = true;

        System.setProperty(PROPERTY_JAVA_SECURITY_POLICY,
                           policyFile.toURL().toString());
    }
}
/**
 * Runs the application: consumes the leading command-line options, then
 * opens one viewer frame per remaining argument (readable file path or
 * complete URI). Any failure falls through to printing the usage message.
 */
public void run() {
    try {
        int i = 0;

        // Consume leading options; stops at the first argument that is
        // not a registered option handler key.
        for (; i < arguments.length; i++) {
            OptionHandler oh = (OptionHandler)handlers.get(arguments[i]);
            if (oh == null) {
                break;
            }
            // handleOption returns the index of the last argument it
            // consumed; the loop increment moves past it.
            i = oh.handleOption(i);
        }

        JSVGViewerFrame frame = createAndShowJSVGViewerFrame();

        while (i < arguments.length) {
            // Skip empty arguments entirely.
            if (arguments[i].length() == 0) {
                i++;
                continue;
            }

            File file = new File(arguments[i]);
            String uri = null;

            try{
                // Prefer interpreting the argument as a readable local file.
                if (file.canRead()) {
                    uri = file.toURL().toString();
                }
            }catch(SecurityException se){
                // Cannot access files.
            }

            if(uri == null){
                // Not a readable file: fall back to treating the argument
                // as a URI, rejecting it when it does not parse completely.
                uri = arguments[i];
                ParsedURL purl = null;
                purl = new ParsedURL(arguments[i]);

                if (!purl.complete())
                    // This is not a valid uri
                    uri = null;
            }

            if (uri != null) {
                // The first document reuses the frame created above;
                // later documents each get a freshly created frame.
                if (frame == null)
                    frame = createAndShowJSVGViewerFrame();
                frame.showSVGDocument(uri);
                frame = null;
            } else {
                // Let the user know that we are
                // skipping this file...

                // Note that frame may be null, which is
                // a valid argument for showMessageDialog

                // NOTE: Need to revisit Resources/Messages usage to
                //       have a single entry point. Should have a
                //       formated message here instead of a + ...
                JOptionPane.showMessageDialog
                    (frame,
                     resources.getString("Error.skipping.file")
                     + arguments[i]);
            }
            i++;
        }
    } catch (Exception e) {
        e.printStackTrace();
        printUsage();
    }
}
/**
 * Prints the command line usage to standard output: the header, the
 * syntax line, and the description of every registered option handler.
 */
protected void printUsage() {
    System.out.println();
    System.out.println(resources.getString("Command.header"));
    System.out.println(resources.getString("Command.syntax"));
    System.out.println();
    System.out.println(resources.getString("Command.options"));

    // One description line per registered option, in key-set order.
    for (Iterator it = handlers.keySet().iterator(); it.hasNext();) {
        String key = (String)it.next();
        OptionHandler handler = (OptionHandler)handlers.get(key);
        System.out.println(handler.getDescription());
    }
}
/**
 * This interface represents an option handler, i.e. an object able to
 * consume one command-line option (and any arguments it requires) and to
 * describe itself for the usage message.
 */
protected interface OptionHandler {
    /**
     * Handles the current option.
     * @param i the index of the option keyword in the argument array;
     *          the handler may consume following arguments as values.
     * @return the index of argument just before the next one to handle.
     */
    int handleOption(int i);

    /**
     * Returns the option description.
     */
    String getDescription();
}
/**
 * To handle the '-font-size' option: installs a plain Dialog font of the
 * requested point size on every Swing component type.
 */
protected class FontSizeHandler implements OptionHandler {
    public int handleOption(int i) {
        // The option's single argument is the point size.
        int size = Integer.parseInt(arguments[++i]);

        Font font = new Font("Dialog", Font.PLAIN, size);
        FontUIResource fontRes = new FontUIResource(font);

        // Install the font under every UIManager key, in the same order
        // the keys were originally registered.
        String[] uiFontKeys = {
            "CheckBox.font", "PopupMenu.font", "TextPane.font",
            "MenuItem.font", "ComboBox.font", "Button.font",
            "Tree.font", "ScrollPane.font", "TabbedPane.font",
            "EditorPane.font", "TitledBorder.font", "Menu.font",
            "TextArea.font", "OptionPane.font", "DesktopIcon.font",
            "MenuBar.font", "ToolBar.font", "RadioButton.font",
            "RadioButtonMenuItem.font", "ToggleButton.font", "ToolTip.font",
            "ProgressBar.font", "TableHeader.font", "Panel.font",
            "List.font", "ColorChooser.font", "PasswordField.font",
            "TextField.font", "Table.font", "Label.font",
            "InternalFrameTitlePane.font", "CheckBoxMenuItem.font"
        };
        for (int k = 0; k < uiFontKeys.length; k++) {
            UIManager.put(uiFontKeys[k], fontRes);
        }

        return i;
    }
    public String getDescription() {
        return resources.getString("Command.font-size");
    }
}
// Application ///////////////////////////////////////////////
/**
 * Creates and shows a new viewer frame, sized and titled from the
 * application resources, registers it in the list of open frames and
 * applies the current per-frame preferences to it.
 *
 * @return the newly created, visible frame
 */
public JSVGViewerFrame createAndShowJSVGViewerFrame() {
    JSVGViewerFrame mainFrame = new JSVGViewerFrame(this);
    mainFrame.setSize(resources.getInteger("Frame.width"),
                      resources.getInteger("Frame.height"));
    mainFrame.setIconImage(frameIcon.getImage());
    mainFrame.setTitle(resources.getString("Frame.title"));
    mainFrame.setVisible(true);
    viewerFrames.add(mainFrame);
    setPreferences(mainFrame);
    return mainFrame;
}
/**
 * Closes the given viewer frame. When the last open frame is closed the
 * whole application exits.
 *
 * @param f the frame to close
 */
public void closeJSVGViewerFrame(JSVGViewerFrame f) {
    f.getJSVGCanvas().stopProcessing();
    viewerFrames.remove(f);
    if (viewerFrames.size() == 0) {
        // Last frame: terminate the JVM. Note that the dispose() call
        // below is never reached on this path.
        System.exit(0);
    }
    f.dispose();
}
/**
 * Creates a new application exit action. Performing the action terminates
 * the JVM regardless of which frame triggered it.
 *
 * @param vf the viewer frame requesting the action (currently unused)
 */
public Action createExitAction(JSVGViewerFrame vf) {
    return new AbstractAction() {
        public void actionPerformed(ActionEvent e) {
            System.exit(0);
        }
    };
}
/**
 * Opens the given link in a new viewer window.
 *
 * @param url the URI of the SVG document to display
 */
public void openLink(String url) {
    createAndShowJSVGViewerFrame().getJSVGCanvas().loadSVGDocument(url);
}
/**
 * Returns the XML parser class name, as configured in the Batik XML
 * resource descriptor.
 *
 * @return the fully qualified SAX parser class name
 */
public String getXMLParserClassName() {
    return XMLResourceDescriptor.getXMLParserClassName();
}
/**
 * Returns true if the XML parser must be in validation mode, false
 * otherwise. Backed by the corresponding user preference.
 */
public boolean isXMLParserValidating() {
    return preferenceManager.getBoolean
        (PreferenceDialog.PREFERENCE_KEY_IS_XML_PARSER_VALIDATING);
}
/**
 * Shows the (lazily created) preference dialog and, when the user
 * confirms it, persists the preferences and re-applies them to all open
 * frames.
 *
 * @param f the viewer frame that owns the dialog
 */
public void showPreferenceDialog(JSVGViewerFrame f) {
    if (preferenceDialog == null) {
        preferenceDialog = new PreferenceDialog(f, preferenceManager);
    }
    if (preferenceDialog.showDialog() == PreferenceDialog.OK_OPTION) {
        try {
            preferenceManager.save();
            setPreferences();
        } catch (Exception e) {
            // FIX: do not silently swallow save/apply failures; report
            // them the same way every other catch block in this class does.
            e.printStackTrace();
        }
    }
}
/**
 * Applies the current preferences globally: pushes the per-frame settings
 * to every open viewer frame, configures the HTTP proxy system
 * properties, reinstalls the custom security policy file and re-enforces
 * the security settings.
 *
 * @throws IOException if the custom policy file cannot be installed
 */
private void setPreferences() throws IOException {
    Iterator it = viewerFrames.iterator();
    while (it.hasNext()) {
        setPreferences((JSVGViewerFrame)it.next());
    }

    System.setProperty("proxyHost", preferenceManager.getString
                       (PreferenceDialog.PREFERENCE_KEY_PROXY_HOST));
    System.setProperty("proxyPort", preferenceManager.getString
                       (PreferenceDialog.PREFERENCE_KEY_PROXY_PORT));

    installCustomPolicyFile();
    securityEnforcer.enforceSecurity
        (preferenceManager.getBoolean
         (PreferenceDialog.PREFERENCE_KEY_ENFORCE_SECURE_SCRIPTING)
         );
}
/**
 * Applies the per-frame preferences (double buffering, progressive
 * rendering, debug traces, window auto-adjust, selection XOR mode and
 * animation rate limiting) to the given viewer frame.
 *
 * @param vf the frame to configure
 */
private void setPreferences(JSVGViewerFrame vf) {
    boolean db = preferenceManager.getBoolean
        (PreferenceDialog.PREFERENCE_KEY_ENABLE_DOUBLE_BUFFERING);
    vf.getJSVGCanvas().setDoubleBufferedRendering(db);
    boolean sr = preferenceManager.getBoolean
        (PreferenceDialog.PREFERENCE_KEY_SHOW_RENDERING);
    vf.getJSVGCanvas().setProgressivePaint(sr);
    boolean d = preferenceManager.getBoolean
        (PreferenceDialog.PREFERENCE_KEY_SHOW_DEBUG_TRACE);
    vf.setDebug(d);
    boolean aa = preferenceManager.getBoolean
        (PreferenceDialog.PREFERENCE_KEY_AUTO_ADJUST_WINDOW);
    vf.setAutoAdjust(aa);
    boolean dd = preferenceManager.getBoolean
        (PreferenceDialog.PREFERENCE_KEY_SELECTION_XOR_MODE);
    vf.getJSVGCanvas().setSelectionOverlayXORMode(dd);
    int al = preferenceManager.getInteger
        (PreferenceDialog.PREFERENCE_KEY_ANIMATION_RATE_LIMITING_MODE);
    // Unknown limiting modes fall back to CPU-percentage limiting (1).
    if (al < 0 || al > 2) {
        al = 1;
    }
    switch (al) {
        case 0: // none
            vf.getJSVGCanvas().setAnimationLimitingNone();
            break;
        case 1: { // %cpu
            float pc = preferenceManager.getFloat
                (PreferenceDialog.PREFERENCE_KEY_ANIMATION_RATE_LIMITING_CPU);
            // Out-of-range percentages default to 75% CPU.
            if (pc <= 0f || pc > 1.0f) {
                pc = 0.75f;
            }
            vf.getJSVGCanvas().setAnimationLimitingCPU(pc);
            break;
        }
        case 2: { // fps
            float fps = preferenceManager.getFloat
                (PreferenceDialog.PREFERENCE_KEY_ANIMATION_RATE_LIMITING_FPS);
            // Non-positive frame rates default to 10 fps.
            if (fps <= 0f) {
                fps = 10f;
            }
            vf.getJSVGCanvas().setAnimationLimitingFPS(fps);
            break;
        }
    }
}
/**
 * Returns the user languages preference, falling back to the default
 * locale's language when no preference is stored.
 */
public String getLanguages() {
    String languages = preferenceManager.getString
        (PreferenceDialog.PREFERENCE_KEY_LANGUAGES);
    if (languages == null) {
        return Locale.getDefault().getLanguage();
    }
    return languages;
}
/**
 * Returns the user stylesheet URI. When the configured path names an
 * existing local file it is converted to a file URL; otherwise the raw
 * path is returned unchanged.
 *
 * @return null if no user style sheet was specified or the feature is
 *         disabled.
 */
public String getUserStyleSheetURI() {
    boolean enabled = preferenceManager.getBoolean
        (PreferenceDialog.PREFERENCE_KEY_USER_STYLESHEET_ENABLED);
    String ssPath = preferenceManager.getString
        (PreferenceDialog.PREFERENCE_KEY_USER_STYLESHEET);
    // FIX: guard against a missing preference entry — the original code
    // dereferenced ssPath.length() and threw a NullPointerException when
    // the stylesheet path was never set.
    if (!enabled || ssPath == null || ssPath.length() == 0) {
        return null;
    }

    try {
        File f = new File(ssPath);
        if (f.exists()) {
            return f.toURL().toString();
        }
    } catch (IOException ioe) {
        // Nothing...
    }

    return ssPath;
}
/**
 * Returns the default value for the CSS "font-family" property, as
 * configured in the user preferences.
 */
public String getDefaultFontFamily() {
    return preferenceManager.getString
        (PreferenceDialog.PREFERENCE_KEY_DEFAULT_FONT_FAMILY);
}
/**
 * Returns the CSS media to use.
 * @return "screen" if no CSS media was specified.
 */
public String getMedia() {
    String media = preferenceManager.getString
        (PreferenceDialog.PREFERENCE_KEY_CSS_MEDIA);
    if (media != null) {
        return media;
    }
    return "screen";
}
/**
 * Returns true if the selection overlay is painted in XOR mode, false
 * otherwise. Backed by the corresponding user preference.
 */
public boolean isSelectionOverlayXORMode() {
    return preferenceManager.getBoolean
        (PreferenceDialog.PREFERENCE_KEY_SELECTION_XOR_MODE);
}
/**
 * Returns true if the input scriptType can be loaded in this application,
 * based on the user's script-loading preferences.
 *
 * @param scriptType the MIME/script type declared by the document
 */
public boolean canLoadScriptType(String scriptType){
    // All ECMAScript / JavaScript flavours share a single preference key.
    if (SVGConstants.SVG_SCRIPT_TYPE_ECMASCRIPT.equals(scriptType)
            || SVGConstants.SVG_SCRIPT_TYPE_APPLICATION_ECMASCRIPT
                   .equals(scriptType)
            || SVGConstants.SVG_SCRIPT_TYPE_JAVASCRIPT.equals(scriptType)
            || SVGConstants.SVG_SCRIPT_TYPE_APPLICATION_JAVASCRIPT
                   .equals(scriptType)) {
        return preferenceManager.getBoolean
            (PreferenceDialog.PREFERENCE_KEY_LOAD_ECMASCRIPT);
    }

    if (SVGConstants.SVG_SCRIPT_TYPE_JAVA.equals(scriptType)) {
        return preferenceManager.getBoolean
            (PreferenceDialog.PREFERENCE_KEY_LOAD_JAVA);
    }

    // Unknown script types are governed by a per-type preference key.
    return preferenceManager.getBoolean
        (scriptType + UNKNOWN_SCRIPT_TYPE_LOAD_KEY_EXTENSION);
}
/**
 * Returns the allowed origins for scripts, as configured in the user
 * preferences.
 * @see ResourceOrigin
 */
public int getAllowedScriptOrigin() {
    return preferenceManager.getInteger
        (PreferenceDialog.PREFERENCE_KEY_ALLOWED_SCRIPT_ORIGIN);
}
/**
 * Returns the allowed origins for external resources, as configured in
 * the user preferences.
 * @see ResourceOrigin
 */
public int getAllowedExternalResourceOrigin() {
    return preferenceManager.getInteger
        (PreferenceDialog.PREFERENCE_KEY_ALLOWED_EXTERNAL_RESOURCE_ORIGIN);
}
/**
 * Notifies Application of recently visited URI: moves it to the
 * most-recent slot of the bounded history list and persists the list
 * into the preferences (best-effort).
 *
 * @param uri the URI that was just visited
 */
public void addVisitedURI(String uri) {
    // The internal initialization document is not user content; skip it.
    if(svgInitializationURI.equals(uri)) {
        return;
    }

    int maxVisitedURIs =
        preferenceManager.getInteger
        (PREFERENCE_KEY_VISITED_URI_LIST_LENGTH);

    if (maxVisitedURIs < 0) {
        maxVisitedURIs = 0;
    }

    // Re-adding an existing URI moves it to the most-recent position.
    if (lastVisited.contains(uri)) {
        lastVisited.removeElement(uri);
    }

    // Evict oldest entries until there is room for the new one.
    while (lastVisited.size() > 0 && lastVisited.size() > (maxVisitedURIs-1)) {
        lastVisited.removeElementAt(0);
    }

    if (maxVisitedURIs > 0) {
        lastVisited.addElement(uri);
    }

    // Now, save the list of visited URL into the preferences
    StringBuffer lastVisitedBuffer = new StringBuffer( lastVisited.size() * 8 );

    for (int i=0; i<lastVisited.size(); i++) {
        lastVisitedBuffer.append
            (URLEncoder.encode(lastVisited.get(i).toString()));
        lastVisitedBuffer.append(URI_SEPARATOR);
    }

    preferenceManager.setString
        (PREFERENCE_KEY_VISITED_URI_LIST,
         lastVisitedBuffer.toString());
    try {
        preferenceManager.save();
    } catch (Exception e) {
        // As in other places. But this is ugly...
        // (Persisting the history is best-effort; a failure here must
        // not break navigation.)
    }
}
/**
 * Asks Application for a list of recently visited URI.
 *
 * @return a snapshot array of the visited-URI history
 */
public String[] getVisitedURIs() {
    return (String[]) lastVisited.toArray(new String[lastVisited.size()]);
}
/**
 * Returns the UI resource specialization to use.
 */
public String getUISpecialization() {
    return uiSpecialization;
}
/**
 * Initializes the lastVisited vector from the persisted,
 * URI_SEPARATOR-delimited preference string, keeping at most the
 * configured maximum number of entries.
 *
 * NOTE(review): assumes the visited-URI preference has a non-null
 * default value; a missing entry would make the StringTokenizer
 * constructor throw — confirm against the defaults map.
 */
protected void initializeLastVisited(){
    String lastVisitedStr
        = preferenceManager.getString(PREFERENCE_KEY_VISITED_URI_LIST);

    StringTokenizer st
        = new StringTokenizer(lastVisitedStr,
                              URI_SEPARATOR);

    int n = st.countTokens();
    int maxVisitedURIs
        = preferenceManager.getInteger
        (PREFERENCE_KEY_VISITED_URI_LIST_LENGTH);

    if (n > maxVisitedURIs) {
        n = maxVisitedURIs;
    }

    // Entries were URL-encoded when saved; decode them on the way in.
    for (int i=0; i<n; i++) {
        lastVisited.addElement(URLDecoder.decode(st.nextToken()));
    }
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.compute.model;
/**
* An access configuration attached to an instance's network interface. Only one access config per
* instance is supported.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class AccessConfig extends com.google.api.client.json.GenericJson {

  /**
   * The first IPv6 address of the external IPv6 range associated with this instance, prefix length
   * is stored in externalIpv6PrefixLength in ipv6AccessConfig. The field is output only, an IPv6
   * address from a subnetwork associated with the instance will be allocated dynamically.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String externalIpv6;

  /**
   * The prefix length of the external IPv6 range.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Integer externalIpv6PrefixLength;

  /**
   * [Output Only] Type of the resource. Always compute#accessConfig for access configs.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String kind;

  /**
   * The name of this access configuration. The default and recommended name is External NAT, but
   * you can use any arbitrary string, such as My external IP or Network Access.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String name;

  /**
   * An external IP address associated with this instance. Specify an unused static external IP
   * address available to the project or leave this field undefined to use an IP from a shared
   * ephemeral IP address pool. If you specify a static external IP address, it must live in the
   * same region as the zone of the instance.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String natIP;

  /**
   * This signifies the networking tier used for configuring this access configuration and can only
   * take the following values: PREMIUM, STANDARD. If an AccessConfig is specified without a valid
   * external IP address, an ephemeral IP will be created with this networkTier. If an AccessConfig
   * with a valid external IP address is specified, it must match that of the networkTier associated
   * with the Address resource owning that IP.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String networkTier;

  /**
   * [Output Only] The public DNS domain name for the instance.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String publicDnsName;

  /**
   * The DNS domain name for the public PTR record. You can set this field only if the
   * `setPublicPtr` field is enabled in accessConfig. If this field is unspecified in
   * ipv6AccessConfig, a default PTR record will be created for first IP in associated external IPv6
   * range.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String publicPtrDomainName;

  /**
   * Specifies whether a public DNS 'A' record should be created for the external IP address of this
   * access configuration.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean setPublicDns;

  /**
   * Specifies whether a public DNS 'PTR' record should be created to map the external IP address of
   * the instance to a DNS domain name. This field is not used in ipv6AccessConfig. A default PTR
   * record will be created if the VM has external IPv6 range associated.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean setPublicPtr;

  /**
   * The type of configuration. The default and only option is ONE_TO_ONE_NAT.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String type;

  /**
   * The first IPv6 address of the external IPv6 range associated with this instance, prefix length
   * is stored in externalIpv6PrefixLength in ipv6AccessConfig. The field is output only, an IPv6
   * address from a subnetwork associated with the instance will be allocated dynamically.
   * @return value or {@code null} for none
   */
  public java.lang.String getExternalIpv6() {
    return externalIpv6;
  }

  /**
   * The first IPv6 address of the external IPv6 range associated with this instance, prefix length
   * is stored in externalIpv6PrefixLength in ipv6AccessConfig. The field is output only, an IPv6
   * address from a subnetwork associated with the instance will be allocated dynamically.
   * @param externalIpv6 externalIpv6 or {@code null} for none
   */
  public AccessConfig setExternalIpv6(java.lang.String externalIpv6) {
    this.externalIpv6 = externalIpv6;
    return this;
  }

  /**
   * The prefix length of the external IPv6 range.
   * @return value or {@code null} for none
   */
  public java.lang.Integer getExternalIpv6PrefixLength() {
    return externalIpv6PrefixLength;
  }

  /**
   * The prefix length of the external IPv6 range.
   * @param externalIpv6PrefixLength externalIpv6PrefixLength or {@code null} for none
   */
  public AccessConfig setExternalIpv6PrefixLength(java.lang.Integer externalIpv6PrefixLength) {
    this.externalIpv6PrefixLength = externalIpv6PrefixLength;
    return this;
  }

  /**
   * [Output Only] Type of the resource. Always compute#accessConfig for access configs.
   * @return value or {@code null} for none
   */
  public java.lang.String getKind() {
    return kind;
  }

  /**
   * [Output Only] Type of the resource. Always compute#accessConfig for access configs.
   * @param kind kind or {@code null} for none
   */
  public AccessConfig setKind(java.lang.String kind) {
    this.kind = kind;
    return this;
  }

  /**
   * The name of this access configuration. The default and recommended name is External NAT, but
   * you can use any arbitrary string, such as My external IP or Network Access.
   * @return value or {@code null} for none
   */
  public java.lang.String getName() {
    return name;
  }

  /**
   * The name of this access configuration. The default and recommended name is External NAT, but
   * you can use any arbitrary string, such as My external IP or Network Access.
   * @param name name or {@code null} for none
   */
  public AccessConfig setName(java.lang.String name) {
    this.name = name;
    return this;
  }

  /**
   * An external IP address associated with this instance. Specify an unused static external IP
   * address available to the project or leave this field undefined to use an IP from a shared
   * ephemeral IP address pool. If you specify a static external IP address, it must live in the
   * same region as the zone of the instance.
   * @return value or {@code null} for none
   */
  public java.lang.String getNatIP() {
    return natIP;
  }

  /**
   * An external IP address associated with this instance. Specify an unused static external IP
   * address available to the project or leave this field undefined to use an IP from a shared
   * ephemeral IP address pool. If you specify a static external IP address, it must live in the
   * same region as the zone of the instance.
   * @param natIP natIP or {@code null} for none
   */
  public AccessConfig setNatIP(java.lang.String natIP) {
    this.natIP = natIP;
    return this;
  }

  /**
   * This signifies the networking tier used for configuring this access configuration and can only
   * take the following values: PREMIUM, STANDARD. If an AccessConfig is specified without a valid
   * external IP address, an ephemeral IP will be created with this networkTier. If an AccessConfig
   * with a valid external IP address is specified, it must match that of the networkTier associated
   * with the Address resource owning that IP.
   * @return value or {@code null} for none
   */
  public java.lang.String getNetworkTier() {
    return networkTier;
  }

  /**
   * This signifies the networking tier used for configuring this access configuration and can only
   * take the following values: PREMIUM, STANDARD. If an AccessConfig is specified without a valid
   * external IP address, an ephemeral IP will be created with this networkTier. If an AccessConfig
   * with a valid external IP address is specified, it must match that of the networkTier associated
   * with the Address resource owning that IP.
   * @param networkTier networkTier or {@code null} for none
   */
  public AccessConfig setNetworkTier(java.lang.String networkTier) {
    this.networkTier = networkTier;
    return this;
  }

  /**
   * [Output Only] The public DNS domain name for the instance.
   * @return value or {@code null} for none
   */
  public java.lang.String getPublicDnsName() {
    return publicDnsName;
  }

  /**
   * [Output Only] The public DNS domain name for the instance.
   * @param publicDnsName publicDnsName or {@code null} for none
   */
  public AccessConfig setPublicDnsName(java.lang.String publicDnsName) {
    this.publicDnsName = publicDnsName;
    return this;
  }

  /**
   * The DNS domain name for the public PTR record. You can set this field only if the
   * `setPublicPtr` field is enabled in accessConfig. If this field is unspecified in
   * ipv6AccessConfig, a default PTR record will be created for first IP in associated external IPv6
   * range.
   * @return value or {@code null} for none
   */
  public java.lang.String getPublicPtrDomainName() {
    return publicPtrDomainName;
  }

  /**
   * The DNS domain name for the public PTR record. You can set this field only if the
   * `setPublicPtr` field is enabled in accessConfig. If this field is unspecified in
   * ipv6AccessConfig, a default PTR record will be created for first IP in associated external IPv6
   * range.
   * @param publicPtrDomainName publicPtrDomainName or {@code null} for none
   */
  public AccessConfig setPublicPtrDomainName(java.lang.String publicPtrDomainName) {
    this.publicPtrDomainName = publicPtrDomainName;
    return this;
  }

  /**
   * Specifies whether a public DNS 'A' record should be created for the external IP address of this
   * access configuration.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getSetPublicDns() {
    return setPublicDns;
  }

  /**
   * Specifies whether a public DNS 'A' record should be created for the external IP address of this
   * access configuration.
   * @param setPublicDns setPublicDns or {@code null} for none
   */
  public AccessConfig setSetPublicDns(java.lang.Boolean setPublicDns) {
    this.setPublicDns = setPublicDns;
    return this;
  }

  /**
   * Specifies whether a public DNS 'PTR' record should be created to map the external IP address of
   * the instance to a DNS domain name. This field is not used in ipv6AccessConfig. A default PTR
   * record will be created if the VM has external IPv6 range associated.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getSetPublicPtr() {
    return setPublicPtr;
  }

  /**
   * Specifies whether a public DNS 'PTR' record should be created to map the external IP address of
   * the instance to a DNS domain name. This field is not used in ipv6AccessConfig. A default PTR
   * record will be created if the VM has external IPv6 range associated.
   * @param setPublicPtr setPublicPtr or {@code null} for none
   */
  public AccessConfig setSetPublicPtr(java.lang.Boolean setPublicPtr) {
    this.setPublicPtr = setPublicPtr;
    return this;
  }

  /**
   * The type of configuration. The default and only option is ONE_TO_ONE_NAT.
   * @return value or {@code null} for none
   */
  public java.lang.String getType() {
    return type;
  }

  /**
   * The type of configuration. The default and only option is ONE_TO_ONE_NAT.
   * @param type type or {@code null} for none
   */
  public AccessConfig setType(java.lang.String type) {
    this.type = type;
    return this;
  }

  @Override
  public AccessConfig set(String fieldName, Object value) {
    return (AccessConfig) super.set(fieldName, value);
  }

  @Override
  public AccessConfig clone() {
    return (AccessConfig) super.clone();
  }

}
| |
package edu.berkeley.lipstick.config;
import edu.berkeley.lipstick.backend.IExplicitBackend;
import edu.berkeley.lipstick.localstore.ILocalStore;
import edu.berkeley.lipstick.storage.IStorage;
import edu.berkeley.lipstick.util.serializer.IDWSerializer;
import org.yaml.snakeyaml.Yaml;
import java.io.File;
import java.io.FileInputStream;
import java.util.Map;
public class Config {
private static IStorage storage;
private static Object backend;
private static Map<String, Object> configOptions;
private static ILocalStore localStore;
private static String lipstickConfigEnvString = "lipstick.config";
private static String lipstickPidString = "lipstick.pid";
private static String simpleStorageHostString = "simple.storage.host";
private static String localStoreClassString = "localstore.class";
private static String simpleStoragePortString = "simple.storage.port";
private static String localstoreKyotoFilePathString = "localstore.kyoto.filepath";
private static String readLocalOnlyString = "backend.read.localonly";
private static String storageClassString = "storage.class";
private static String backendClassString = "backend.class";
private static String dwSerializerClassString = "dw.serializer.class";
private static String cassandraConsistencyLevelString = "cassandra.consistencylevel";
private static String cassandraNodeIPString = "cassandra.node.ip";
private static String cassandraNodePortString = "cassandra.node.port";
private static String cassandraKeyspaceString = "cassandra.keyspace";
private static String cassandraColumnFamilyString = "cassandra.columnfamily";
private static String backendAsyncSleepMSString = "backend.async.sleepms";
private static String resolveInBackgroundString = "backend.asyncresolve";
private static String backendMaxECDSString = "backend.maxsyncECDSreads";
private static String backendMaxKeysToCheck = "backend.maxKeysToCheck";
private static String backendMaxBufferedWrites = "backend.maxBufferedWrites";
static void getConfig() throws Exception {
if(configOptions != null)
return;
configOptions = (Map<String, Object>) (new Yaml()).load(new FileInputStream(new File(
System.getProperty(lipstickConfigEnvString))));
}
public static String getProcessID() throws Exception {
getConfig();
assert(configOptions.get(lipstickPidString ) != null);
return (String)configOptions.get(lipstickPidString );
}
public static long getBytesRead() throws Exception {
getConfig();
return storage.getBytesRead();
}
public static long getBytesWritten() throws Exception {
getConfig();
return storage.getBytesWritten();
}
public static String getSimpleBackendHost() throws Exception {
getConfig();
assert(configOptions.get(simpleStorageHostString) != null);
return (String)configOptions.get(simpleStorageHostString);
}
public static ILocalStore getLocalStore() throws Exception {
getConfig();
if(localStore == null) {
Class newLocalStore = Class.forName((String)configOptions.get(localStoreClassString));
localStore = (ILocalStore) newLocalStore.newInstance();
}
return localStore;
}
public static int getSimpleBackendPort() throws Exception {
getConfig();
assert(configOptions.get(simpleStoragePortString) != null);
return (Integer)configOptions.get(simpleStoragePortString);
}
public static String getKyotoFilePath() throws Exception {
getConfig();
assert(configOptions.get(localstoreKyotoFilePathString) != null);
return (String)configOptions.get(localstoreKyotoFilePathString);
}
public static IExplicitBackend getExplicitBackend() throws Exception {
getConfig();
if(storage == null) {
Class storageClass = Class.forName((String)configOptions.get(storageClassString));
storage = (IStorage) storageClass.newInstance();
}
if(backend == null) {
Class backendClass = Class.forName((String)configOptions.get(backendClassString));
backend = backendClass.getDeclaredConstructor(IStorage.class).newInstance(storage);
}
//we're going to fail in the next line, but this is no longer sane anyway
assert(backend instanceof IExplicitBackend);
return (IExplicitBackend) backend;
}
public static IDWSerializer getDWSerializer() throws Exception {
getConfig();
Class serializerClass = Class.forName((String)configOptions.get(dwSerializerClassString));
return (IDWSerializer) serializerClass.newInstance();
}
public static String getCassandraConsistencyLevel() throws Exception {
getConfig();
assert(configOptions.get(cassandraConsistencyLevelString) != null);
return (String)configOptions.get(cassandraConsistencyLevelString);
}
public static String getCassandraIP() throws Exception {
getConfig();
assert(configOptions.get(cassandraNodeIPString) != null);
return (String)configOptions.get(cassandraNodeIPString);
}
public static Integer getCassandraPort() throws Exception {
getConfig();
assert(configOptions.get(cassandraNodePortString) != null);
return (Integer)configOptions.get(cassandraNodePortString);
}
public static String getCassandraKeyspace() throws Exception {
getConfig();
assert(configOptions.get(cassandraKeyspaceString) != null);
return (String)configOptions.get(cassandraKeyspaceString);
}
public static String getCassandraColumnFamily() throws Exception {
getConfig();
assert(configOptions.get(cassandraColumnFamilyString) != null);
return (String)configOptions.get(cassandraColumnFamilyString);
}
public static long getAsyncSleepLength() throws Exception {
getConfig();
assert(configOptions.get(backendAsyncSleepMSString) != null);
Long l = new Long((Integer)configOptions.get(backendAsyncSleepMSString));
return l.longValue();
}
public static long getStorageWriteLatency() throws Exception {
if(storage == null)
return 0;
return storage.getWriteLatency();
}
public static long getStorageReadLatency() throws Exception {
if(storage == null)
return 0;
return storage.getReadLatency();
}
public static boolean readLocalOnly() throws Exception {
getConfig();
assert(configOptions.get(readLocalOnlyString) != null);
return (Boolean) configOptions.get(readLocalOnlyString);
}
public static boolean doResolveInBackground() throws Exception {
getConfig();
assert(configOptions.get(resolveInBackgroundString) != null);
return (Boolean) configOptions.get(resolveInBackgroundString);
}
/**
 * Returns the maximum number of synchronous ECDS reads the backend may issue.
 *
 * @return the configured cap, or {@link Integer#MAX_VALUE} when configured as
 *         the sentinel string "infinity"
 * @throws Exception if the configuration cannot be loaded
 */
public static int getBackendMaxSyncECDSReads() throws Exception {
    getConfig();
    final Object max = configOptions.get(backendMaxECDSString);
    assert max != null;
    // "infinity" is a sentinel meaning "no cap"; otherwise the value is an Integer.
    return "infinity".equals(max) ? Integer.MAX_VALUE : (Integer) max;
}
/**
 * Returns the maximum number of keys the backend should check.
 *
 * @return the configured key-check limit
 * @throws Exception if the configuration cannot be loaded
 */
public static int getMaxKeysToCheck() throws Exception {
    getConfig();
    final Object maxKeys = configOptions.get(backendMaxKeysToCheck);
    assert maxKeys != null;
    return (Integer) maxKeys;
}
/**
 * Returns the maximum number of writes the backend may buffer.
 *
 * @return the configured buffered-write limit
 * @throws Exception if the configuration cannot be loaded
 */
public static int getMaxBufferedWrites() throws Exception {
    getConfig();
    final Object maxWrites = configOptions.get(backendMaxBufferedWrites);
    assert maxWrites != null;
    return (Integer) maxWrites;
}
}
| |
/*
* Copyright 2015, Yahoo Inc.
* Copyrights licensed under the Apache 2.0 License.
* See the accompanying LICENSE file for terms.
*/
package com.yahoo.squidb.sql;
import com.yahoo.squidb.data.ViewModel;
import com.yahoo.squidb.sql.Property.LongProperty;
import com.yahoo.squidb.utility.SquidUtilities;
import java.util.ArrayList;
import java.util.List;
/**
* Builder class for a SQLite SELECT statement
*/
public final class Query extends TableStatement {

    /** Specifies this query has no limit */
    public static final int NO_LIMIT = -1;

    /** Specifies this query has no offset */
    public static final int NO_OFFSET = 0;

    // The table/view/subquery of the FROM clause; null until from() is called.
    private SqlTable<?> table = null;
    // Explicitly selected fields; null or empty means "SELECT *".
    private ArrayList<Field<?>> fields = null;
    // WHERE-clause criterions, joined with AND when compiled.
    private ArrayList<Criterion> criterions = null;
    // JOIN clauses, emitted in the order they were added.
    private ArrayList<Join> joins = null;
    // GROUP BY terms.
    private ArrayList<Field<?>> groupBies = null;
    // HAVING-clause criterions, joined with AND when compiled.
    private ArrayList<Criterion> havings = null;
    // Compound selects (UNION/UNION ALL/INTERSECT/EXCEPT) appended to this query.
    private ArrayList<CompoundSelect> compoundSelects = null;
    // ORDER BY terms.
    private ArrayList<Order> orders = null;
    private int limit = NO_LIMIT;
    private int offset = NO_OFFSET;
    private boolean distinct = false;
    // When true, mutating methods fork() a copy rather than modify this instance.
    private boolean immutable = false;
    // When true, the compiled SQL should be checked for WHERE-clause anomalies.
    private boolean needsValidation = false;
    // Lazily built list of every selected field (see getFields()); cleared
    // whenever the selection or FROM/JOIN tables change.
    private ArrayList<Field<?>> selectAllCache = null;

    // Constructs a query selecting the given fields; a null/empty list means "select all".
    private Query(List<Field<?>> fields) {
        if (!isEmpty(fields)) {
            this.fields = new ArrayList<Field<?>>(fields);
        }
    }

    // Constructs a query selecting the given fields; no fields means "select all".
    private Query(Field<?>... fields) {
        if (!isEmpty(fields)) {
            this.fields = new ArrayList<Field<?>>();
            SquidUtilities.addAll(this.fields, fields);
        }
    }

    /**
     * Construct a new Select statement that queries for the specified {@link Field Fields}
     *
     * @param fields the Fields to select
     * @return a new Query object
     */
    public static Query select(Field<?>... fields) {
        return new Query(fields);
    }

    /**
     * Construct a new Select statement that queries for the specified {@link Field Fields}
     *
     * @param fields the Fields to select
     * @return a new Query object
     */
    public static Query select(List<Field<?>> fields) {
        return new Query(fields);
    }

    /**
     * Construct a new Select statement that queries for distinct values of the specified {@link Field Fields}
     *
     * @param fields the Fields to select
     * @return a new Query object
     */
    public static Query selectDistinct(Field<?>... fields) {
        Query query = new Query(fields);
        query.distinct = true;
        return query;
    }

    /**
     * Construct a new Select statement that queries for distinct values of the specified {@link Field Fields}
     *
     * @param fields the Fields to select
     * @return a new Query object
     */
    public static Query selectDistinct(List<Field<?>> fields) {
        Query query = new Query(fields);
        query.distinct = true;
        return query;
    }

    /**
     * Construct a new Select statement that queries all the {@link Field}s of the given subquery
     *
     * @param subquery the inner Query object
     * @param subqueryAlias the name to alias the subquery
     * @return a new Query object
     */
    public static Query fromSubquery(Query subquery, String subqueryAlias) {
        SubqueryTable table = subquery.as(subqueryAlias);
        return Query.select(table.qualifiedFields()).from(table);
    }

    /**
     * Construct a new Select statement that queries all the {@link Field Fields} of the given {@link View}
     *
     * @param view the View to select from
     * @return a new Query object
     */
    public static Query fromView(View view) {
        return Query.select(view.qualifiedFields()).from(view);
    }

    /**
     * Add more {@link Field Fields} to be selected
     *
     * @param fields the additional Fields to be selected
     * @return this Query object, to allow chaining method calls
     */
    public Query selectMore(Field<?>... fields) {
        if (immutable) {
            return fork().selectMore(fields);
        }
        if (!isEmpty(fields)) {
            if (this.fields == null) {
                this.fields = new ArrayList<Field<?>>();
            }
            SquidUtilities.addAll(this.fields, fields);
            // Selection changed: drop the cached select-all list and compiled SQL.
            if (selectAllCache != null) {
                selectAllCache.clear();
            }
            invalidateCompileCache();
        }
        return this;
    }

    /**
     * Add more {@link Field Fields} to be selected
     *
     * @param fields the additional Fields to be selected
     * @return this Query object, to allow chaining method calls
     */
    public Query selectMore(List<Field<?>> fields) {
        if (immutable) {
            return fork().selectMore(fields);
        }
        if (!isEmpty(fields)) {
            if (this.fields == null) {
                this.fields = new ArrayList<Field<?>>(fields);
            } else {
                this.fields.addAll(fields);
            }
            // Selection changed: drop the cached select-all list and compiled SQL.
            if (selectAllCache != null) {
                selectAllCache.clear();
            }
            invalidateCompileCache();
        }
        return this;
    }

    /**
     * Set the {@link SqlTable table} this query selects from
     *
     * @param table the table to select from
     * @return this Query object, to allow chaining method calls
     */
    public Query from(SqlTable<?> table) {
        if (immutable) {
            return fork().from(table);
        }
        if (this.table != table) {
            this.table = table;
            // A subquery table may itself require WHERE-clause validation.
            updateNeedsValidationFromTable(table);
            if (selectAllCache != null) {
                selectAllCache.clear();
            }
            invalidateCompileCache();
        }
        return this;
    }

    /**
     * Add a {@link Join} to this query
     *
     * @param joins one or more joins to apply to this query
     * @return this Query object, to allow chaining method calls
     */
    public Query join(Join... joins) {
        if (immutable) {
            return fork().join(joins);
        }
        if (this.joins == null) {
            this.joins = new ArrayList<Join>();
        }
        SquidUtilities.addAll(this.joins, joins);
        for (Join join : joins) {
            updateNeedsValidationFromTable(join.joinTable);
        }
        // Joined tables contribute fields, so the select-all cache is stale too.
        if (selectAllCache != null) {
            selectAllCache.clear();
        }
        invalidateCompileCache();
        return this;
    }

    /**
     * Add a left {@link Join} to this query using the ON clause
     *
     * @param table the table to join on
     * @param onCriterions one or more criterions to use for the "on" clause
     * @return this Query object, to allow chaining method calls
     */
    public Query leftJoin(SqlTable<?> table, Criterion... onCriterions) {
        return join(Join.left(table, onCriterions));
    }

    /**
     * Add a left {@link Join} to this query using the USING clause
     *
     * @param table the table to join on
     * @param usingColumns one or more columns to use for the "using" clause
     * @return this Query object, to allow chaining method calls
     */
    public Query leftJoin(SqlTable<?> table, Property<?>... usingColumns) {
        return join(Join.left(table, usingColumns));
    }

    /**
     * Add an inner {@link Join} to this query using the ON clause
     *
     * @param table the table to join on
     * @param onCriterions one or more criterions to use for the "on" clause
     * @return this Query object, to allow chaining method calls
     */
    public Query innerJoin(SqlTable<?> table, Criterion... onCriterions) {
        return join(Join.inner(table, onCriterions));
    }

    /**
     * Add an inner {@link Join} to this query using the USING clause
     *
     * @param table the table to join on
     * @param usingColumns one or more columns to use for the "using" clause
     * @return this Query object, to allow chaining method calls
     */
    public Query innerJoin(SqlTable<?> table, Property<?>... usingColumns) {
        return join(Join.inner(table, usingColumns));
    }

    /**
     * Add a {@link Criterion} to the WHERE clause of this query. Multiple calls will combine all the criterions with
     * AND.
     *
     * @param criterion the Criterion to add to the WHERE clause
     * @return this Query object, to allow chaining method calls
     */
    public Query where(Criterion criterion) {
        if (immutable) {
            return fork().where(criterion);
        }
        if (criterions == null) {
            criterions = new ArrayList<Criterion>();
        }
        criterions.add(criterion);
        invalidateCompileCache();
        return this;
    }

    /**
     * Add a GROUP BY clause (or an additional grouping term) to this query
     *
     * @param fields one or more Fields to group on
     * @return this Query object, to allow chaining method calls
     */
    public Query groupBy(Field<?>... fields) {
        if (immutable) {
            return fork().groupBy(fields);
        }
        if (this.groupBies == null) {
            this.groupBies = new ArrayList<Field<?>>();
        }
        SquidUtilities.addAll(this.groupBies, fields);
        invalidateCompileCache();
        return this;
    }

    /**
     * Add a {@link Criterion} to the HAVING clause of this query. Multiple calls will combine all the criterions with
     * AND.
     *
     * @param criterion the Criterion to add to the HAVING clause
     * @return this Query object, to allow chaining method calls
     */
    public Query having(Criterion criterion) {
        if (immutable) {
            return fork().having(criterion);
        }
        if (this.havings == null) {
            this.havings = new ArrayList<Criterion>();
        }
        this.havings.add(criterion);
        invalidateCompileCache();
        return this;
    }

    /**
     * Form a compound select with the given query using the UNION operator
     *
     * @param query a Query object to append with the UNION operator
     * @return this Query object, to allow chaining method calls
     * @see <a href="http://www.sqlite.org/lang_select.html#compound">http://www.sqlite.org/lang_select.html#compound</a>
     */
    public Query union(Query query) {
        if (immutable) {
            return fork().union(query);
        }
        addCompoundSelect(CompoundSelect.union(query));
        return this;
    }

    /**
     * Form a compound select with the given query using the UNION ALL operator
     *
     * @param query a Query object to append with the UNION ALL operator
     * @return this Query object, to allow chaining method calls
     * @see <a href="http://www.sqlite.org/lang_select.html#compound">http://www.sqlite.org/lang_select.html#compound</a>
     */
    public Query unionAll(Query query) {
        if (immutable) {
            return fork().unionAll(query);
        }
        addCompoundSelect(CompoundSelect.unionAll(query));
        return this;
    }

    /**
     * Form a compound select with the given query using the INTERSECT operator
     *
     * @param query a Query object to append with the INTERSECT operator
     * @return this Query object, to allow chaining method calls
     * @see <a href="http://www.sqlite.org/lang_select.html#compound">http://www.sqlite.org/lang_select.html#compound</a>
     */
    public Query intersect(Query query) {
        if (immutable) {
            return fork().intersect(query);
        }
        addCompoundSelect(CompoundSelect.intersect(query));
        return this;
    }

    /**
     * Form a compound select with the given query using the EXCEPT operator
     *
     * @param query a Query object to append with the EXCEPT operator
     * @return this Query object, to allow chaining method calls
     * @see <a href="http://www.sqlite.org/lang_select.html#compound">http://www.sqlite.org/lang_select.html#compound</a>
     */
    public Query except(Query query) {
        if (immutable) {
            return fork().except(query);
        }
        addCompoundSelect(CompoundSelect.except(query));
        return this;
    }

    // Appends a compound select and propagates the inner query's validation flag.
    private void addCompoundSelect(CompoundSelect compoundSelect) {
        if (this.compoundSelects == null) {
            this.compoundSelects = new ArrayList<CompoundSelect>();
        }
        this.compoundSelects.add(compoundSelect);
        invalidateCompileCache();
        if (compoundSelect.query.needsValidation()) {
            requestValidation();
        }
    }

    /**
     * Add an ORDER BY clause (or an additional ordering term) to this query
     *
     * @param orders one or more ordering terms
     * @return this Query object, to allow chaining method calls
     */
    public Query orderBy(Order... orders) {
        if (immutable) {
            return fork().orderBy(orders);
        }
        if (this.orders == null) {
            this.orders = new ArrayList<Order>();
        }
        SquidUtilities.addAll(this.orders, orders);
        invalidateCompileCache();
        return this;
    }

    /**
     * Set the limit of this statement. Using a negative value removes the limit.
     *
     * @param limit the maximum number of rows this query should return
     * @return this Query object, to allow chaining method calls
     */
    public Query limit(int limit) {
        if (immutable) {
            return fork().limit(limit);
        }
        if (this.limit != limit) {
            this.limit = limit;
            invalidateCompileCache();
        }
        return this;
    }

    /**
     * Set the limit and offset of this statement. Use a negative value for limit to remove the limit. Use a value less
     * than one for offset to remove the offset.
     *
     * @param limit the maximum number of rows this query should return
     * @param offset the number of rows this query should skip
     * @return this Query object, to allow chaining method calls
     */
    public Query limit(int limit, int offset) {
        if (immutable) {
            return fork().limit(limit, offset);
        }
        if (this.limit != limit || this.offset != offset) {
            this.limit = limit;
            this.offset = offset;
            invalidateCompileCache();
        }
        return this;
    }

    /**
     * @return the current limit of this query
     */
    public int getLimit() {
        return limit;
    }

    /**
     * @return the current offset of this query
     */
    public int getOffset() {
        return offset;
    }

    /**
     * Mark that this query should be checked for syntactic anomalies in the WHERE clause (e.g. if a raw selection was
     * applied)
     */
    public void requestValidation() {
        this.needsValidation = true;
    }

    /**
     * @return true if this query should be checked for syntactic anomalies in the WHERE clause
     */
    public boolean needsValidation() {
        return needsValidation;
    }

    // If the given table wraps a subquery that needs validation, this query does too.
    private void updateNeedsValidationFromTable(SqlTable<?> sqlTable) {
        if (sqlTable instanceof SubqueryTable) {
            if (((SubqueryTable) sqlTable).query.needsValidation()) {
                requestValidation();
            }
        }
    }

    // Two queries are equal when they compile to the same SQL string.
    @Override
    public boolean equals(Object o) {
        return this == o || !(o == null || getClass() != o.getClass()) && this.toString().equals(o.toString());
    }

    // Consistent with equals: hash the compiled SQL string.
    @Override
    public int hashCode() {
        return toString().hashCode();
    }

    // Compiles the query with validation enabled and returns the resolved SQL text.
    public final String sqlForValidation() {
        List<Object> argsOrReferences = new ArrayList<Object>();
        StringBuilder sql = new StringBuilder(STRING_BUILDER_INITIAL_CAPACITY);
        appendCompiledStringWithArguments(sql, argsOrReferences, true);
        return new CompiledArgumentResolver(sql.toString(), argsOrReferences).resolveToCompiledStatement().sql;
    }

    @Override
    protected void appendCompiledStringWithArguments(StringBuilder sql, List<Object> selectionArgsBuilder) {
        appendCompiledStringWithArguments(sql, selectionArgsBuilder, false);
    }

    // Emits each clause of the SELECT statement in SQL order.
    protected void appendCompiledStringWithArguments(StringBuilder sql, List<Object> selectionArgsBuilder,
            boolean withValidation) {
        visitSelectClause(sql, selectionArgsBuilder);
        visitFromClause(sql, selectionArgsBuilder, withValidation);
        visitJoinClause(sql, selectionArgsBuilder, withValidation);
        visitWhereClause(sql, selectionArgsBuilder, withValidation);
        visitGroupByClause(sql, selectionArgsBuilder);
        visitCompoundSelectClauses(sql, selectionArgsBuilder, withValidation);
        visitOrderByClause(sql, selectionArgsBuilder);
        visitLimitClause(sql);
    }

    // Emits "SELECT [DISTINCT] <fields|*>".
    private void visitSelectClause(StringBuilder sql, List<Object> selectionArgsBuilder) {
        sql.append("SELECT ");
        if (distinct) {
            sql.append("DISTINCT ");
        }
        if (isEmpty(fields)) {
            // Explicitly add rowid for virtual tables: "select table.rowid as rowid, *"
            if (table instanceof VirtualTable) {
                VirtualTable virtualTable = (VirtualTable) table;
                LongProperty idProperty = virtualTable.getIdProperty();
                idProperty.appendCompiledStringWithArguments(sql, selectionArgsBuilder);
                sql.append(", ");
            }
            sql.append("*");
            return;
        }
        SqlUtils.appendConcatenatedCompilables(fields, sql, selectionArgsBuilder, ", ");
    }

    // Emits " FROM <table>"; subquery tables pass the validation flag through.
    private void visitFromClause(StringBuilder sql, List<Object> selectionArgsBuilder, boolean withValidation) {
        if (table == null) {
            return;
        }
        sql.append(" FROM ");
        if (table instanceof SubqueryTable) {
            ((SubqueryTable) table)
                    .appendCompiledStringWithArguments(sql, selectionArgsBuilder, withValidation);
        } else {
            table.appendCompiledStringWithArguments(sql, selectionArgsBuilder);
        }
    }

    // Emits the JOIN clauses, if any.
    private void visitJoinClause(StringBuilder sql, List<Object> selectionArgsBuilder, boolean withValidation) {
        if (isEmpty(joins)) {
            return;
        }
        sql.append(" ");
        SqlUtils.appendConcatenatedValidatables(joins, sql, selectionArgsBuilder, " ", withValidation);
    }

    // Emits " WHERE <criterions AND ...>"; parenthesized when validating so that
    // an injected clause cannot escape the WHERE expression.
    private void visitWhereClause(StringBuilder sql, List<Object> selectionArgsBuilder, boolean withValidation) {
        if (isEmpty(criterions)) {
            return;
        }
        sql.append(" WHERE ");
        if (withValidation) {
            sql.append("(");
        }
        SqlUtils.appendConcatenatedCompilables(criterions, sql, selectionArgsBuilder, " AND ");
        if (withValidation) {
            sql.append(")");
        }
    }

    // Emits " GROUP BY <terms>[ HAVING <criterions>]"; HAVING only applies with GROUP BY.
    private void visitGroupByClause(StringBuilder sql, List<Object> selectionArgsBuilder) {
        if (isEmpty(groupBies)) {
            return;
        }
        sql.append(" GROUP BY");
        for (Field<?> groupBy : groupBies) {
            sql.append(" ");
            groupBy.appendQualifiedExpression(sql, selectionArgsBuilder);
            sql.append(",");
        }
        // Remove the trailing comma left by the loop above.
        sql.deleteCharAt(sql.length() - 1);
        if (isEmpty(havings)) {
            return;
        }
        sql.append(" HAVING ");
        SqlUtils.appendConcatenatedCompilables(havings, sql, selectionArgsBuilder, " AND ");
    }

    // Emits the UNION/INTERSECT/EXCEPT clauses, if any.
    private void visitCompoundSelectClauses(StringBuilder sql, List<Object> selectionArgsBuilder,
            boolean withValidation) {
        if (isEmpty(compoundSelects)) {
            return;
        }
        sql.append(" ");
        SqlUtils.appendConcatenatedValidatables(compoundSelects, sql, selectionArgsBuilder, " ", withValidation);
    }

    // Emits " ORDER BY <terms>", if any.
    private void visitOrderByClause(StringBuilder sql, List<Object> selectionArgsBuilder) {
        if (isEmpty(orders)) {
            return;
        }
        sql.append(" ORDER BY ");
        SqlUtils.appendConcatenatedCompilables(orders, sql, selectionArgsBuilder, ", ");
    }

    // Emits " LIMIT n[ OFFSET m]" when either a limit or an offset is set.
    // Note: an offset without a limit still emits "LIMIT -1 OFFSET m" (valid SQLite).
    private void visitLimitClause(StringBuilder sql) {
        if (limit > NO_LIMIT || offset > NO_OFFSET) {
            sql.append(" LIMIT ").append(limit);
            if (offset > NO_OFFSET) {
                sql.append(" OFFSET ").append(offset);
            }
        }
    }

    /**
     * @return the table being selected from
     */
    public SqlTable<?> getTable() {
        return this.table;
    }

    /**
     * @return true if a table has been specified
     */
    public boolean hasTable() {
        return this.table != null;
    }

    /**
     * Alias this query, for use when creating a subquery to select from
     *
     * @param alias the name for the table when this query is used as a subquery
     * @return a {@link SubqueryTable} from this Query
     */
    public SubqueryTable as(String alias) {
        return SubqueryTable.fromQuery(this, alias);
    }

    /**
     * Alias this query, for use when creating a subquery to select from
     *
     * @param alias the name for the table when this query is used as a subquery
     * @param modelClass the model class representing the subquery
     * @return a {@link SubqueryTable} from this Query
     */
    public SubqueryTable as(String alias, Class<? extends ViewModel> modelClass, Property<?>[] properties) {
        return SubqueryTable.fromQuery(this, alias, modelClass, properties);
    }

    /**
     * @return a new query initialized with the current state of this query. The copy is not entirely shallow--methods
     * called on one query will not affect the state of the forked query--but changes to variable arguments in
     * {@link Criterion Criterions} they share will affect both copies.
     */
    public Query fork() {
        Query newQuery = new Query(fields);
        newQuery.table = table;
        newQuery.criterions = forkList(criterions);
        newQuery.joins = forkList(joins);
        newQuery.groupBies = forkList(groupBies);
        newQuery.compoundSelects = forkList(compoundSelects);
        newQuery.orders = forkList(orders);
        newQuery.havings = forkList(havings);
        newQuery.limit = limit;
        newQuery.offset = offset;
        newQuery.distinct = distinct;
        newQuery.needsValidation = needsValidation;
        // NOTE: 'immutable' is deliberately not copied, so the fork is mutable.
        return newQuery;
    }

    // Null-safe emptiness check for lists.
    private boolean isEmpty(List<?> list) {
        return list == null || list.isEmpty();
    }

    // Null-safe emptiness check for arrays.
    private <T> boolean isEmpty(T[] array) {
        return array == null || array.length == 0;
    }

    // Shallow-copies a list for fork(); empty/null lists collapse to null.
    private <T> ArrayList<T> forkList(ArrayList<T> list) {
        if (isEmpty(list)) {
            return null;
        }
        return new ArrayList<T>(list);
    }

    /**
     * Makes the query immutable-ish. Future method calls on this Query object that would mutate it will instead
     * {@link #fork() fork} this one, mutate the copy, and return that instead. However, any variable arguments (e.g.
     * AtomicReferences in {@link Criterion Criterions}) may still change.
     *
     * @return this Query object
     */
    public Query freeze() {
        this.immutable = true;
        return this;
    }

    /**
     * @return the {@link Field Fields} this query selects
     */
    public List<Field<?>> getFields() {
        if (isEmpty(selectAllCache)) {
            if (selectAllCache == null) {
                selectAllCache = new ArrayList<Field<?>>();
            }
            if (!isEmpty(fields)) {
                selectAllCache.addAll(fields);
            } else {
                // NOTE(review): assumes from() was called when no explicit fields
                // were selected; table.allFields() would NPE otherwise — confirm.
                SquidUtilities.addAll(selectAllCache, table.allFields());
                if (joins != null) {
                    for (Join join : joins) {
                        SquidUtilities.addAll(selectAllCache, join.joinTable.allFields());
                    }
                }
            }
        }
        // Return a defensive copy so callers cannot corrupt the cache.
        return new ArrayList<Field<?>>(selectAllCache);
    }
}
| |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.flex.compiler.internal.units.requests;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.flex.compiler.internal.workspaces.Workspace;
import org.apache.flex.compiler.units.requests.IRequest;
import org.apache.flex.compiler.units.requests.IRequestResult;
/**
* Creates implementations of the IRequest, where the object returned from the
* get method of the IRequest is returned from a Callable returned from the
* getCallable method implemented by subclasses of this class.
* <p>
* This class hides the complicated process of creating and reusing request
* objects. Sub classes just need to return a Callable from getCallable that
* will return the result of the request.
*
* @param <ResultType> The class that is used for the result of the request.
* @param <RequesteeType> The type of a context parameter to pass from calls to getRequest
* to getCallable.
*/
public abstract class RequestMaker<ResultType extends IRequestResult, RequesteeInterfaceType, RequesteeType extends RequesteeInterfaceType >
{
    /**
     * If you run with -Dthrow.assertions=true, then an AssertionError
     * will not get turned into an internal compiler problem
     * and instead will be caught by JUnit as an error.
     */
    private static final boolean THROW_ASSERTIONS = System.getProperty("throw.assertions", "false").equals("true");

    // IRequest implementation whose Future is set asynchronously after
    // construction; readers of the Future block on a lock/condition pair
    // until setFuture() has been called.
    private static class Request<V extends IRequestResult, W> implements IRequest<V, W>
    {
        // The computation's Future; null until setFuture() is called. Guarded by 'lock'.
        private Future<V> future;
        private Lock lock;
        // Signaled once 'future' becomes non-null.
        private Condition haveFuture;
        // Creation time of this request, in epoch milliseconds.
        private final long timestamp;
        // Context object this request was made for.
        private final W requestee;

        public Request(W requestee)
        {
            lock = new ReentrantLock();
            haveFuture = lock.newCondition();
            timestamp = System.currentTimeMillis();
            this.requestee = requestee;
        }

        /**
         * Blocks until the request's result is available and returns it.
         * Returns null if an unexpected ExecutionException occurs (which the
         * wrapping Callable is designed to prevent).
         */
        @Override
        public V get() throws InterruptedException
        {
            V result = null;
            try
            {
                // blocks till request is done
                result = getFuture().get();
            }
            catch (ExecutionException executionException)
            {
                Throwable cause = executionException.getCause();
                if (THROW_ASSERTIONS && (cause instanceof AssertionError))
                    throw (AssertionError)cause;
                /*
                 * We don't expect to ever get an ExecutionException because we
                 * eat all the Throwable's that are not the InterruptedException
                 * in the Callable we wrap around the Callable we got from the
                 * abstract getCallable method.
                 */
                assert false : "Unexpected ExecutionException!";
                executionException.printStackTrace();
            }
            return result;
        }

        // Non-blocking: a request whose Future has not been installed yet is not done.
        @Override
        public boolean isDone()
        {
            lock.lock();
            try
            {
                if (future == null)
                    return false;
                return future.isDone();
            }
            finally
            {
                lock.unlock();
            }
        }

        // Blocks (uninterruptibly) until setFuture() has installed the Future.
        private Future<V> getFuture()
        {
            lock.lock();
            try
            {
                while (future == null)
                    haveFuture.awaitUninterruptibly();
                return future;
            }
            finally
            {
                lock.unlock();
            }
        }

        // Installs the Future and wakes every thread blocked in getFuture().
        private void setFuture(Future<V> future)
        {
            lock.lock();
            try
            {
                this.future = future;
                haveFuture.signalAll();
            }
            finally
            {
                lock.unlock();
            }
        }

        @Override
        public long getTimeStamp()
        {
            return timestamp;
        }

        @Override
        public W getRequestee()
        {
            return requestee;
        }
    }

    /**
     * Gets a reference to a request object, by either creating a new IRequest
     * or returning an existing one from the specified AtomicReference.
     * <p>
     * If a new IRequest is created the atomicRef is updated to point at it.
     *
     * @param u Parameter that is passed through to getCallable if a new Request
     * is created.
     * @param atomicRef An AtomicReference which contains a reference to an
     * existing IRequest or which will be updated to point to the IRequest this
     * method creates.
     * @param workspace The workspace which contains the ExecutorService which
     * is used to schedule processing to compute the IRequestResult.
     * @param isNeededForFileScope true if the request is needed to build a file
     * scope.
     * @return An IRequest referenced by the specified AtomicReference, or a new
     * IRequest.
     */
    public final IRequest<ResultType, RequesteeInterfaceType> getRequest(RequesteeType u, AtomicReference<IRequest<ResultType, RequesteeInterfaceType>> atomicRef, Workspace workspace, boolean isNeededForFileScope)
    {
        // This check is purely an optimization to avoid the alloc of the request.
        final IRequest<ResultType, RequesteeInterfaceType> existingRequest = atomicRef.get();
        if (existingRequest == null)
        {
            workspace.startRequest(isNeededForFileScope);
            final Request<ResultType, RequesteeInterfaceType> request = new Request<ResultType, RequesteeInterfaceType>(u);
            // CAS ensures only one thread's request wins; the loser must undo
            // its startRequest() bookkeeping via endRequest().
            if (atomicRef.compareAndSet(null, request))
            {
                ExecutorService exec = workspace.getExecutorService();
                request.setFuture(exec.submit(wrapCallable(u, getCallable(u), workspace)));
            }
            else
            {
                workspace.endRequest();
            }
            assert atomicRef.get() != null;
            return atomicRef.get();
        }
        else
        {
            return existingRequest;
        }
    }

    /**
     * Creates a new Callable that calls the specified Callable and catches any
     * Throwable's except for a InterruptedException that were not caught by the
     * specified Callable. When a Throwable other than InterruptedException was
     * not caught specified Callable, the protected abstract
     * getResultForThrowable method of this class is called to construct a
     * result object for the new Callable.
     *
     * @param u Parameter that is passed through to getResultForThrowable if the
     * specified Callable does not catch a thrown Throwable.
     * @param c Callable the resulting Callable calls and that may not catch all
     * Throwable that are thrown.
     * @param workspace The workspace to notify once this callable finishes.
     * @return A new callable that will not throw any Throwable other than
     * InterruptedException.
     */
    private Callable<ResultType> wrapCallable(final RequesteeType u, final Callable<ResultType> c, final Workspace workspace)
    {
        return new Callable<ResultType>()
        {
            @Override
            public ResultType call() throws InterruptedException
            {
                try
                {
                    return c.call();
                }
                catch (InterruptedException e)
                {
                    // Propagate cancellation untouched.
                    throw e;
                }
                catch (AssertionError ae)
                {
                    // Optionally let assertion failures escape for test runs.
                    if (THROW_ASSERTIONS)
                        throw ae;
                    return getResultForThrowable(u, ae);
                }
                catch (Exception e)
                {
                    // Convert any other failure into a result object so get()
                    // never sees an ExecutionException.
                    return getResultForThrowable(u, e);
                }
                finally
                {
                    // Balance the startRequest() made in getRequest().
                    workspace.endRequest();
                }
            }
        };
    }

    /**
     * Called to get the callable that computes the result of the request.
     *
     * @param u Parameter that was passed to getRequest.
     * @return A Callable that returns the result of the request.
     */
    protected abstract Callable<ResultType> getCallable(RequesteeType u);

    /**
     * Called to get the Result when an uncaught throwable was detected.
     *
     * @param u Parameter that was passed to getRequest.
     * @param t Throwable that was thrown from the Callable returned from
     * getCallable.
     */
    protected abstract ResultType getResultForThrowable(RequesteeType u, Throwable t);
}
| |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.automl.v1;
import com.google.api.pathtemplate.PathTemplate;
import com.google.api.resourcenames.ResourceName;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
@Generated("by gapic-generator-java")
public class ModelEvaluationName implements ResourceName {
private static final PathTemplate PROJECT_LOCATION_MODEL_MODEL_EVALUATION =
PathTemplate.createWithoutUrlEncoding(
"projects/{project}/locations/{location}/models/{model}/modelEvaluations/{model_evaluation}");
private volatile Map<String, String> fieldValuesMap;
private final String project;
private final String location;
private final String model;
private final String modelEvaluation;
@Deprecated
protected ModelEvaluationName() {
project = null;
location = null;
model = null;
modelEvaluation = null;
}
private ModelEvaluationName(Builder builder) {
project = Preconditions.checkNotNull(builder.getProject());
location = Preconditions.checkNotNull(builder.getLocation());
model = Preconditions.checkNotNull(builder.getModel());
modelEvaluation = Preconditions.checkNotNull(builder.getModelEvaluation());
}
public String getProject() {
return project;
}
public String getLocation() {
return location;
}
public String getModel() {
return model;
}
public String getModelEvaluation() {
return modelEvaluation;
}
public static Builder newBuilder() {
return new Builder();
}
public Builder toBuilder() {
return new Builder(this);
}
public static ModelEvaluationName of(
String project, String location, String model, String modelEvaluation) {
return newBuilder()
.setProject(project)
.setLocation(location)
.setModel(model)
.setModelEvaluation(modelEvaluation)
.build();
}
public static String format(
String project, String location, String model, String modelEvaluation) {
return newBuilder()
.setProject(project)
.setLocation(location)
.setModel(model)
.setModelEvaluation(modelEvaluation)
.build()
.toString();
}
public static ModelEvaluationName parse(String formattedString) {
if (formattedString.isEmpty()) {
return null;
}
Map<String, String> matchMap =
PROJECT_LOCATION_MODEL_MODEL_EVALUATION.validatedMatch(
formattedString, "ModelEvaluationName.parse: formattedString not in valid format");
return of(
matchMap.get("project"),
matchMap.get("location"),
matchMap.get("model"),
matchMap.get("model_evaluation"));
}
public static List<ModelEvaluationName> parseList(List<String> formattedStrings) {
List<ModelEvaluationName> list = new ArrayList<>(formattedStrings.size());
for (String formattedString : formattedStrings) {
list.add(parse(formattedString));
}
return list;
}
public static List<String> toStringList(List<ModelEvaluationName> values) {
List<String> list = new ArrayList<>(values.size());
for (ModelEvaluationName value : values) {
if (value == null) {
list.add("");
} else {
list.add(value.toString());
}
}
return list;
}
public static boolean isParsableFrom(String formattedString) {
return PROJECT_LOCATION_MODEL_MODEL_EVALUATION.matches(formattedString);
}
@Override
public Map<String, String> getFieldValuesMap() {
if (fieldValuesMap == null) {
synchronized (this) {
if (fieldValuesMap == null) {
ImmutableMap.Builder<String, String> fieldMapBuilder = ImmutableMap.builder();
if (project != null) {
fieldMapBuilder.put("project", project);
}
if (location != null) {
fieldMapBuilder.put("location", location);
}
if (model != null) {
fieldMapBuilder.put("model", model);
}
if (modelEvaluation != null) {
fieldMapBuilder.put("model_evaluation", modelEvaluation);
}
fieldValuesMap = fieldMapBuilder.build();
}
}
}
return fieldValuesMap;
}
public String getFieldValue(String fieldName) {
return getFieldValuesMap().get(fieldName);
}
  /** Renders the resource name by instantiating the path template with this name's fields. */
  @Override
  public String toString() {
    return PROJECT_LOCATION_MODEL_MODEL_EVALUATION.instantiate(
        "project",
        project,
        "location",
        location,
        "model",
        model,
        "model_evaluation",
        modelEvaluation);
  }
@Override
public boolean equals(Object o) {
if (o == this) {
return true;
}
if (o != null || getClass() == o.getClass()) {
ModelEvaluationName that = ((ModelEvaluationName) o);
return Objects.equals(this.project, that.project)
&& Objects.equals(this.location, that.location)
&& Objects.equals(this.model, that.model)
&& Objects.equals(this.modelEvaluation, that.modelEvaluation);
}
return false;
}
  /**
   * Hash over all four components, consistent with {@link #equals(Object)}: each field's
   * null-safe hash is folded in with multiply-by-prime (1000003) and XOR.
   */
  @Override
  public int hashCode() {
    int h = 1;
    h *= 1000003;
    h ^= Objects.hashCode(project);
    h *= 1000003;
    h ^= Objects.hashCode(location);
    h *= 1000003;
    h ^= Objects.hashCode(model);
    h *= 1000003;
    h ^= Objects.hashCode(modelEvaluation);
    return h;
  }
  /**
   * Builder for
   * projects/{project}/locations/{location}/models/{model}/modelEvaluations/{model_evaluation}.
   *
   * <p>Mutable, not thread-safe. All setters return {@code this} for chaining; no validation
   * is performed here — components are only checked when the built name is formatted/parsed.
   */
  public static class Builder {

    private String project;
    private String location;
    private String model;
    private String modelEvaluation;

    /** Protected so instances are obtained via {@code newBuilder()} / {@code toBuilder()}. */
    protected Builder() {}

    public String getProject() {
      return project;
    }

    public String getLocation() {
      return location;
    }

    public String getModel() {
      return model;
    }

    public String getModelEvaluation() {
      return modelEvaluation;
    }

    public Builder setProject(String project) {
      this.project = project;
      return this;
    }

    public Builder setLocation(String location) {
      this.location = location;
      return this;
    }

    public Builder setModel(String model) {
      this.model = model;
      return this;
    }

    public Builder setModelEvaluation(String modelEvaluation) {
      this.modelEvaluation = modelEvaluation;
      return this;
    }

    /** Copy constructor used by {@code toBuilder()} to seed the builder from an existing name. */
    private Builder(ModelEvaluationName modelEvaluationName) {
      this.project = modelEvaluationName.project;
      this.location = modelEvaluationName.location;
      this.model = modelEvaluationName.model;
      this.modelEvaluation = modelEvaluationName.modelEvaluation;
    }

    /** Builds an immutable {@link ModelEvaluationName} from the current field values. */
    public ModelEvaluationName build() {
      return new ModelEvaluationName(this);
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.test.performance.scan;
import java.io.IOException;
import java.net.InetAddress;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.apache.accumulo.core.client.Accumulo;
import org.apache.accumulo.core.client.AccumuloClient;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.clientImpl.ClientContext;
import org.apache.accumulo.core.clientImpl.Tables;
import org.apache.accumulo.core.conf.AccumuloConfiguration;
import org.apache.accumulo.core.conf.IterConfigUtil;
import org.apache.accumulo.core.conf.IterLoad;
import org.apache.accumulo.core.crypto.CryptoServiceFactory;
import org.apache.accumulo.core.data.ArrayByteSequence;
import org.apache.accumulo.core.data.ByteSequence;
import org.apache.accumulo.core.data.Column;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.TableId;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.dataImpl.KeyExtent;
import org.apache.accumulo.core.dataImpl.thrift.IterInfo;
import org.apache.accumulo.core.file.FileOperations;
import org.apache.accumulo.core.file.FileSKVIterator;
import org.apache.accumulo.core.iterators.IteratorUtil.IteratorScope;
import org.apache.accumulo.core.iterators.SortedKeyValueIterator;
import org.apache.accumulo.core.iterators.SortedMapIterator;
import org.apache.accumulo.core.iteratorsImpl.system.ColumnFamilySkippingIterator;
import org.apache.accumulo.core.iteratorsImpl.system.ColumnQualifierFilter;
import org.apache.accumulo.core.iteratorsImpl.system.DeletingIterator;
import org.apache.accumulo.core.iteratorsImpl.system.DeletingIterator.Behavior;
import org.apache.accumulo.core.iteratorsImpl.system.MultiIterator;
import org.apache.accumulo.core.iteratorsImpl.system.VisibilityFilter;
import org.apache.accumulo.core.metadata.MetadataServicer;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.core.util.HostAndPort;
import org.apache.accumulo.core.util.Stat;
import org.apache.accumulo.server.ServerContext;
import org.apache.accumulo.server.cli.ServerUtilOpts;
import org.apache.accumulo.server.conf.ServerConfigurationFactory;
import org.apache.accumulo.server.conf.TableConfiguration;
import org.apache.accumulo.server.fs.FileRef;
import org.apache.accumulo.server.fs.VolumeManager;
import org.apache.accumulo.server.util.MetadataTableUtil;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.beust.jcommander.Parameter;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
/**
 * Command-line performance tool that measures scan throughput over randomly selected tablets of
 * a table, four ways: reading tablet files directly, through the system iterator stack, through
 * the table iterator stack, and through a live Accumulo scanner. Results go to stdout.
 *
 * <p>Fixes in this revision: {@code runTest} now installs the latches on each task BEFORE
 * submitting it to the pool (previously a submitted task could race {@code setSignals} and hit a
 * null latch); the "RUNNING TEST" banner uses {@code printf} so {@code %n} is expanded
 * (previously {@code println} printed the literal characters); {@code readFiles} closes its
 * reader in a {@code finally} block so a seek/next failure does not leak the file handle.
 */
public class CollectTabletStats {

  private static final Logger log = LoggerFactory.getLogger(CollectTabletStats.class);

  /** Command-line options; inherits connection/auth options from {@link ServerUtilOpts}. */
  static class CollectOptions extends ServerUtilOpts {
    @Parameter(names = {"-t", "--table"}, required = true, description = "table to use")
    String tableName;
    @Parameter(names = "--iterations", description = "number of iterations")
    int iterations = 3;
    @Parameter(names = "--numThreads", description = "number of threads")
    int numThreads = 1;
    @Parameter(names = "-f", description = "select far tablets, default is to use local tablets")
    boolean selectFarTablets = false;
    @Parameter(names = "--columns", description = "comma separated list of columns")
    String columns;
  }

  public static void main(String[] args) throws Exception {
    final CollectOptions opts = new CollectOptions();
    opts.parseArgs(CollectTabletStats.class.getName(), args);

    String[] columnsTmp = {};
    if (opts.columns != null)
      columnsTmp = opts.columns.split(",");
    final String[] columns = columnsTmp;

    ServerContext context = opts.getServerContext();
    final VolumeManager fs = context.getVolumeManager();
    ServerConfigurationFactory sconf = context.getServerConfFactory();

    TableId tableId = Tables.getTableId(context, opts.tableName);
    if (tableId == null) {
      log.error("Unable to find table named {}", opts.tableName);
      System.exit(-1);
    }

    // locate candidate tablets (local or far, per -f) and pick numThreads of them at random
    TreeMap<KeyExtent,String> tabletLocations = new TreeMap<>();
    List<KeyExtent> candidates =
        findTablets(context, !opts.selectFarTablets, opts.tableName, tabletLocations);
    if (candidates.size() < opts.numThreads) {
      System.err.println("ERROR : Unable to find " + opts.numThreads + " "
          + (opts.selectFarTablets ? "far" : "local") + " tablets");
      System.exit(-1);
    }
    List<KeyExtent> tabletsToTest = selectRandomTablets(opts.numThreads, candidates);

    Map<KeyExtent,List<FileRef>> tabletFiles = new HashMap<>();
    for (KeyExtent ke : tabletsToTest) {
      List<FileRef> files = getTabletFiles(context, ke);
      tabletFiles.put(ke, files);
    }

    System.out.println();
    System.out.println("run location      : " + InetAddress.getLocalHost().getHostName() + "/"
        + InetAddress.getLocalHost().getHostAddress());
    System.out.println("num threads       : " + opts.numThreads);
    System.out.println("table             : " + opts.tableName);
    System.out.println("table id          : " + tableId);

    for (KeyExtent ke : tabletsToTest) {
      System.out.println("\t *** Information about tablet " + ke.getUUID() + " *** ");
      System.out.println("\t\t# files in tablet : " + tabletFiles.get(ke).size());
      System.out.println("\t\ttablet location   : " + tabletLocations.get(ke));
      reportHdfsBlockLocations(context, tabletFiles.get(ke));
    }

    // printf, not println: %n is a Formatter conversion and is not expanded by println
    System.out.printf("%n*** RUNNING TEST ***%n");

    ExecutorService threadPool = Executors.newFixedThreadPool(opts.numThreads);

    // pass 1: raw file reads, no iterator stack
    for (int i = 0; i < opts.iterations; i++) {
      ArrayList<Test> tests = new ArrayList<>();
      for (final KeyExtent ke : tabletsToTest) {
        final List<FileRef> files = tabletFiles.get(ke);
        Test test = new Test(ke) {
          @Override
          public int runTest() throws Exception {
            return readFiles(fs, sconf.getSystemConfiguration(), files, ke, columns);
          }
        };
        tests.add(test);
      }
      runTest("read files", tests, opts.numThreads, threadPool);
    }

    // pass 2: system iterator stack (deletes, visibility, column filters)
    for (int i = 0; i < opts.iterations; i++) {
      ArrayList<Test> tests = new ArrayList<>();
      for (final KeyExtent ke : tabletsToTest) {
        final List<FileRef> files = tabletFiles.get(ke);
        Test test = new Test(ke) {
          @Override
          public int runTest() throws Exception {
            return readFilesUsingIterStack(fs, sconf, files, opts.auths, ke, columns, false);
          }
        };
        tests.add(test);
      }
      runTest("read tablet files w/ system iter stack", tests, opts.numThreads, threadPool);
    }

    // pass 3: system stack plus table-configured scan iterators
    for (int i = 0; i < opts.iterations; i++) {
      ArrayList<Test> tests = new ArrayList<>();
      for (final KeyExtent ke : tabletsToTest) {
        final List<FileRef> files = tabletFiles.get(ke);
        Test test = new Test(ke) {
          @Override
          public int runTest() throws Exception {
            return readFilesUsingIterStack(fs, sconf, files, opts.auths, ke, columns, true);
          }
        };
        tests.add(test);
      }
      runTest("read tablet files w/ table iter stack", tests, opts.numThreads, threadPool);
    }

    // pass 4: full client scan through a live tablet server, then per-tablet statistics
    try (AccumuloClient client = Accumulo.newClient().from(opts.getClientProps()).build()) {
      for (int i = 0; i < opts.iterations; i++) {
        ArrayList<Test> tests = new ArrayList<>();
        for (final KeyExtent ke : tabletsToTest) {
          Test test = new Test(ke) {
            @Override
            public int runTest() throws Exception {
              return scanTablet(client, opts.tableName, opts.auths, ke.getPrevEndRow(),
                  ke.getEndRow(), columns);
            }
          };
          tests.add(test);
        }
        runTest("read tablet data through accumulo", tests, opts.numThreads, threadPool);
      }

      for (final KeyExtent ke : tabletsToTest) {
        threadPool.submit(() -> {
          try {
            calcTabletStats(client, opts.tableName, opts.auths, ke, columns);
          } catch (Exception e) {
            log.error("Failed to calculate tablet stats.", e);
          }
        });
      }
    }

    // graceful shutdown: already-submitted stat tasks still run to completion
    threadPool.shutdown();
  }

  /**
   * One timed scan task. All tasks wait on a shared start latch so they begin together, and
   * count down a finish latch so the caller can aggregate timings.
   */
  private abstract static class Test implements Runnable {
    private int count;
    private long t1;
    private long t2;
    private CountDownLatch startCdl, finishCdl;
    private KeyExtent ke;

    Test(KeyExtent ke) {
      this.ke = ke;
    }

    /** Performs the actual scan and returns the number of cells read. */
    public abstract int runTest() throws Exception;

    /** Must be called before the task starts running; see {@link #runTest(String, List, int, ExecutorService)}. */
    void setSignals(CountDownLatch scdl, CountDownLatch fcdl) {
      this.startCdl = scdl;
      this.finishCdl = fcdl;
    }

    @Override
    public void run() {
      try {
        startCdl.await();
      } catch (InterruptedException e) {
        log.error("startCdl.await() failed.", e);
      }

      t1 = System.currentTimeMillis();
      try {
        count = runTest();
      } catch (Exception e) {
        log.error("runTest() failed.", e);
      }
      t2 = System.currentTimeMillis();

      double time = (t2 - t1) / 1000.0;
      System.out.printf(
          "\t\ttablet: " + ke.getUUID() + " thread: " + Thread.currentThread().getId()
              + " count: %,d cells  time: %6.2f  rate: %,6.2f cells/sec%n",
          count, time, count / time);
      finishCdl.countDown();
    }

    int getCount() {
      return count;
    }

    long getStartTime() {
      return t1;
    }

    long getFinishTime() {
      return t2;
    }
  }

  @SuppressFBWarnings(value = "DM_GC", justification = "gc is okay for test")
  private static void runTest(String desc, List<Test> tests, int numThreads,
      ExecutorService threadPool) throws Exception {
    System.out.println("\tRunning test : " + desc);

    CountDownLatch startSignal = new CountDownLatch(1);
    CountDownLatch finishedSignal = new CountDownLatch(numThreads);

    for (Test test : tests) {
      // install the latches BEFORE submitting; submitting first raced the pool thread,
      // which could call startCdl.await() on a still-null latch
      test.setSignals(startSignal, finishedSignal);
      threadPool.submit(test);
    }
    startSignal.countDown();
    finishedSignal.await();

    // aggregate over the earliest start and latest finish across all threads
    long minTime = Long.MAX_VALUE;
    long maxTime = Long.MIN_VALUE;
    long count = 0;
    for (Test test : tests) {
      minTime = Math.min(test.getStartTime(), minTime);
      maxTime = Math.max(test.getFinishTime(), maxTime);
      count += test.getCount();
    }
    double time = (maxTime - minTime) / 1000.0;
    System.out.printf("\tAggregate stats  count: %,d cells  time: %6.2f  rate: %,6.2f cells/sec%n",
        count, time, count / time);
    System.out.println();

    // run the gc between test so that object created during previous test are not
    // collected in following test
    System.gc();
    System.gc();
    System.gc();
  }

  /**
   * Collects the extents of all tablets of {@code tableName} whose current location is (or is
   * not, per {@code selectLocalTablets}) this host, also filling {@code tabletLocations}.
   */
  private static List<KeyExtent> findTablets(ClientContext context, boolean selectLocalTablets,
      String tableName, SortedMap<KeyExtent,String> tabletLocations) throws Exception {
    TableId tableId = Tables.getTableId(context, tableName);
    MetadataServicer.forTableId(context, tableId).getTabletLocations(tabletLocations);

    InetAddress localaddress = InetAddress.getLocalHost();
    List<KeyExtent> candidates = new ArrayList<>();
    for (Entry<KeyExtent,String> entry : tabletLocations.entrySet()) {
      String loc = entry.getValue();
      if (loc != null) {
        boolean isLocal =
            HostAndPort.fromString(entry.getValue()).getHost().equals(localaddress.getHostName());
        if (selectLocalTablets && isLocal) {
          candidates.add(entry.getKey());
        } else if (!selectLocalTablets && !isLocal) {
          candidates.add(entry.getKey());
        }
      }
    }
    return candidates;
  }

  /** Picks {@code numThreads} distinct tablets uniformly at random (partial Fisher-Yates). */
  private static List<KeyExtent> selectRandomTablets(int numThreads, List<KeyExtent> candidates) {
    List<KeyExtent> tabletsToTest = new ArrayList<>();
    Random rand = new SecureRandom();
    for (int i = 0; i < numThreads; i++) {
      int rindex = rand.nextInt(candidates.size());
      tabletsToTest.add(candidates.get(rindex));
      // swap the chosen tablet to the end and shrink the view so it cannot be chosen again
      Collections.swap(candidates, rindex, candidates.size() - 1);
      candidates = candidates.subList(0, candidates.size() - 1);
    }
    return tabletsToTest;
  }

  /** Returns the data-file references recorded in the metadata table for the given extent. */
  private static List<FileRef> getTabletFiles(ServerContext context, KeyExtent ke)
      throws IOException {
    return new ArrayList<>(
        MetadataTableUtil.getFileAndLogEntries(context, ke).getSecond().keySet());
  }

  /** Prints, for each file, its HDFS block offsets and the hosts holding each block. */
  private static void reportHdfsBlockLocations(ServerContext context, List<FileRef> files)
      throws Exception {
    VolumeManager fs = context.getVolumeManager();

    System.out.println("\t\tFile block report : ");
    for (FileRef file : files) {
      FileStatus status = fs.getFileStatus(file.path());

      if (status.isDirectory()) {
        // assume it is a map file
        status = fs.getFileStatus(new Path(file + "/data"));
      }
      FileSystem ns = fs.getVolumeByPath(file.path()).getFileSystem();

      BlockLocation[] locs = ns.getFileBlockLocations(status, 0, status.getLen());

      System.out.println("\t\t\tBlocks for : " + file);
      for (BlockLocation blockLocation : locs) {
        System.out.printf("\t\t\t\t offset : %,13d  hosts :", blockLocation.getOffset());
        for (String host : blockLocation.getHosts()) {
          System.out.print(" " + host);
        }
        System.out.println();
      }
    }
    System.out.println();
  }

  /**
   * Builds the scan-side iterator stack over the given file readers: merge, delete handling,
   * column-family skipping, column-qualifier filter, visibility filter, and (optionally) the
   * table-configured scan iterators.
   */
  private static SortedKeyValueIterator<Key,Value> createScanIterator(KeyExtent ke,
      Collection<SortedKeyValueIterator<Key,Value>> mapfiles, Authorizations authorizations,
      byte[] defaultLabels, HashSet<Column> columnSet, List<IterInfo> ssiList,
      Map<String,Map<String,String>> ssio, boolean useTableIterators, TableConfiguration conf)
      throws IOException {

    // empty in-memory map stands in for the tablet's in-memory mutations
    SortedMapIterator smi = new SortedMapIterator(new TreeMap<>());

    List<SortedKeyValueIterator<Key,Value>> iters = new ArrayList<>(mapfiles.size() + 1);
    iters.addAll(mapfiles);
    iters.add(smi);

    MultiIterator multiIter = new MultiIterator(iters, ke);
    SortedKeyValueIterator<Key,Value> delIter =
        DeletingIterator.wrap(multiIter, false, Behavior.PROCESS);
    ColumnFamilySkippingIterator cfsi = new ColumnFamilySkippingIterator(delIter);
    SortedKeyValueIterator<Key,Value> colFilter = ColumnQualifierFilter.wrap(cfsi, columnSet);
    SortedKeyValueIterator<Key,Value> visFilter =
        VisibilityFilter.wrap(colFilter, authorizations, defaultLabels);

    if (useTableIterators) {
      IterLoad il = IterConfigUtil.loadIterConf(IteratorScope.scan, ssiList, ssio, conf);
      return IterConfigUtil.loadIterators(visFilter, il.useAccumuloClassLoader(true));
    }
    return visFilter;
  }

  /** Reads each tablet file directly (no iterator stack) and returns the total cell count. */
  private static int readFiles(VolumeManager fs, AccumuloConfiguration aconf, List<FileRef> files,
      KeyExtent ke, String[] columns) throws Exception {
    int count = 0;

    HashSet<ByteSequence> columnSet = createColumnBSS(columns);

    for (FileRef file : files) {
      FileSystem ns = fs.getVolumeByPath(file.path()).getFileSystem();
      FileSKVIterator reader = FileOperations.getInstance().newReaderBuilder()
          .forFile(file.path().toString(), ns, ns.getConf(),
              CryptoServiceFactory.newDefaultInstance())
          .withTableConfiguration(aconf).build();
      try {
        Range range = new Range(ke.getPrevEndRow(), false, ke.getEndRow(), true);
        reader.seek(range, columnSet, columnSet.size() != 0);
        while (reader.hasTop() && !range.afterEndKey(reader.getTopKey())) {
          count++;
          reader.next();
        }
      } finally {
        // close in finally so a seek/next failure does not leak the file handle
        reader.close();
      }
    }

    return count;
  }

  /** Converts the column-family names to byte sequences for the reader's seek call. */
  private static HashSet<ByteSequence> createColumnBSS(String[] columns) {
    HashSet<ByteSequence> columnSet = new HashSet<>();
    for (String c : columns) {
      columnSet.add(new ArrayByteSequence(c));
    }
    return columnSet;
  }

  /** Reads the tablet files through the system (and optionally table) iterator stack. */
  private static int readFilesUsingIterStack(VolumeManager fs, ServerConfigurationFactory aconf,
      List<FileRef> files, Authorizations auths, KeyExtent ke, String[] columns,
      boolean useTableIterators) throws Exception {
    SortedKeyValueIterator<Key,Value> reader;

    List<SortedKeyValueIterator<Key,Value>> readers = new ArrayList<>(files.size());
    for (FileRef file : files) {
      FileSystem ns = fs.getVolumeByPath(file.path()).getFileSystem();
      readers.add(FileOperations.getInstance().newReaderBuilder()
          .forFile(file.path().toString(), ns, ns.getConf(),
              CryptoServiceFactory.newDefaultInstance())
          .withTableConfiguration(aconf.getSystemConfiguration()).build());
    }

    List<IterInfo> emptyIterinfo = Collections.emptyList();
    Map<String,Map<String,String>> emptySsio = Collections.emptyMap();
    TableConfiguration tconf = aconf.getTableConfiguration(ke.getTableId());
    reader = createScanIterator(ke, readers, auths, new byte[] {}, new HashSet<>(), emptyIterinfo,
        emptySsio, useTableIterators, tconf);

    HashSet<ByteSequence> columnSet = createColumnBSS(columns);

    reader.seek(new Range(ke.getPrevEndRow(), false, ke.getEndRow(), true), columnSet,
        columnSet.size() != 0);

    int count = 0;
    while (reader.hasTop()) {
      count++;
      reader.next();
    }

    return count;
  }

  /** Scans one tablet's row range through the normal client API and counts the entries. */
  private static int scanTablet(AccumuloClient client, String table, Authorizations auths,
      Text prevEndRow, Text endRow, String[] columns) throws Exception {
    try (Scanner scanner = client.createScanner(table, auths)) {
      scanner.setRange(new Range(prevEndRow, false, endRow, true));
      for (String c : columns) {
        scanner.fetchColumnFamily(new Text(c));
      }
      int count = 0;
      for (Entry<Key,Value> entry : scanner) {
        if (entry != null)
          count++;
      }
      return count;
    }
  }

  /** Scans one tablet and prints length statistics for each key/value component. */
  private static void calcTabletStats(AccumuloClient client, String table, Authorizations auths,
      KeyExtent ke, String[] columns) throws Exception {
    try (Scanner scanner = client.createScanner(table, auths)) {
      scanner.setRange(new Range(ke.getPrevEndRow(), false, ke.getEndRow(), true));
      for (String c : columns) {
        scanner.fetchColumnFamily(new Text(c));
      }

      Stat rowLen = new Stat();
      Stat cfLen = new Stat();
      Stat cqLen = new Stat();
      Stat cvLen = new Stat();
      Stat valLen = new Stat();
      Stat colsPerRow = new Stat();

      Text lastRow = null;
      int colsPerRowCount = 0;

      for (Entry<Key,Value> entry : scanner) {
        Key key = entry.getKey();
        Text row = key.getRow();

        if (lastRow == null) {
          lastRow = row;
        }
        if (!lastRow.equals(row)) {
          // row boundary: record how many columns the previous row had
          colsPerRow.addStat(colsPerRowCount);
          lastRow = row;
          colsPerRowCount = 0;
        }

        colsPerRowCount++;

        rowLen.addStat(row.getLength());
        cfLen.addStat(key.getColumnFamilyData().length());
        cqLen.addStat(key.getColumnQualifierData().length());
        cvLen.addStat(key.getColumnVisibilityData().length());
        valLen.addStat(entry.getValue().get().length);
      }

      // synchronize on System.out so concurrently-run tablets do not interleave reports
      synchronized (System.out) {
        System.out.println("");
        System.out.println("\tTablet " + ke.getUUID() + " statistics : ");
        printStat("Row length", rowLen);
        printStat("Column family length", cfLen);
        printStat("Column qualifier length", cqLen);
        printStat("Column visibility length", cvLen);
        printStat("Value length", valLen);
        printStat("Columns per row", colsPerRow);
        System.out.println("");
      }
    }
  }

  private static void printStat(String desc, Stat s) {
    System.out.printf("\t\tDescription: [%30s]  average: %,6.2f  min: %,d  max: %,d %n", desc,
        s.mean(), s.min(), s.max());
  }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ui.tree;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.util.Disposer;
import com.intellij.util.concurrency.Invoker;
import com.intellij.util.concurrency.InvokerSupplier;
import com.intellij.util.ui.tree.AbstractTreeModel;
import com.intellij.util.ui.tree.TreeModelAdapter;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.concurrency.AsyncPromise;
import org.junit.Test;
import javax.swing.*;
import javax.swing.event.TreeModelEvent;
import javax.swing.event.TreeModelListener;
import javax.swing.tree.*;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Objects;
import java.util.concurrent.TimeoutException;
import java.util.function.Consumer;
import java.util.function.Supplier;
import static com.intellij.diagnostic.ThreadDumper.dumpThreadsToString;
import static com.intellij.util.ArrayUtil.EMPTY_OBJECT_ARRAY;
import static com.intellij.util.ui.tree.TreeUtil.expandAll;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.junit.Assert.*;
public final class AsyncTreeModelTest {
/**
* A bigger threshold increases a probability of restarting current task.
*/
private static final double THRESHOLD = .001;
/**
* Set to true to print some debugging information.
*/
private static final boolean PRINT = false;
  /** Floods the model with 10000 root replacements to stress concurrent update handling. */
  @Test
  public void testAggressiveUpdating() {
    testBackgroundThread(() -> null, test -> test.updateModelAndWait(model -> {
      for (int i = 0; i < 10000; i++) ((DefaultTreeModel)model).setRoot(createRoot());
    }, test::done), false, 0);
  }
  /** A freshly created AsyncTreeModel must not start loading content on its own. */
  @Test
  public void testProcessingNPE() {
    Disposable disposable = Disposer.newDisposable();
    AsyncTreeModel model = new AsyncTreeModel(new DefaultTreeModel(new DefaultMutableTreeNode()), disposable);
    try {
      assert !model.isProcessing() : "created model should not update content";
    }
    finally {
      // always release the disposable, even when the assertion fails
      Disposer.dispose(disposable);
    }
  }
  /** Starts with a null root (empty tree), installs a root, and expects a one-node tree. */
  @Test
  public void testNullRoot() {
    testAsync(() -> null, test
      -> testPathState0(test.tree, ()
      -> test.updateModelAndWait(model -> ((DefaultTreeModel)model).setRoot(createRoot()), ()
      -> testPathState1(test.tree, test::done))));
  }

  /** Starts with a root-only tree, clears the root, and expects the tree to become empty. */
  @Test
  public void testRootOnly() {
    testAsync(AsyncTreeModelTest::createRoot, test
      -> testPathState1(test.tree, ()
      -> test.updateModelAndWait(model -> ((DefaultTreeModel)model).setRoot(null), ()
      -> testPathState0(test.tree, test::done))));
  }
  /** Replaces the root with an equal-but-distinct node, for both mutable and immutable nodes. */
  @Test
  public void testRootOnlyUpdate() {
    testRootOnlyUpdate(false);
    testRootOnlyUpdate(true);
  }

  private static void testRootOnlyUpdate(boolean mutable) {
    TreeNode first = new Node("root", mutable);
    TreeNode second = new Node("root", mutable);
    // mutable nodes compare equal by content; immutable ones use identity equality
    assert mutable == first.equals(second);
    assert first != second : "both nodes should not be the same";
    testAsync(() -> first, test
      -> testPathState1(test.tree, ()
      -> testRootOnlyUpdate(test, first, ()
      -> test.updateModelAndWait(model -> ((DefaultTreeModel)model).setRoot(second), ()
      -> testPathState1(test.tree, ()
      -> testRootOnlyUpdate(test, second, test::done))))));
  }

  /** Asserts the tree root is the exact expected instance, then continues with {@code task}. */
  private static void testRootOnlyUpdate(@NotNull ModelTest test, @NotNull TreeNode expected, @NotNull Runnable task) {
    Object actual = test.tree.getModel().getRoot();
    assert expected.equals(actual) : "expected node should be equal to the tree root";
    assert expected == actual : "expected node should be the same";
    task.run();
  }
  /**
   * Replaces the root with an equal mutable tree and checks that the visible paths survive:
   * equal by content, but rebuilt (non-root components are new instances).
   */
  @Test
  public void testChildrenUpdate() {
    ArrayList<TreePath> list = new ArrayList<>();
    testAsync(AsyncTreeModelTest::createMutableRoot, test
      -> expandAll(test.tree, ()
      -> testPathState(test.tree, " +'root'\n" + MUTABLE_CHILDREN, ()
      -> collectTreePaths(test.tree, list, ()
      -> test.updateModelAndWait(model -> ((DefaultTreeModel)model).setRoot(createMutableRoot()), ()
      -> testPathState(test.tree, " +'root'\n" + MUTABLE_CHILDREN, ()
      -> checkTreePaths(test.tree, list, test::done)))))));
  }

  /** Snapshots every visible row's path into {@code list}, then continues with {@code task}. */
  private static void collectTreePaths(@NotNull JTree tree, @NotNull ArrayList<TreePath> list, @NotNull Runnable task) {
    list.clear();
    forEachRow(tree, list::add);
    task.run();
  }

  /** Verifies the tree's visible rows match the previously collected paths one-to-one. */
  private static void checkTreePaths(@NotNull JTree tree, @NotNull ArrayList<TreePath> list, @NotNull Runnable task) {
    Iterator<TreePath> iterator = list.iterator();
    forEachRow(tree, path -> {
      assertTrue(iterator.hasNext());
      assertTreePath(path, iterator.next());
    });
    assertFalse(iterator.hasNext());
    task.run();
  }

  /** Recursively compares two paths: equal throughout, distinct instances below the root. */
  private static void assertTreePath(@NotNull TreePath expected, @NotNull TreePath actual) {
    assertEquals("expected path should be equal to the actual path", expected, actual);
    // do no validate root node, because it is not updated in Swing's viewable row list
    if (expected.getParentPath() == null && actual.getParentPath() == null) return;
    assertComponent(expected.getLastPathComponent(), actual.getLastPathComponent());
    assertTreePath(expected.getParentPath(), actual.getParentPath());
  }

  /** Asserts two path components are equal by content but not the same instance. */
  private static void assertComponent(@NotNull Object expected, @NotNull Object actual) {
    assertEquals("expected node should be equal to the actual node", expected, actual);
    assertNotSame(expected, actual);
  }
  /** Builds a fresh mutable root with two children ("color", "greek") and grandchildren. */
  @NotNull
  private static TreeNode createMutableRoot() {
    return new Node(true, "root",
                    new Node(true, "color", "red", "green", "blue"),
                    new Node(true, "greek", "alpha", "beta", "gamma"));
  }

  // expected fully-expanded path state below the root for createMutableRoot()
  private static final String MUTABLE_CHILDREN
    = "  +'color'\n" +
      "    'red'\n" +
      "    'green'\n" +
      "    'blue'\n" +
      "  +'greek'\n" +
      "    'alpha'\n" +
      "    'beta'\n" +
      "    'gamma'\n";
  /**
   * Walks a chain of expand/collapse and root-visibility changes, checking the rendered path
   * state after each step (collapsed root, hidden root, partially expanded subtrees, ...).
   */
  @Test
  public void testChildren() {
    TreeNode color = createColorNode();
    TreeNode digit = createDigitNode();
    TreeNode greek = createGreekNode();
    TreeNode root = new Node("root", color, digit, greek);
    TreePath path = new TreePath(root);
    testAsync(() -> root, test
      -> testPathState(test.tree, " +'root'\n" + CHILDREN, ()
      -> test.collapse(path, ()
      -> testPathState1(test.tree, ()
      -> test.setRootVisible(false, ()
      -> testPathState0(test.tree, ()
      -> test.expand(path, ()
      -> testPathState(test.tree, CHILDREN, ()
      -> test.expand(path.pathByAddingChild(color), ()
      -> testPathState(test.tree, CHILDREN_COLOR, ()
      -> test.expand(path.pathByAddingChild(greek), ()
      -> testPathState(test.tree, CHILDREN_COLOR_GREEK, ()
      -> test.collapse(path, ()
      -> testPathState0(test.tree, ()
      -> test.setRootVisible(true, ()
      -> testPathState1(test.tree, ()
      -> test.expand(path, ()
      -> testPathState(test.tree, " +'root'\n" + CHILDREN_COLOR_GREEK, test::done))))))))))))))))));
  }

  // expected states: children collapsed, "color" expanded, "color" and "greek" expanded
  private static final String CHILDREN
    = "   'color'\n" +
      "   'digit'\n" +
      "   'greek'\n";
  private static final String CHILDREN_COLOR
    = "  +'color'\n" +
      "    'red'\n" +
      "    'green'\n" +
      "    'blue'\n" +
      "   'digit'\n" +
      "   'greek'\n";
  private static final String CHILDREN_COLOR_GREEK
    = "  +'color'\n" +
      "    'red'\n" +
      "    'green'\n" +
      "    'blue'\n" +
      "   'digit'\n" +
      "  +'greek'\n" +
      "    'alpha'\n" +
      "    'beta'\n" +
      "    'gamma'\n" +
      "    'delta'\n" +
      "    'epsilon'\n";
  /** Resolves a deep path (loading intermediate nodes) and expands up to it. */
  @Test
  public void testChildrenResolve() {
    Node node = new Node("node");
    Node root = new Node("root", new Node("upper", new Node("middle", new Node("lower", node))));
    TreePath tp = TreePathUtil.convertArrayToTreePath(node.getPath());
    testAsync(() -> root, test
      -> testPathState(test.tree, " +'root'\n   'upper'\n", ()
      -> test.resolve(tp, path
      -> test.expand(path.getParentPath(), () // expand parent path because leaf nodes are ignored
      -> testPathState(test.tree, " +'root'\n  +'upper'\n   +'middle'\n    +'lower'\n      'node'\n", test::done)))));
  }

  /** Visits a deep path with loading allowed; the visitor matches nodes by string form. */
  @Test
  public void testChildrenVisit() {
    Node node = new Node("node");
    Node root = new Node("root", new Node("upper", new Node("middle", new Node("lower", node))));
    TreePath tp = TreePathUtil.convertArrayToTreePath(node.getPath(), Object::toString);
    testAsync(() -> root, test
      -> testPathState(test.tree, " +'root'\n   'upper'\n", ()
      -> test.visit(new TreeVisitor.ByTreePath<>(tp, Object::toString), true, path
      -> test.expand(path.getParentPath(), () // expand parent path because leaf nodes are ignored
      -> testPathState(test.tree, " +'root'\n  +'upper'\n   +'middle'\n    +'lower'\n      'node'\n", test::done)))));
  }

  /** Visiting without loading must fail (null path) when the children are not yet loaded. */
  @Test
  public void testChildrenVisitWithoutLoading() {
    Node node = new Node("node");
    Node root = new Node("root", new Node("upper", new Node("middle", new Node("lower", node))));
    TreePath tp = TreePathUtil.convertArrayToTreePath(node.getPath(), Object::toString);
    testAsync(() -> root, test
      -> testPathState(test.tree, " +'root'\n   'upper'\n", ()
      -> test.visit(new TreeVisitor.ByTreePath<>(tp, Object::toString), false, path -> {
        assertNull(path);
        test.done();
      })));
  }
  /** Fires a structure-changed event for a collapsed child whose children were never loaded. */
  @Test
  public void testCollapsedNodeUpdateIfChildrenNotLoaded() {
    TreeNode color = createColorNode();
    TreeNode digit = createDigitNode();
    TreeNode greek = createGreekNode();
    TreeNode root = new Node("root", color, digit, greek);
    TreePath path = new TreePath(root).pathByAddingChild(greek);
    testAsync(() -> root, test
      -> testPathState(test.tree, " +'root'\n" + CHILDREN, ()
      -> test.fireStructureChanged(path)));
  }

  /** Fires a structure-changed event for a node that was collapsed after its children loaded. */
  @Test
  public void testCollapsedNodeUpdateIfChildrenLoaded() {
    TreeNode color = createColorNode();
    TreeNode digit = createDigitNode();
    TreeNode greek = createGreekNode();
    TreeNode root = new Node("root", color, digit, greek);
    TreePath path = new TreePath(root);
    testAsync(() -> root, test
      -> testPathState(test.tree, " +'root'\n" + CHILDREN, ()
      -> test.collapse(path, ()
      -> testPathState1(test.tree, ()
      -> test.fireStructureChanged(path)))));
  }

  /** Fires a structure-changed event for an expanded node with loaded children. */
  @Test
  public void testExpandedNodeUpdateIfChildrenLoaded() {
    TreeNode color = createColorNode();
    TreeNode digit = createDigitNode();
    TreeNode greek = createGreekNode();
    TreeNode root = new Node("root", color, digit, greek);
    TreePath path = new TreePath(root);
    testAsync(() -> root, test
      -> testPathState(test.tree, " +'root'\n" + CHILDREN, ()
      -> test.fireStructureChanged(path)));
  }
  /** Creates a childless root node. */
  @NotNull
  private static TreeNode createRoot() {
    return new Node("root");
  }

  /** Creates a "color" node with three leaf children. */
  @NotNull
  private static TreeNode createColorNode() {
    return new Node("color", "red", "green", "blue");
  }

  /** Creates a "digit" node with nine leaf children. */
  @NotNull
  private static TreeNode createDigitNode() {
    return new Node("digit", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine");
  }

  /** Creates a "greek" node with five leaf children. */
  @NotNull
  private static TreeNode createGreekNode() {
    return new Node("greek", "alpha", "beta", "gamma", "delta", "epsilon");
  }
  /** Asserts the tree's rendered path state equals {@code state}, then runs {@code task}. */
  private static void testPathState(@NotNull JTree tree, @NotNull String state, @NotNull Runnable task) {
    assertEquals("unexpected tree state", state, getPathState(tree));
    task.run();
  }

  /** Asserts the tree is empty (zero rows), then runs {@code task}. */
  private static void testPathState0(@NotNull JTree tree, @NotNull Runnable task) {
    assert 0 == tree.getRowCount() : "tree should have no nodes";
    testPathState(tree, "", task);
  }

  /** Asserts the tree shows exactly the collapsed root, then runs {@code task}. */
  private static void testPathState1(@NotNull JTree tree, @NotNull Runnable task) {
    assert 1 == tree.getRowCount() : "tree should have only one node";
    testPathState(tree, "  'root'\n", task);
  }

  /** Applies {@code consumer} to the path of every visible row, top to bottom. */
  private static void forEachRow(JTree tree, Consumer<TreePath> consumer) {
    int count = tree.getRowCount();
    for (int row = 0; row < count; row++) {
      consumer.accept(tree.getPathForRow(row));
    }
  }
  /** Renders the tree's visible rows as a multi-line string for state comparison. */
  @NotNull
  private static String getPathState(JTree tree) {
    StringBuilder sb = new StringBuilder();
    forEachRow(tree, path -> addState(sb, tree, path));
    return sb.toString();
  }

  /**
   * Appends one row: '[' .. ']' marks selection, depth becomes leading spaces,
   * '+' marks an expanded node, then the node's string form.
   */
  private static void addState(StringBuilder sb, JTree tree, TreePath path) {
    boolean expanded = tree.isExpanded(path);
    boolean selected = tree.isPathSelected(path);
    sb.append(selected ? '[' : ' ');
    int count = path.getPathCount();
    while (0 < count--) sb.append(" ");
    sb.append(expanded ? '+' : ' ');
    sb.append(path.getLastPathComponent());
    if (selected) sb.append(']');
    sb.append("\n");
  }
/**
 * Runs the scenario against every invoker type (EDT, single background
 * thread, background pool), both with and without the "loading" placeholder
 * node, and with both a fast and a slow underlying model.
 */
private static void testAsync(Supplier<TreeNode> root, @NotNull Consumer<ModelTest> consumer) {
testAsync(root, consumer, false);
testAsync(root, consumer, true);
}
private static void testAsync(Supplier<TreeNode> root, @NotNull Consumer<ModelTest> consumer, boolean showLoadingNode) {
testEventDispatchThread(root, consumer, showLoadingNode);
testBackgroundThread(root, consumer, showLoadingNode);
testBackgroundPool(root, consumer, showLoadingNode);
}
private static void testEventDispatchThread(Supplier<TreeNode> root, Consumer<ModelTest> consumer, boolean showLoadingNode) {
testEventDispatchThread(root, consumer, showLoadingNode, TreeTest.FAST);
testEventDispatchThread(root, consumer, showLoadingNode, TreeTest.SLOW);
}
// NOTE(review): unlike the two sibling overloads below, this one does not
// guard against a null consumer; that guard is dead code anyway because
// testAsync declares the consumer @NotNull -- consider unifying the three.
private static void testEventDispatchThread(Supplier<TreeNode> root, Consumer<ModelTest> consumer, boolean showLoadingNode, int delay) {
new AsyncTest(showLoadingNode, new EventDispatchThreadModel(delay, root)).start(consumer, getSecondsToWait(delay));
}
private static void testBackgroundThread(Supplier<TreeNode> root, Consumer<ModelTest> consumer, boolean showLoadingNode) {
testBackgroundThread(root, consumer, showLoadingNode, TreeTest.FAST);
testBackgroundThread(root, consumer, showLoadingNode, TreeTest.SLOW);
}
private static void testBackgroundThread(Supplier<TreeNode> root, Consumer<ModelTest> consumer, boolean showLoadingNode, int delay) {
if (consumer != null) new AsyncTest(showLoadingNode, new BackgroundThreadModel(delay, root)).start(consumer, getSecondsToWait(delay));
}
private static void testBackgroundPool(Supplier<TreeNode> root, Consumer<ModelTest> consumer, boolean showLoadingNode) {
testBackgroundPool(root, consumer, showLoadingNode, TreeTest.FAST);
testBackgroundPool(root, consumer, showLoadingNode, TreeTest.SLOW);
}
private static void testBackgroundPool(Supplier<TreeNode> root, Consumer<ModelTest> consumer, boolean showLoadingNode, int delay) {
if (consumer != null) new AsyncTest(showLoadingNode, new BackgroundPoolModel(delay, root)).start(consumer, getSecondsToWait(delay));
}
/**
 * Prints the elapsed milliseconds since the given start timestamp,
 * prefixed with {@code prefix}, when PRINT diagnostics are enabled.
 */
private static void printTime(String prefix, long time) {
  long elapsed = System.currentTimeMillis() - time;
  if (PRINT) {
    System.out.println(prefix + elapsed + " ms");
  }
}
/**
 * Runs {@code task} on the EDT once the async model has been observed idle
 * on three consecutive EDT cycles (count runs 0, 1, 2). Any observation of
 * the model still processing resets the idle counter back to 0.
 */
private static void invokeWhenProcessingDone(@NotNull Runnable task, @NotNull AsyncTreeModel model, long time, int count) {
//noinspection SSBasedInspection
SwingUtilities.invokeLater(() -> {
if (model.isProcessing()) {
invokeWhenProcessingDone(task, model, time, 0);
}
else if (count < 2) {
invokeWhenProcessingDone(task, model, time, count + 1);
}
else {
printTime("wait for ", time);
task.run();
}
});
}
/**
 * @param delay a delay used to create a slow tree model
 * @return a maximal time in seconds allowed for the test
 */
private static int getSecondsToWait(int delay) {
// 20 seconds of slack on top of the model's artificial per-query delay
return delay + 20;
}
/**
 * Test harness: builds a JTree for the given model, drives the scenario
 * callbacks from the correct threads, and completes (or fails) an
 * AsyncPromise that {@link #start} blocks on until the scenario is done.
 */
private static class ModelTest {
// resolved by done(); failed via wrap()/resolve()/visit() error handlers
private final AsyncPromise<String> promise = new AsyncPromise<>();
private final TreeModel model;
// created on the Swing thread in start(); volatile so the test thread sees it
private volatile JTree tree;
private ModelTest(long delay, Supplier<TreeNode> root) {
this(new SlowModel(delay, root));
}
private ModelTest(TreeModel model) {
this.model = model;
}
// hook for subclasses to wrap the model before it is given to the tree
// (AsyncTest wraps it in an AsyncTreeModel)
protected TreeModel createModelForTree(TreeModel model, Disposable disposable) {
return model;
}
/**
 * Builds the tree on the EDT, hands control to the scenario once the
 * model is idle, then blocks the calling (non-EDT) thread until the
 * scenario completes or the timeout expires.
 */
void start(@NotNull Consumer<ModelTest> consumer, int seconds) {
if (PRINT) System.out.println("start " + toString());
assert !SwingUtilities.isEventDispatchThread() : "test should be started on the main thread";
long time = System.currentTimeMillis();
Disposable disposable = Disposer.newDisposable();
runOnSwingThread(() -> {
tree = new JTree(createModelForTree(model, disposable));
runOnSwingThreadWhenProcessingDone(() -> consumer.accept(this));
});
try {
promise.blockingGet(seconds, SECONDS);
}
catch (Exception exception) {
//noinspection InstanceofCatchParameter because of Kotlin
if (exception instanceof TimeoutException) {
// dump all threads to diagnose the hang before failing the test
System.err.println(dumpThreadsToString());
fail(seconds + " seconds is not enough for " + toString());
}
throw exception;
}
finally {
Disposer.dispose(disposable);
printTime("done in ", time);
if (PRINT) System.out.println();
}
}
// marks the scenario as successfully finished
void done() {
promise.setResult(null);
}
// routes any throwable from a task into the promise so start() fails fast
@NotNull
private Runnable wrap(@NotNull Runnable task) {
return () -> {
try {
task.run();
}
catch (Throwable throwable) {
promise.setError(throwable);
}
};
}
// toggles root visibility, then continues once the model is idle
private void setRootVisible(boolean visible, @NotNull Runnable task) {
tree.setRootVisible(visible);
runOnSwingThreadWhenProcessingDone(task);
}
// expands the path, then continues once the model is idle
private void expand(@NotNull TreePath path, @NotNull Runnable task) {
tree.expandPath(path);
runOnSwingThreadWhenProcessingDone(task);
}
// collapses the path, then continues once the model is idle
private void collapse(@NotNull TreePath path, @NotNull Runnable task) {
tree.collapsePath(path);
runOnSwingThreadWhenProcessingDone(task);
}
/**
 * Fires treeStructureChanged for {@code path} on the model's thread and
 * completes the test once the tree's (possibly async) model has
 * delivered an event for the same path on the EDT.
 */
private void fireStructureChanged(@NotNull TreePath path) {
runOnSwingThread(() -> {
tree.getModel().addTreeModelListener(new TreeModelAdapter() {
@Override
protected void process(@NotNull TreeModelEvent event, @NotNull EventType type) {
assertEquals("unexpected tree path", path, event.getTreePath());
//noinspection SSBasedInspection
SwingUtilities.invokeLater(ModelTest.this::done);
}
});
runOnModelThread(() -> {
TreeModelEvent event = new TreeModelEvent(model, path);
TreeModelListener[] listeners = ((DefaultTreeModel)model).getTreeModelListeners();
for (TreeModelListener listener : listeners) listener.treeStructureChanged(event);
});
});
}
// mutates the model on its own thread, then continues once the tree is idle
private void updateModelAndWait(Consumer<TreeModel> consumer, @NotNull Runnable task) {
runOnModelThread(() -> {
consumer.accept(model);
runOnSwingThreadWhenProcessingDone(task);
});
}
// runs a task on the model's own invoker when it has one, otherwise on the EDT
private void runOnModelThread(@NotNull Runnable task) {
if (model instanceof InvokerSupplier) {
InvokerSupplier supplier = (InvokerSupplier)model;
supplier.getInvoker().runOrInvokeLater(wrap(task));
}
else {
runOnSwingThread(task);
}
}
// runs a task on the EDT, inline when already on it
private void runOnSwingThread(@NotNull Runnable task) {
if (SwingUtilities.isEventDispatchThread()) {
wrap(task).run();
}
else {
//noinspection SSBasedInspection
SwingUtilities.invokeLater(wrap(task));
}
}
// defers the task until the async model reports it is idle; plain models
// have no background processing, so the task runs right away on the EDT
private void runOnSwingThreadWhenProcessingDone(@NotNull Runnable task) {
TreeModel model = tree.getModel();
if (model instanceof AsyncTreeModel) {
invokeWhenProcessingDone(wrap(task), (AsyncTreeModel)model, System.currentTimeMillis(), 0);
}
else {
runOnSwingThread(task);
}
}
// resolves a path through the async model; failures fail the whole test
private void resolve(@NotNull TreePath path, @NotNull Consumer<TreePath> consumer) {
AsyncTreeModel model = (AsyncTreeModel)tree.getModel();
model.resolve(path).onError(promise::setError).onSuccess(consumer);
}
// visits the async model's tree; failures fail the whole test
private void visit(@NotNull TreeVisitor visitor, boolean allowLoading, @NotNull Consumer<TreePath> consumer) {
AsyncTreeModel model = (AsyncTreeModel)tree.getModel();
model.accept(visitor, allowLoading).onError(promise::setError).onSuccess(consumer);
}
@Override
public String toString() {
return model.toString();
}
}
/**
 * ModelTest variant that wraps the model in an AsyncTreeModel, optionally
 * showing the "loading" placeholder node while children are being fetched.
 */
private static class AsyncTest extends ModelTest {
private final boolean showLoadingNode;
private AsyncTest(boolean showLoadingNode, TreeModel model) {
super(model);
this.showLoadingNode = showLoadingNode;
}
@Override
protected TreeModel createModelForTree(TreeModel model, Disposable disposable) {
return new AsyncTreeModel(model, showLoadingNode, disposable);
}
@Override
public String toString() {
String string = super.toString();
if (showLoadingNode) string = "show loading node " + string;
return string;
}
}
/**
 * DefaultTreeModel that sleeps in every query and, when the model is driven
 * by an invoker, randomly throws ProcessCanceledException to simulate a
 * cancelled model access.
 */
private static class SlowModel extends DefaultTreeModel implements Disposable {
// artificial per-query delay in milliseconds; 0 disables sleeping
private final long delay;
private SlowModel(long delay, Supplier<TreeNode> root) {
super(root == null ? null : root.get());
this.delay = delay;
}
// slows down (and, with probability THRESHOLD, cancels) model access
private void pause() {
if (this instanceof InvokerSupplier && THRESHOLD > Math.random()) {
// sometimes throw an exception to cancel current operation
if (PRINT) System.out.println("interrupt access to model:" + toString());
throw new ProcessCanceledException();
}
if (delay > 0) {
try {
Thread.sleep(delay);
}
catch (InterruptedException ignored) {
// deliberately ignored: the artificial delay is best-effort only
}
}
}
@Override
public final Object getRoot() {
pause();
return super.getRoot();
}
@Override
public final Object getChild(Object parent, int index) {
if (index == 0) pause(); // do not pause for every child
return super.getChild(parent, index);
}
@Override
public final int getChildCount(Object parent) {
pause();
return super.getChildCount(parent);
}
@Override
public final boolean isLeaf(Object node) {
pause();
return super.isLeaf(node);
}
@Override
public final int getIndexOfChild(Object parent, Object child) {
pause();
return super.getIndexOfChild(parent, child);
}
@Override
public final void dispose() {
// nothing to release; implemented so the model can serve as a Disposable
}
@Override
public String toString() {
return getClass().getSimpleName() + "+" + delay + "ms";
}
}
/** SlowModel whose invoker runs model queries on the event dispatch thread. */
private static final class EventDispatchThreadModel extends SlowModel implements InvokerSupplier {
private final Invoker invoker = new Invoker.EDT(this);
private EventDispatchThreadModel(long delay, Supplier<TreeNode> root) {
super(delay, root);
}
@NotNull
@Override
public Invoker getInvoker() {
return invoker;
}
}
/** SlowModel whose invoker runs model queries on a single background thread. */
private static final class BackgroundThreadModel extends SlowModel implements InvokerSupplier {
private final Invoker invoker = new Invoker.BackgroundThread(this);
private BackgroundThreadModel(long delay, Supplier<TreeNode> root) {
super(delay, root);
}
@NotNull
@Override
public Invoker getInvoker() {
return invoker;
}
}
/** SlowModel whose invoker runs model queries on a background thread pool. */
private static final class BackgroundPoolModel extends SlowModel implements InvokerSupplier {
private final Invoker invoker = new Invoker.BackgroundPool(this);
private BackgroundPoolModel(long delay, Supplier<TreeNode> root) {
super(delay, root);
}
@NotNull
@Override
public Invoker getInvoker() {
return invoker;
}
}
/**
 * DefaultMutableTreeNode whose equality is identity-based when
 * {@code mutable} is false, or based on the user object when true.
 * Children passed as plain objects are wrapped into child Nodes that
 * inherit the same mutability.
 */
private static class Node extends DefaultMutableTreeNode {
private final boolean mutable;
private Node(String content, boolean mutable) {
this(mutable, content, EMPTY_OBJECT_ARRAY);
}
private Node(String content, Object... children) {
this(false, content, children);
}
private Node(boolean mutable, Object content, Object... children) {
super(content);
this.mutable = mutable;
for (Object child : children) {
add(child instanceof MutableTreeNode
? (MutableTreeNode)child
: new Node(mutable, child, EMPTY_OBJECT_ARRAY));
}
}
@Override
public int hashCode() {
if (!mutable) return super.hashCode();
Object content = getUserObject();
return content == null ? 0 : content.hashCode();
}
// NOTE(review): equality is asymmetric when the two sides differ in
// mutability (a mutable node never equals an immutable one, while the
// reverse falls back to identity); the tests only compare like with like.
@Override
public boolean equals(Object object) {
if (!mutable) return super.equals(object);
if (object instanceof Node) {
Node node = (Node)object;
if (node.mutable) return Objects.equals(getUserObject(), node.getUserObject());
}
return false;
}
@Override
public String toString() {
Object content = getUserObject();
if (content == null) return "null";
return "'" + content + "'";
}
}
@Test
public void testNodePreservingOnEventDispatchThread() {
// run both with and without the "loading" placeholder node
testNodePreservingOnEventDispatchThread(false);
testNodePreservingOnEventDispatchThread(true);
}
// node-preserving scenario against a GroupModel driven on the EDT
private static void testNodePreservingOnEventDispatchThread(boolean showLoadingNode) {
testNodePreserving(showLoadingNode, new GroupModel() {
private final Invoker invoker = new Invoker.EDT(this);
@NotNull
@Override
public Invoker getInvoker() {
return invoker;
}
});
}
@Test
public void testNodePreservingOnBackgroundThread() {
testNodePreservingOnBackgroundThread(false);
testNodePreservingOnBackgroundThread(true);
}
// node-preserving scenario against a GroupModel driven on one background thread
private static void testNodePreservingOnBackgroundThread(boolean showLoadingNode) {
testNodePreserving(showLoadingNode, new GroupModel() {
private final Invoker invoker = new Invoker.BackgroundThread(this);
@NotNull
@Override
public Invoker getInvoker() {
return invoker;
}
});
}
@Test
public void testNodePreservingOnBackgroundPool() {
testNodePreservingOnBackgroundPool(false);
testNodePreservingOnBackgroundPool(true);
}
// node-preserving scenario against a GroupModel driven on a background pool
private static void testNodePreservingOnBackgroundPool(boolean showLoadingNode) {
testNodePreserving(showLoadingNode, new GroupModel() {
private final Invoker invoker = new Invoker.BackgroundPool(this);
@NotNull
@Override
public Invoker getInvoker() {
return invoker;
}
});
}
/**
 * Re-groups the model repeatedly (first -> second -> no group -> third)
 * and checks after every change that the visible tree matches the
 * expected structure, i.e. existing nodes were preserved and re-linked.
 */
private static void testNodePreserving(boolean showLoadingNode, @NotNull GroupModel model) {
new AsyncTest(showLoadingNode, model).start(test -> testPathState(test.tree, " +root\n node\n", ()
-> testNodePreserving(test, model, "first", ()
-> testNodePreserving(test, model, "second", ()
-> testNodePreserving(test, model, null, ()
-> testNodePreserving(test, model, "third", test::done))))), 10);
}
// applies a single group change, makes every path visible, and asserts the
// resulting visible tree state before running the continuation
private static void testNodePreserving(@NotNull ModelTest test, @NotNull GroupModel model, Object group, @NotNull Runnable task) {
model.setGroup(group, () -> test.runOnSwingThreadWhenProcessingDone(() -> test.visit(path -> {
test.tree.makeVisible(path);
return TreeVisitor.Action.CONTINUE;
}, true, done -> testPathState(test.tree, group != null
? " +root\n +" + group + "\n +node\n leaf\n"
: " +root\n +node\n leaf\n", task))));
}
/**
 * Model for the chain root -> [group] -> node -> leaf where the optional
 * group level can be replaced at runtime via {@link #setGroup}. The nodes
 * are StringBuilder instances on purpose: their equality is identity-based,
 * so a correct async model must keep the very same objects across
 * structure changes.
 */
static abstract class GroupModel extends AbstractTreeModel implements InvokerSupplier {
private final Object myRoot = new StringBuilder("root");
private final Object myNode = new StringBuilder("node");
private final Object myLeaf = new StringBuilder("leaf");
// current optional group level; null means no group level at all
private volatile Object myGroup;
// switches the group level on the model's invoker and notifies listeners
public void setGroup(Object group, @NotNull Runnable task) {
getInvoker().runOrInvokeLater(() -> {
myGroup = group;
treeStructureChanged(null, null, null);
task.run();
});
}
@Override
public final Object getRoot() {
return myRoot;
}
@Override
public final Object getChild(Object parent, int index) {
// every node has at most one child, so only index 0 is ever valid
if (index == 0) {
Object group = myGroup;
if (group == null) {
if (myRoot.equals(parent)) return myNode;
}
else {
if (myRoot.equals(parent)) return group;
if (group.equals(parent)) return myNode;
}
if (myNode.equals(parent)) return myLeaf;
}
throw new IllegalStateException();
}
@Override
public final int getChildCount(Object parent) {
Object group = myGroup;
if (myRoot.equals(parent) || group != null && group.equals(parent)) return 1;
if (myNode.equals(parent)) return 1;
if (myLeaf.equals(parent)) return 0;
throw new IllegalStateException();
}
@Override
public final boolean isLeaf(Object node) {
Object group = myGroup;
if (myRoot.equals(node) || group != null && group.equals(node)) return false;
if (myNode.equals(node)) return false;
if (myLeaf.equals(node)) return true;
throw new IllegalStateException();
}
@Override
public final int getIndexOfChild(Object parent, Object child) {
Object group = myGroup;
if (group == null) {
if (myRoot.equals(parent) && myNode.equals(child)) return 0;
}
else {
if (myRoot.equals(parent) && group.equals(child)) return 0;
if (group.equals(parent) && myNode.equals(child)) return 0;
}
if (myNode.equals(parent) && myLeaf.equals(child)) return 0;
throw new IllegalStateException();
}
@Override
public void valueForPathChanged(TreePath path, Object value) {
// the model is read-only for these tests
throw new IllegalStateException();
}
@Override
public String toString() {
return getClass().getName();
}
}
}
| |
/**
* SOSExcellence S.A. de C.V. all rights reserved 2016.
*/
package com.biancco.admin.service.impl;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpSession;
import org.apache.commons.lang.builder.ReflectionToStringBuilder;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.servlet.ModelAndView;
import com.biancco.admin.app.exception.DBException;
import com.biancco.admin.app.util.BianccoConstants;
import com.biancco.admin.model.catalog.RoleSimpleRecord;
import com.biancco.admin.model.company.CompanySimpleRecord;
import com.biancco.admin.model.employee.EmployeeSimpleRecord;
import com.biancco.admin.model.view.CompanyModuleView;
import com.biancco.admin.model.view.EmployeeModuleView;
import com.biancco.admin.model.view.FolderView;
import com.biancco.admin.model.view.InitialView;
import com.biancco.admin.model.view.WorkModuleView;
import com.biancco.admin.persistence.dao.CompanyDAO;
import com.biancco.admin.persistence.dao.EmployeeDAO;
import com.biancco.admin.persistence.dao.RoleDAO;
import com.biancco.admin.persistence.model.Company;
import com.biancco.admin.persistence.model.Employee;
import com.biancco.admin.persistence.model.FolderType;
import com.biancco.admin.persistence.model.PermissionType;
import com.biancco.admin.service.CommonService;
import com.biancco.admin.service.FolderService;
/**
* Common service implementation.
*
* @author SOSExcellence.
*/
/**
 * Common service implementation: resolves the view for each module, loads
 * folder trees and role catalogs, and maintains the per-session view
 * history used for back-navigation.
 *
 * @author SOSExcellence.
 */
public class CommonServiceImpl implements CommonService {
    /**
     * Logger.
     * NOTE(review): this is the root logger; a per-class logger would be more
     * conventional, but switching would change the logging category, so it is
     * kept as-is.
     */
    private Logger logger = Logger.getRootLogger();
    /**
     * Folder service.
     */
    @Autowired
    private FolderService folderService;
    /**
     * Employee DAO.
     */
    @Autowired
    private EmployeeDAO employeeDAO;
    /**
     * Role DAO.
     */
    @Autowired
    private RoleDAO roleDAO;
    /**
     * Company DAO.
     */
    @Autowired
    private CompanyDAO companyDAO;
    /**
     * The main page for each module (module name -> view name).
     */
    private static final Map<String, String> pages;
    /**
     * initialize pages.
     */
    static {
        pages = new HashMap<String, String>();
        pages.put("company", "list");
        pages.put("employee", "list");
        pages.put("work", "list");
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ModelAndView getViewByModule(String module, HttpSession session) throws DBException {
        // NOTE(review): pages.get(module) returns null for an unknown module,
        // producing a view name ending in "null" -- confirm callers only pass
        // "company", "employee" or "work".
        ModelAndView view = new ModelAndView("/page/" + module + "/" + pages.get(module));
        // attach the initial model for the requested module
        Object info = this.getInitialInformationByModule(module, session);
        view.addObject(BianccoConstants.MODEL_ATTRIBUTE, info);
        return view;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public FolderView getTreeFolder(String type, long ownerModuleId, HttpSession session) throws DBException {
        // resolve the folder type from its name
        FolderType fType = FolderType.fromName(type);
        logger.info(ReflectionToStringBuilder.toString(fType));
        logger.info(ReflectionToStringBuilder.toString(session.getAttributeNames()));
        // the permission for the owning module is stored in the session
        PermissionType pType = (PermissionType) session.getAttribute(fType.getOwnerModule());
        // build the view
        FolderView fv = new FolderView();
        fv.setpType(pType);
        fv.setOwnerModuleId(ownerModuleId);
        // load the folder tree for the requested owner
        fv.setFolder(this.folderService.getTreeFolderByType(fType, ownerModuleId));
        return fv;
    }

    /**
     * Gets initial information by module view.
     *
     * @param module The module name ("employee", "company" or "work").
     * @param session HTTP session holding the user's permissions.
     * @return The populated module view, or null for an unknown module.
     * @throws DBException on data-access failure.
     */
    private Object getInitialInformationByModule(String module, HttpSession session) throws DBException {
        // the permission type for the module is kept in the session
        PermissionType pType = (PermissionType) session.getAttribute(module);
        Object info = null;
        if ("employee".equals(module)) {
            info = new EmployeeModuleView();
            this.setEmployees((EmployeeModuleView) info);
        } else if ("company".equals(module)) {
            info = new CompanyModuleView();
            this.setCompanies((CompanyModuleView) info, session);
        } else if ("work".equals(module)) {
            info = new WorkModuleView();
            this.setWorks((WorkModuleView) info, session);
        }
        // decorate the view with the user's permission
        this.setPermission(info, pType);
        return info;
    }

    /**
     * Sets all employees on the employee module view.
     *
     * @param info The view to populate.
     * @throws DBException on data-access failure.
     */
    private void setEmployees(EmployeeModuleView info) throws DBException {
        List<EmployeeSimpleRecord> result = this.employeeDAO.getAll();
        info.setEmployees(result);
    }

    /**
     * Sets the companies visible to the current user on the company module view.
     *
     * @param info The view to populate.
     * @param session HTTP session holding the logged-in employee.
     * @throws DBException on data-access failure.
     */
    private void setCompanies(CompanyModuleView info, HttpSession session) throws DBException {
        Employee emp = (Employee) session.getAttribute(BianccoConstants.ATTR_USER);
        // superintendents and residents only see their own companies
        String role = emp.getRole().getName();
        List<CompanySimpleRecord> result;
        if ("SUPERINTENDENTE".equals(role)) {
            result = this.companyDAO.getCompanyBySuperintendente(emp.getIdEmployee());
        } else if ("RESIDENTE".equals(role)) {
            result = this.companyDAO.getCompanyByResidente(emp.getIdEmployee());
        } else {
            result = this.companyDAO.getAll();
        }
        info.setCompanies(result);
    }

    /**
     * Sets the currently selected company on the work module view.
     *
     * @param info The view to populate.
     * @param session HTTP session holding the selected company id.
     * @throws DBException on data-access failure.
     */
    private void setWorks(WorkModuleView info, HttpSession session) throws DBException {
        Long idCompany = (Long) session.getAttribute(BianccoConstants.ID_COMPANY);
        Company comp = this.companyDAO.getById(idCompany);
        info.setCompany(comp);
    }

    /**
     * Sets the permission type on the info model via its setpType method.
     *
     * @param info Info model; may be null for an unknown module.
     * @param pType Permission type.
     */
    private void setPermission(Object info, PermissionType pType) {
        if (info == null) {
            // unknown module: nothing to decorate (previously this raised an
            // NPE that was silently swallowed by the catch below)
            return;
        }
        try {
            Method m = info.getClass().getDeclaredMethod("setpType", PermissionType.class);
            m.invoke(info, pType);
        } catch (Exception e) {
            this.logger.error("Error on set permission", e);
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public List<RoleSimpleRecord> getRoles(boolean enabledOnly) throws DBException {
        // Fix: the previous implementation cached the result in a static list
        // while ignoring the enabledOnly flag, so the first call poisoned the
        // cache for every later call with the other flag (and the unguarded
        // static was not thread-safe). Always delegate to the DAO instead.
        return this.roleDAO.getAll(enabledOnly);
    }

    /**
     * {@inheritDoc}
     */
    @SuppressWarnings("unchecked")
    @Override
    public ModelAndView backToView(HttpSession session, Integer indexView) {
        // get the navigation history for this session
        LinkedList<ModelAndView> history = (LinkedList<ModelAndView>) session
                .getAttribute(BianccoConstants.VIEW_HISTORY);
        int size = history.size();
        // number of views to drop from the tail of the history
        // (simplified from the original "size - (size - indexView)")
        int diff = indexView;
        if (indexView == 0 || (diff == size - 1)) {
            // going back to the first view: rebuild the history around the main view
            ModelAndView mainView = history.get(0);
            history = new LinkedList<ModelAndView>();
            history.add(this.getMainView(mainView));
        } else {
            // drop the views that were navigated past
            for (int index = 0; index < diff; index++) {
                history.removeLast();
            }
        }
        // store the updated history back into the session
        session.removeAttribute(BianccoConstants.VIEW_HISTORY);
        session.setAttribute(BianccoConstants.VIEW_HISTORY, history);
        // the new current view is the last one remaining
        return history.getLast();
    }

    /**
     * {@inheritDoc}
     */
    @SuppressWarnings("unchecked")
    @Override
    public void addViewToHistory(ModelAndView view, boolean updatePreviousView, HttpSession session) {
        LinkedList<ModelAndView> history;
        // create the history lazily on first use
        if (session.getAttribute(BianccoConstants.VIEW_HISTORY) == null) {
            history = new LinkedList<ModelAndView>();
        } else {
            history = (LinkedList<ModelAndView>) session.getAttribute(BianccoConstants.VIEW_HISTORY);
        }
        if (updatePreviousView) {
            // Fix: guard the removals; the previous code called removeLast()
            // twice unconditionally and threw NoSuchElementException when the
            // history held fewer than two views.
            for (int i = 0; i < 2 && !history.isEmpty(); i++) {
                history.removeLast();
            }
        }
        // append the new view
        history.add(view);
        // store the updated history back into the session
        session.removeAttribute(BianccoConstants.VIEW_HISTORY);
        session.setAttribute(BianccoConstants.VIEW_HISTORY, history);
    }

    /**
     * Gets the main view content rebuilt around the modules page.
     *
     * @param mainView Main page whose model is reused.
     * @return Main view content.
     */
    private ModelAndView getMainView(ModelAndView mainView) {
        // reuse the initial info from the previous main view
        InitialView iView = (InitialView) mainView.getModel().get(BianccoConstants.MODEL_ATTRIBUTE);
        mainView = new ModelAndView("/page/modules");
        mainView.addObject(BianccoConstants.MODEL_ATTRIBUTE, iView);
        return mainView;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.tracing;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsTracer;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
import org.apache.htrace.core.Sampler;
import org.apache.htrace.core.Span;
import org.apache.htrace.core.TraceScope;
import org.apache.htrace.core.Tracer;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * Verifies that HDFS client reads and writes emit the expected HTrace
 * spans (captured by {@code SetSpanReceiver}), and that no spans are
 * recorded while tracing is disabled.
 */
public class TestTracing {
  private static MiniDFSCluster cluster;
  private static DistributedFileSystem dfs;
  // Fix: removed the unused field "private Tracer prevTracer;" -- it was
  // never read or written anywhere in this class.
  private final static Configuration TRACING_CONF;
  private final static Configuration NO_TRACING_CONF;
  static {
    // small block size so test files span several blocks without being large
    NO_TRACING_CONF = new Configuration();
    NO_TRACING_CONF.setLong("dfs.blocksize", 100 * 1024);
    // tracing configuration: capture every span into SetSpanReceiver
    TRACING_CONF = new Configuration(NO_TRACING_CONF);
    TRACING_CONF.set(CommonConfigurationKeys.FS_CLIENT_HTRACE_PREFIX +
        Tracer.SPAN_RECEIVER_CLASSES_KEY,
        SetSpanReceiver.class.getName());
    TRACING_CONF.set(CommonConfigurationKeys.FS_CLIENT_HTRACE_PREFIX +
        Tracer.SAMPLER_CLASSES_KEY, "AlwaysSampler");
  }

  @Test
  public void testTracing() throws Exception {
    // write and read without tracing started: no spans may be recorded
    String fileName = "testTracingDisabled.dat";
    writeTestFile(fileName);
    Assert.assertEquals(0, SetSpanReceiver.size());
    readTestFile(fileName);
    Assert.assertEquals(0, SetSpanReceiver.size());
    // pre-create the file used by the traced read, still untraced, so the
    // read scenario only sees read-related spans
    writeTestFile("testReadTraceHooks.dat");
    FsTracer.clear();
    Tracer tracer = FsTracer.get(TRACING_CONF);
    writeWithTracing(tracer);
    readWithTracing(tracer);
  }

  /** Writes a file inside a trace scope and checks the captured spans. */
  private void writeWithTracing(Tracer tracer) throws Exception {
    TraceScope ts = tracer.newScope("testWriteTraceHooks");
    writeTestFile("testWriteTraceHooks.dat");
    ts.close();
    // Fix: dropped the unused startTime/endTime/spanStart/spanEnd locals;
    // unlike readWithTracing, this method never asserted on any of them.
    String[] expectedSpanNames = {
      "testWriteTraceHooks",
      "ClientProtocol#create",
      "ClientNamenodeProtocol#create",
      "ClientProtocol#fsync",
      "ClientNamenodeProtocol#fsync",
      "ClientProtocol#complete",
      "ClientNamenodeProtocol#complete",
      "newStreamForCreate",
      "DFSOutputStream#write",
      "DFSOutputStream#close",
      "dataStreamer",
      "OpWriteBlockProto",
      "ClientProtocol#addBlock",
      "ClientNamenodeProtocol#addBlock"
    };
    SetSpanReceiver.assertSpanNamesFound(expectedSpanNames);
    Map<String, List<Span>> map = SetSpanReceiver.getMap();
    Span s = map.get("testWriteTraceHooks").get(0);
    Assert.assertNotNull(s);
    // Spans homed in the top trace should have the same trace id.
    // Spans having multiple parents (e.g. "dataStreamer" added by HDFS-7054)
    // and children of them are the exception.
    String[] spansInTopTrace = {
      "testWriteTraceHooks",
      "ClientProtocol#create",
      "ClientNamenodeProtocol#create",
      "ClientProtocol#fsync",
      "ClientNamenodeProtocol#fsync",
      "ClientProtocol#complete",
      "ClientNamenodeProtocol#complete",
      "newStreamForCreate",
      "DFSOutputStream#write",
      "DFSOutputStream#close",
    };
    for (String desc : spansInTopTrace) {
      for (Span span : map.get(desc)) {
        Assert.assertEquals(ts.getSpan().getSpanId().getHigh(),
            span.getSpanId().getHigh());
      }
    }
    // test for timeline annotation added by HADOOP-11242
    Assert.assertEquals("called",
        map.get("ClientProtocol#create")
            .get(0).getTimelineAnnotations()
            .get(0).getMessage());
    SetSpanReceiver.clear();
  }

  /** Reads a file inside a trace scope and checks the captured spans. */
  private void readWithTracing(Tracer tracer) throws Exception {
    long startTime = System.currentTimeMillis();
    TraceScope ts = tracer.newScope("testReadTraceHooks");
    readTestFile("testReadTraceHooks.dat");
    ts.close();
    long endTime = System.currentTimeMillis();
    String[] expectedSpanNames = {
      "testReadTraceHooks",
      "ClientProtocol#getBlockLocations",
      "ClientNamenodeProtocol#getBlockLocations",
      "OpReadBlockProto"
    };
    SetSpanReceiver.assertSpanNamesFound(expectedSpanNames);
    // The trace should last about the same amount of time as the test
    Map<String, List<Span>> map = SetSpanReceiver.getMap();
    Span s = map.get("testReadTraceHooks").get(0);
    Assert.assertNotNull(s);
    long spanStart = s.getStartTimeMillis();
    long spanEnd = s.getStopTimeMillis();
    Assert.assertTrue(spanStart - startTime < 100);
    Assert.assertTrue(spanEnd - endTime < 100);
    // dump every span first so all diagnostics survive a failing assertion
    for (Span span : SetSpanReceiver.getSpans()) {
      System.out.println(span.toJson());
    }
    // there should only be one trace id, as everything is homed in the
    // top trace
    for (Span span : SetSpanReceiver.getSpans()) {
      Assert.assertEquals(ts.getSpan().getSpanId().getHigh(),
          span.getSpanId().getHigh());
    }
    SetSpanReceiver.clear();
  }

  /** Writes roughly 1 MB of random alphabetic data to the given path. */
  private void writeTestFile(String testFileName) throws Exception {
    Path filePath = new Path(testFileName);
    FSDataOutputStream stream = dfs.create(filePath);
    for (int i = 0; i < 10; i++) {
      // NOTE(review): getBytes() uses the platform charset; harmless here
      // because the content is ASCII-only alphabetic data.
      byte[] data = RandomStringUtils.randomAlphabetic(102400).getBytes();
      stream.write(data);
    }
    stream.hsync();
    stream.close();
  }

  /** Reads the file in 10 KB chunks, seeking ahead 5 bytes between reads. */
  private void readTestFile(String testFileName) throws Exception {
    Path filePath = new Path(testFileName);
    FSDataInputStream istream = dfs.open(filePath, 10240);
    ByteBuffer buf = ByteBuffer.allocate(10240);
    int count = 0;
    try {
      while (istream.read(buf) > 0) {
        count += 1;
        buf.clear();
        istream.seek(istream.getPos() + 5);
      }
    } catch (IOException ioe) {
      // Ignore this it's probably a seek after eof.
    } finally {
      istream.close();
    }
  }

  /** Spins up a fresh 3-datanode mini cluster before every test. */
  @Before
  public void startCluster() throws IOException {
    cluster = new MiniDFSCluster.Builder(NO_TRACING_CONF)
        .numDataNodes(3)
        .build();
    cluster.waitActive();
    dfs = cluster.getFileSystem();
    SetSpanReceiver.clear();
  }

  /** Tears the cluster down and resets the global FsTracer. */
  @After
  public void shutDown() throws IOException {
    if (cluster != null) {
      cluster.shutdown();
      cluster = null;
    }
    FsTracer.clear();
  }
}
| |
package org.apache.lucene.search;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.Objects;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.ToStringUtils;
/**
* A range query that works on top of the doc values APIs. Such queries are
* usually slow since they do not use an inverted index. However, in the
* dense case where most documents match this query, it <b>might</b> be as
* fast or faster than a regular {@link NumericRangeQuery}.
* @lucene.experimental
*/
public final class DocValuesRangeQuery extends Query {
/** Create a new numeric range query on a numeric doc-values field. The field
 * must have been indexed with either {@link DocValuesType#NUMERIC} or
 * {@link DocValuesType#SORTED_NUMERIC} doc values. A null bound means the
 * range is open-ended on that side. */
public static Query newLongRange(String field, Long lowerVal, Long upperVal, boolean includeLower, boolean includeUpper) {
return new DocValuesRangeQuery(field, lowerVal, upperVal, includeLower, includeUpper);
}
/** Create a new numeric range query on a numeric doc-values field. The field
* must has been indexed with {@link DocValuesType#SORTED} or
* {@link DocValuesType#SORTED_SET} doc values. */
public static Query newBytesRefRange(String field, BytesRef lowerVal, BytesRef upperVal, boolean includeLower, boolean includeUpper) {
return new DocValuesRangeQuery(field, deepCopyOf(lowerVal), deepCopyOf(upperVal), includeLower, includeUpper);
}
private static BytesRef deepCopyOf(BytesRef b) {
if (b == null) {
return null;
} else {
return BytesRef.deepCopyOf(b);
}
}
private final String field;
private final Object lowerVal, upperVal;
private final boolean includeLower, includeUpper;
private DocValuesRangeQuery(String field, Object lowerVal, Object upperVal, boolean includeLower, boolean includeUpper) {
this.field = Objects.requireNonNull(field);
this.lowerVal = lowerVal;
this.upperVal = upperVal;
this.includeLower = includeLower;
this.includeUpper = includeUpper;
}
@Override
public boolean equals(Object obj) {
if (obj instanceof DocValuesRangeQuery == false) {
return false;
}
final DocValuesRangeQuery that = (DocValuesRangeQuery) obj;
return field.equals(that.field)
&& Objects.equals(lowerVal, that.lowerVal)
&& Objects.equals(upperVal, that.upperVal)
&& includeLower == that.includeLower
&& includeUpper == that.includeUpper
&& super.equals(obj);
}
@Override
public int hashCode() {
return Objects.hash(field, lowerVal, upperVal, includeLower, includeUpper, getBoost());
}
@Override
public String toString(String field) {
StringBuilder sb = new StringBuilder();
if (this.field.equals(field) == false) {
sb.append(this.field).append(':');
}
sb.append(includeLower ? '[' : '{');
sb.append(lowerVal == null ? "*" : lowerVal.toString());
sb.append(" TO ");
sb.append(upperVal == null ? "*" : upperVal.toString());
sb.append(includeUpper ? ']' : '}');
sb.append(ToStringUtils.boost(getBoost()));
return sb.toString();
}
@Override
public Query rewrite(IndexReader reader) throws IOException {
if (lowerVal == null && upperVal == null) {
final FieldValueQuery rewritten = new FieldValueQuery(field);
rewritten.setBoost(getBoost());
return rewritten;
}
return this;
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
if (lowerVal == null && upperVal == null) {
throw new IllegalStateException("Both min and max values cannot be null, call rewrite first");
}
return new RandomAccessWeight(DocValuesRangeQuery.this) {
@Override
protected Bits getMatchingDocs(final LeafReaderContext context) throws IOException {
if (lowerVal instanceof Long || upperVal instanceof Long) {
final SortedNumericDocValues values = DocValues.getSortedNumeric(context.reader(), field);
final long min;
if (lowerVal == null) {
min = Long.MIN_VALUE;
} else if (includeLower) {
min = (long) lowerVal;
} else {
min = 1 + (long) lowerVal;
}
final long max;
if (upperVal == null) {
max = Long.MAX_VALUE;
} else if (includeUpper) {
max = (long) upperVal;
} else {
max = -1 + (long) upperVal;
}
if (min > max) {
return null;
}
return new Bits() {
@Override
public boolean get(int doc) {
values.setDocument(doc);
final int count = values.count();
for (int i = 0; i < count; ++i) {
final long value = values.valueAt(i);
if (value >= min && value <= max) {
return true;
}
}
return false;
}
@Override
public int length() {
return context.reader().maxDoc();
}
};
} else if (lowerVal instanceof BytesRef || upperVal instanceof BytesRef) {
final SortedSetDocValues values = DocValues.getSortedSet(context.reader(), field);
final long minOrd;
if (lowerVal == null) {
minOrd = 0;
} else {
final long ord = values.lookupTerm((BytesRef) lowerVal);
if (ord < 0) {
minOrd = -1 - ord;
} else if (includeLower) {
minOrd = ord;
} else {
minOrd = ord + 1;
}
}
final long maxOrd;
if (upperVal == null) {
maxOrd = values.getValueCount() - 1;
} else {
final long ord = values.lookupTerm((BytesRef) upperVal);
if (ord < 0) {
maxOrd = -2 - ord;
} else if (includeUpper) {
maxOrd = ord;
} else {
maxOrd = ord - 1;
}
}
if (minOrd > maxOrd) {
return null;
}
return new Bits() {
@Override
public boolean get(int doc) {
values.setDocument(doc);
for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) {
if (ord >= minOrd && ord <= maxOrd) {
return true;
}
}
return false;
}
@Override
public int length() {
return context.reader().maxDoc();
}
};
} else {
throw new AssertionError();
}
}
};
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.