gt
stringclasses
1 value
context
stringlengths
2.05k
161k
package org.jgraphxplorer.interaction;

import java.awt.event.*;
import java.awt.*;
import javax.vecmath.Point2d;
import org.jgraphxplorer.layout.TGAbstractLens;
import org.jgraphxplorer.listeners.GraphListener;
import org.jgraphxplorer.renderer.IGraphManager;

/**
 * RotateScroll. Allows one to rotate the graph by clicking+dragging. The rotate
 * lens won't work properly unless it's the top lens, because it does not
 * account for distortion from above lenses. Methods for getting lenses above
 * the current one need to be added to TGLensSet to solve this problem.
 *
 * @author Alexander Shapiro
 * @version 1.22-jre1.1 $Id: RotateScroll.java,v 1.2 2002/09/23 18:45:48
 *          ldornbusch Exp $
 */
public class RotateScroll implements GraphListener {

    RotateLens rotateLens;
    /** Current rotation in radians; incrementRotateAngle() keeps it in (-PI, PI]. */
    double rotateAngle;
    RotateDragUI rotateDragUI;
    private DScrollBar rotateSB;
    /** Suppresses listener feedback while the scrollbar is updated programmatically. */
    boolean adjustmentIsInternal;
    private IGraphManager manager;

    // ............

    /**
     * Constructor with TGPanel <tt>tgp</tt>.
     *
     * @param tgp the graph manager to rotate; this object registers itself as a graph listener on it
     */
    public RotateScroll(IGraphManager tgp) {
        manager = tgp;
        rotateAngle = 0;
        rotateLens = new RotateLens();
        rotateDragUI = new RotateDragUI();
        rotateSB = new DScrollBar(Scrollbar.HORIZONTAL, 0, 80, -314, 318);
        rotateSB.addAdjustmentListener(new RotateAdjustmentListener());
        adjustmentIsInternal = false;
        manager.addGraphListener(this);
    }

    public RotateLens getLens() {
        return rotateLens;
    }

    /**
     * @return the scrollbar that controls the rotation
     * @uml.property name="rotateSB"
     */
    public Scrollbar getRotateSB() {
        return rotateSB;
    }

    /**
     * @return the drag UI used for mouse-driven rotation
     * @uml.property name="rotateDragUI"
     */
    public RotateDragUI getRotateDragUI() {
        return rotateDragUI;
    }

    /**
     * Maps the scrollbar position onto a 0..359 degree angle.
     * NOTE(review): both this method and setRotationAngle use 359, so they are
     * mutually consistent, though 360 may have been the original intent.
     *
     * @return the rotation angle in degrees
     */
    public int getRotationAngle() {
        double orientedValue = rotateSB.getValue() - rotateSB.getMinimum();
        double range = rotateSB.getMaximum() - rotateSB.getMinimum() - rotateSB.getVisibleAmount();
        return (int) ((orientedValue / range) * 359);
    }

    /**
     * Positions the scrollbar so that getRotationAngle() reports (approximately) the given angle.
     *
     * @param angle rotation angle in degrees, expected in 0..359
     */
    public void setRotationAngle(int angle) {
        double range = rotateSB.getMaximum() - rotateSB.getMinimum() - rotateSB.getVisibleAmount();
        rotateSB.setValue((int) (angle / 359.0 * range + 0.5) + rotateSB.getMinimum());
    }

    public void graphMoved() {
    } // From GraphListener interface

    /** Resets the rotation to zero. From GraphListener interface. */
    public void graphReset() {
        rotateAngle = 0;
        rotateSB.setValue(0);
    }

    /** Reacts to user-driven scrollbar changes; programmatic updates are suppressed. */
    private class RotateAdjustmentListener implements AdjustmentListener {
        public void adjustmentValueChanged(AdjustmentEvent e) {
            if (!adjustmentIsInternal) {
                // Scrollbar stores centiradians; convert back to radians.
                rotateAngle = rotateSB.getDValue() / 100.0;
                manager.repaintAfterMove();
            }
        }
    }

    /** Scrollbar that additionally tracks its value as a double for sub-unit precision. */
    class DScrollBar extends Scrollbar {

        private static final long serialVersionUID = 6629794876897303060L;

        private double doubleValue;

        DScrollBar(int orient, int val, int vis, int min, int max) {
            super(orient, val, vis, min, max);
            doubleValue = val;
        }

        @Override
        public void setValue(int v) {
            doubleValue = v;
            super.setValue(v);
        }

        /** Sets only the integer scrollbar position, leaving the tracked double untouched. */
        public void setIValue(int v) {
            super.setValue(v);
        }

        /**
         * Sets the double value, clamped to the scrollbar range.
         * BUG FIX: the integer position is now derived from the clamped value;
         * previously the unclamped argument was passed to setIValue, so the int
         * and double views of the value could disagree outside the range.
         */
        public void setDValue(double v) {
            doubleValue = Math.max(getMinimum(), Math.min(getMaximum(), v));
            setIValue((int) doubleValue);
        }

        public double getDValue() {
            return doubleValue;
        }
    }

    /**
     * Returns the polar angle of point (x, y).
     * BUG FIX: uses Math.atan2, which handles x == 0 (and the origin) without the
     * division by zero of the previous atan(y / x) formulation. The result can
     * differ from the old version only by a multiple of 2*PI, which is irrelevant
     * here: the angle is only fed to cos/sin, or used as a difference that
     * incrementRotateAngle() re-normalizes into (-PI, PI].
     *
     * @param x the x coordinate
     * @param y the y coordinate
     * @return the angle in radians, in (-PI, PI]
     */
    double computeAngle(double x, double y) {
        return Math.atan2(y, x);
    }

    /** Lens rotating points around the origin by rotateAngle. */
    class RotateLens extends TGAbstractLens {

        @Override
        protected void applyLens(Point2d p) {
            double currentAngle = computeAngle(p.x, p.y);
            double dist = Math.sqrt((p.x * p.x) + (p.y * p.y));
            p.x = dist * Math.cos(currentAngle + rotateAngle);
            p.y = dist * Math.sin(currentAngle + rotateAngle);
        }

        @Override
        protected void undoLens(Point2d p) {
            double currentAngle = computeAngle(p.x, p.y);
            double dist = Math.sqrt((p.x * p.x) + (p.y * p.y));
            p.x = dist * Math.cos(currentAngle - rotateAngle);
            p.y = dist * Math.sin(currentAngle - rotateAngle);
        }
    }

    /**
     * Adds the given increment to the rotation angle, normalizes it to (-PI, PI],
     * and mirrors the new value into the scrollbar without triggering the listener.
     *
     * @param inc rotation increment in radians
     */
    public void incrementRotateAngle(double inc) {
        rotateAngle += inc;
        if (rotateAngle > Math.PI)
            rotateAngle -= Math.PI * 2;
        if (rotateAngle < -Math.PI)
            rotateAngle += Math.PI * 2;
        adjustmentIsInternal = true;
        rotateSB.setDValue(rotateAngle * 100);
        adjustmentIsInternal = false;
    }

    /** Drag UI rotating the graph around the draw center. Works best if rotate lens is top lens. */
    class RotateDragUI extends TGAbstractDragUI {

        RotateDragUI() {
            super(RotateScroll.this.manager);
        }

        double lastAngle;

        double getMouseAngle(double x, double y) {
            return computeAngle(x - manager.getDrawCenter().x, y - manager.getDrawCenter().y);
        }

        @Override
        public void preActivate() {
        }

        @Override
        public void preDeactivate() {
        }

        @Override
        public void mousePressed(MouseEvent e) {
            lastAngle = getMouseAngle(e.getX(), e.getY());
        }

        @Override
        public void mouseReleased(MouseEvent e) {
        }

        @Override
        public void mouseDragged(MouseEvent e) {
            double currentAngle = getMouseAngle(e.getX(), e.getY());
            incrementRotateAngle(currentAngle - lastAngle);
            lastAngle = currentAngle;
            manager.repaintAfterMove();
        }
    }
}
package nxt.http;

import java.io.IOException;
import java.io.PrintWriter;
import java.io.Writer;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import nxt.Nxt;
import nxt.NxtException;
import nxt.util.JSON;
import nxt.util.Logger;
import org.json.simple.JSONStreamAware;

/**
 * Entry point of the JSON HTTP API. Looks up the "requestType" parameter in the
 * handler registry and streams the handler's JSON result back to the client.
 */
public final class APIServlet extends HttpServlet {

    /** Base class for all API request handlers registered in {@link #apiRequestHandlers}. */
    static abstract class APIRequestHandler {

        /**
         * Processes a single API request.
         *
         * @param request the incoming HTTP request
         * @return the JSON to send back to the client
         * @throws NxtException on application-level failures
         */
        abstract JSONStreamAware processRequest(HttpServletRequest request) throws NxtException;

        /** Whether this handler must be called via POST (overridden by handlers carrying secrets). */
        boolean requirePost() {
            return false;
        }
    }

    // When enabled, handlers that declare requirePost() reject GET requests.
    private static final boolean enforcePost = Nxt.getBooleanProperty("nxt.apiServerEnforcePOST").booleanValue();

    private static final Map<String, APIRequestHandler> apiRequestHandlers;

    static {
        // Typed map instead of the previous raw HashMap; published as unmodifiable.
        Map<String, APIRequestHandler> map = new HashMap<>();
        map.put("assignAlias", AssignAlias.instance);
        map.put("broadcastTransaction", BroadcastTransaction.instance);
        map.put("cancelAskOrder", CancelAskOrder.instance);
        map.put("cancelBidOrder", CancelBidOrder.instance);
        map.put("castVote", CastVote.instance);
        map.put("createPoll", CreatePoll.instance);
        map.put("decodeHallmark", DecodeHallmark.instance);
        map.put("decodeToken", DecodeToken.instance);
        map.put("generateToken", GenerateToken.instance);
        map.put("getAccount", GetAccount.instance);
        map.put("getAccountBlockIds", GetAccountBlockIds.instance);
        map.put("getAccountId", GetAccountId.instance);
        map.put("getAccountPublicKey", GetAccountPublicKey.instance);
        map.put("getAccountTransactionIds", GetAccountTransactionIds.instance);
        map.put("getAlias", GetAlias.instance);
        map.put("getAliasId", GetAliasId.instance);
        map.put("getAliasIds", GetAliasIds.instance);
        map.put("getAliasURI", GetAliasURI.instance);
        map.put("getAsset", GetAsset.instance);
        map.put("getAssetIds", GetAssetIds.instance);
        map.put("getBalance", GetBalance.instance);
        map.put("getBlock", GetBlock.instance);
        map.put("getConstants", GetConstants.instance);
        map.put("getGuaranteedBalance", GetGuaranteedBalance.instance);
        map.put("getMyInfo", GetMyInfo.instance);
        map.put("getPeer", GetPeer.instance);
        map.put("getPeers", GetPeers.instance);
        map.put("getPoll", GetPoll.instance);
        map.put("getPollIds", GetPollIds.instance);
        map.put("getState", GetState.instance);
        map.put("getTime", GetTime.instance);
        map.put("getTrades", GetTrades.instance);
        map.put("getTransaction", GetTransaction.instance);
        map.put("getTransactionBytes", GetTransactionBytes.instance);
        map.put("getUnconfirmedTransactionIds", GetUnconfirmedTransactionIds.instance);
        map.put("getAccountCurrentAskOrderIds", GetAccountCurrentAskOrderIds.instance);
        map.put("getAccountCurrentBidOrderIds", GetAccountCurrentBidOrderIds.instance);
        map.put("getAskOrder", GetAskOrder.instance);
        map.put("getAskOrderIds", GetAskOrderIds.instance);
        map.put("getBidOrder", GetBidOrder.instance);
        map.put("getBidOrderIds", GetBidOrderIds.instance);
        map.put("issueAsset", IssueAsset.instance);
        map.put("listAccountAliases", ListAccountAliases.instance);
        map.put("markHost", MarkHost.instance);
        map.put("placeAskOrder", PlaceAskOrder.instance);
        map.put("placeBidOrder", PlaceBidOrder.instance);
        map.put("sendMessage", SendMessage.instance);
        map.put("sendMoney", SendMoney.instance);
        map.put("startForging", StartForging.instance);
        map.put("stopForging", StopForging.instance);
        map.put("getForging", GetForging.instance);
        map.put("transferAsset", TransferAsset.instance);
        apiRequestHandlers = Collections.unmodifiableMap(map);
    }

    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        process(request, response);
    }

    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        process(request, response);
    }

    /**
     * Shared GET/POST implementation: validates the caller and the request type,
     * invokes the matching handler, and writes the resulting JSON to the response.
     *
     * BUG FIX: the response is now written exactly once, in the finally block,
     * whichever branch selected it. The previous (decompiler-mangled) version
     * wrote error responses inline in each branch AND again in the unconditional
     * finally block, i.e. attempted a second write on an already-closed writer.
     *
     * @param request  the incoming HTTP request
     * @param response the HTTP response to write the JSON result to
     * @throws IOException if writing the response fails
     */
    private void process(HttpServletRequest request, HttpServletResponse response) throws IOException {
        // API responses reflect live state and must never be cached.
        response.setHeader("Cache-Control", "no-cache, no-store, must-revalidate, private");
        response.setHeader("Pragma", "no-cache");
        response.setDateHeader("Expires", 0L);

        JSONStreamAware json = JSON.emptyJSON;
        try {
            // Optional host allow-list for API clients.
            if (API.allowedBotHosts != null && !API.allowedBotHosts.contains(request.getRemoteHost())) {
                json = JSONResponses.ERROR_NOT_ALLOWED;
                return;
            }
            String requestType = request.getParameter("requestType");
            if (requestType == null) {
                json = JSONResponses.ERROR_INCORRECT_REQUEST;
                return;
            }
            APIRequestHandler handler = apiRequestHandlers.get(requestType);
            if (handler == null) {
                json = JSONResponses.ERROR_INCORRECT_REQUEST;
                return;
            }
            if (enforcePost && handler.requirePost() && !"POST".equals(request.getMethod())) {
                json = JSONResponses.POST_REQUIRED;
                return;
            }
            try {
                json = handler.processRequest(request);
            } catch (NxtException | RuntimeException e) {
                Logger.logDebugMessage("Error processing API request", e);
                json = JSONResponses.ERROR_INCORRECT_REQUEST;
            }
        } finally {
            // Single write point for every outcome above.
            response.setContentType("text/plain; charset=UTF-8");
            try (Writer writer = response.getWriter()) {
                json.writeJSONString(writer);
            }
        }
    }
}
package in.raveesh.pacemaker;

import android.app.AlarmManager;
import android.app.IntentService;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.os.Build;
import android.os.IBinder;
import android.os.SystemClock;
import android.support.annotation.NonNull;
import android.util.Log;

import java.io.File;
import java.util.List;

/**
 * Android Service which generates pace to GCM.
 *
 * Service will store all settings. Also if process is master, it will take responsibility to generate paces.
 * Master process is the oldest process in system that can receive broadcast events with action
 * "in.raveesh.pacemaker.ACTION_MANAGE_PACEMAKER".
 * All other processes just store settings. Once the master process is uninstalled from the system, the next
 * oldest process takes responsibility to be master. Older process is - more settings it collected from others,
 * that is why the oldest one will be master.
 *
 * Service should be started
 * - when some app requested to add new pace
 * - when some app requested cancel of pace
 * - when package uninstalled (called from PacemakerReceiver)
 * - when device restarted (called from PacemakerReceiver)
 * - when pace should be generated (called by AlarmManager)
 *
 * @author Badoo
 */
public class PacemakerService extends IntentService {

    public static final String ACTION_MANAGE_PACEMAKER = "in.raveesh.pacemaker.ACTION_MANAGE_PACEMAKER";

    private static final boolean DEBUG = Pacemaker.DEBUG;
    private static final String TAG = Pacemaker.TAG;

    // Broadcasts that nudge Google's push connections to send a heartbeat.
    private static final Intent GTALK_HEART_BEAT_INTENT = new Intent("com.google.android.intent.action.GTALK_HEARTBEAT");
    private static final Intent MCS_MCS_HEARTBEAT_INTENT = new Intent("com.google.android.intent.action.MCS_HEARTBEAT");

    private static final int TYPE_LINEAR = 1;

    private static final int COMMAND_ADD = 1;    // new pace settings
    private static final int COMMAND_PACE = 2;   // time to generate pace
    private static final int COMMAND_SYNC = 3;   // when any package been uninstalled
    private static final int COMMAND_BOOT = 4;   // when device restarted
    private static final int COMMAND_CANCEL = 5; // remove of one pace setting

    private static final String EXTRA_COMMAND = "command";
    private static final String EXTRA_TYPE = "type";
    private static final String EXTRA_DELAY = "delay";
    private static final String EXTRA_PACKAGE_NAME = "package";

    private static final int UNKNOWN_VALUE = 0;

    private Scheduler mScheduler;

    public PacemakerService() {
        super("PacemakerScheduler");
    }

    @Override
    public IBinder onBind(@NonNull Intent intent) {
        throw new UnsupportedOperationException("Not supported");
    }

    /**
     * Dispatches the incoming command extra to the matching handler.
     * Intents without our action (or null intents) are ignored.
     */
    @Override
    protected void onHandleIntent(Intent intent) {
        if (intent == null) {
            // Surprise :)
            return;
        }
        if (!ACTION_MANAGE_PACEMAKER.equals(intent.getAction())) {
            return;
        }
        int command = intent.getIntExtra(EXTRA_COMMAND, UNKNOWN_VALUE);
        int type = intent.getIntExtra(EXTRA_TYPE, UNKNOWN_VALUE);
        switch (command) {
            case COMMAND_ADD:
                onAdd(type, intent);
                break;
            case COMMAND_PACE:
                onPace();
                break;
            case COMMAND_SYNC:
                onSync();
                break;
            case COMMAND_BOOT:
                onBoot();
                break;
            case COMMAND_CANCEL:
                onCancel(type, intent);
                break;
        }
    }

    /** Device restarted: elapsed-realtime based schedules are stale, reset and reschedule. */
    private void onBoot() {
        if (DEBUG) Log.d(TAG, "onBoot");
        getScheduler().resetStartTime();
        scheduleAlarmIfProcessIsMaster();
    }

    /** Drops settings of packages that are no longer installed, rescheduling if anything changed. */
    private void onSync() {
        if (DEBUG) Log.d(TAG, "onSync");
        boolean updated = getScheduler().syncKeys(new Scheduler.KeysValidator() {
            @Override
            public boolean isValid(String key) {
                try {
                    getPackageManager().getPackageInfo(key, 0);
                    return true;
                } catch (PackageManager.NameNotFoundException e) {
                    return false;
                }
            }
        });
        if (updated) {
            scheduleAlarmIfProcessIsMaster();
        }
    }

    private void onAdd(int type, Intent intent) {
        final String packageName = intent.getStringExtra(EXTRA_PACKAGE_NAME);
        switch (type) {
            case TYPE_LINEAR:
                onAddLinear(packageName, intent.getLongExtra(EXTRA_DELAY, UNKNOWN_VALUE));
                break;
        }
    }

    private void onCancel(int type, Intent intent) {
        final String packageName = intent.getStringExtra(EXTRA_PACKAGE_NAME);
        switch (type) {
            case TYPE_LINEAR:
                onCancelLinear(packageName);
                break;
        }
    }

    private void onCancelLinear(String packageName) {
        if (DEBUG) Log.d(TAG, "onCancelLinear from " + packageName);
        if (getScheduler().removeLinearPace(packageName)) {
            scheduleAlarmIfProcessIsMaster();
        }
    }

    private void onAddLinear(String packageName, long delay) {
        if (DEBUG) Log.d(TAG, "onAddLinear from " + packageName);
        if (getScheduler().addLinearPace(packageName, delay)) {
            scheduleAlarmIfProcessIsMaster();
        }
    }

    /**
     * Generates one pace (heartbeat broadcasts) and always re-arms the alarm.
     *
     * BUG FIX: previously this method returned early when DEBUG was set, so debug
     * builds logged "onPace" but never actually sent the heartbeat broadcasts.
     * Logging must not change behavior; the early return was removed.
     */
    private void onPace() {
        try {
            if (DEBUG) {
                Log.d(TAG, "onPace");
            }
            sendBroadcast(GTALK_HEART_BEAT_INTENT);
            sendBroadcast(MCS_MCS_HEARTBEAT_INTENT);
        } finally {
            scheduleAlarmIfProcessIsMaster();
        }
    }

    /**
     * Registers (or cancels) the next pace alarm. Only the master process keeps an
     * alarm registered; every other process cancels its own, since it may have been
     * master in the past.
     */
    private void scheduleAlarmIfProcessIsMaster() {
        final Scheduler scheduler = getScheduler();
        PendingIntent pendingIntent = PendingIntent.getService(this, 0,
                Launcher.createPaceIntent(this), PendingIntent.FLAG_CANCEL_CURRENT);
        AlarmManager alarmManager = (AlarmManager) getSystemService(Context.ALARM_SERVICE);
        long nextTriggerTime = scheduler.getNextTriggerTime();
        if (isMaster() && nextTriggerTime > 0) {
            if (DEBUG) {
                Log.d(TAG, "registering next alarm at " + nextTriggerTime
                        + " which will occur in "
                        + ((nextTriggerTime - SystemClock.elapsedRealtime()) / 1000) + " seconds");
            }
            alarmManager.set(AlarmManager.ELAPSED_REALTIME_WAKEUP, nextTriggerTime, pendingIntent);
        } else {
            if (DEBUG) Log.d(TAG, "cancelling alarms, if they exist");
            alarmManager.cancel(pendingIntent); // We could be master in the past, but now we are not anymore
        }
    }

    /**
     * Determines whether this process is the master: the installed receiver of our
     * action with the earliest first-install time (package name breaks ties).
     *
     * @return true if this package is the elected master
     */
    private boolean isMaster() {
        Intent intent = new Intent(ACTION_MANAGE_PACEMAKER);
        if (Build.VERSION.SDK_INT >= 12) {
            intent.addFlags(Intent.FLAG_INCLUDE_STOPPED_PACKAGES);
        }
        List<ResolveInfo> infos = getPackageManager().queryBroadcastReceivers(intent, 0);
        if (infos == null) {
            return false;
        }
        if (infos.isEmpty()) {
            return false;
        }
        PackageInfo masterPackage = null;
        for (ResolveInfo i : infos) {
            try {
                String packageName = i.activityInfo.packageName;
                PackageInfo currPackage = getPackageManager().getPackageInfo(packageName, 0);
                if (masterPackage == null) {
                    masterPackage = currPackage;
                } else if (currPackage.firstInstallTime < masterPackage.firstInstallTime) {
                    masterPackage = currPackage;
                } else if (currPackage.firstInstallTime == masterPackage.firstInstallTime
                        && currPackage.packageName.compareTo(masterPackage.packageName) < 0) {
                    masterPackage = currPackage;
                }
            } catch (Exception e) {
                Log.w(TAG, "Failed to get package info", e);
            }
        }
        return masterPackage != null && getPackageName().equals(masterPackage.packageName);
    }

    /** Lazily creates the scheduler backed by this app's private "pacemaker.data" file. */
    private Scheduler getScheduler() {
        if (mScheduler == null) {
            mScheduler = new Scheduler();
            mScheduler.setData(new File(getFilesDir(), "pacemaker.data"));
        }
        return mScheduler;
    }

    /**
     * Work with service using this utility methods.
     */
    public static class Launcher {

        static void startOnBootCompleted(Context ctx) {
            Intent intent = new Intent(ctx, PacemakerService.class);
            intent.setAction(ACTION_MANAGE_PACEMAKER);
            intent.putExtra(EXTRA_COMMAND, COMMAND_BOOT);
            ctx.startService(intent);
        }

        /**
         * BUG FIX: previously sent COMMAND_BOOT, which only resets the start time.
         * Package removal must trigger COMMAND_SYNC so that settings of the removed
         * package are purged (COMMAND_SYNC had no other sender and was unreachable).
         */
        static void startOnPackageRemoved(Context ctx) {
            Intent intent = new Intent(ctx, PacemakerService.class);
            intent.setAction(ACTION_MANAGE_PACEMAKER);
            intent.putExtra(EXTRA_COMMAND, COMMAND_SYNC);
            ctx.startService(intent);
        }

        static void startOnManage(Context ctx, Intent caller) {
            Intent intent = new Intent(ctx, PacemakerService.class);
            intent.setAction(ACTION_MANAGE_PACEMAKER);
            copyExtras(caller, intent);
            ctx.startService(intent);
        }

        static void scheduleLinear(Context ctx, long delay) {
            Intent intent = new Intent(ACTION_MANAGE_PACEMAKER);
            if (Build.VERSION.SDK_INT >= 12) {
                intent.addFlags(Intent.FLAG_INCLUDE_STOPPED_PACKAGES);
            }
            intent.putExtra(EXTRA_COMMAND, COMMAND_ADD);
            intent.putExtra(EXTRA_TYPE, TYPE_LINEAR);
            intent.putExtra(EXTRA_DELAY, delay);
            intent.putExtra(EXTRA_PACKAGE_NAME, ctx.getPackageName());
            ctx.sendBroadcast(intent);
        }

        public static void cancelLinear(Context ctx) {
            Intent intent = new Intent(ACTION_MANAGE_PACEMAKER);
            if (Build.VERSION.SDK_INT >= 12) {
                intent.addFlags(Intent.FLAG_INCLUDE_STOPPED_PACKAGES);
            }
            intent.putExtra(EXTRA_COMMAND, COMMAND_CANCEL);
            intent.putExtra(EXTRA_TYPE, TYPE_LINEAR);
            intent.putExtra(EXTRA_PACKAGE_NAME, ctx.getPackageName());
            ctx.sendBroadcast(intent);
        }

        private static Intent createPaceIntent(Context ctx) {
            Intent result = new Intent(ctx, PacemakerService.class);
            result.setAction(ACTION_MANAGE_PACEMAKER);
            result.putExtra(EXTRA_COMMAND, COMMAND_PACE);
            return result;
        }

        private static void copyExtras(Intent from, Intent to) {
            to.putExtra(EXTRA_COMMAND, from.getIntExtra(EXTRA_COMMAND, UNKNOWN_VALUE));
            to.putExtra(EXTRA_DELAY, from.getLongExtra(EXTRA_DELAY, UNKNOWN_VALUE));
            to.putExtra(EXTRA_TYPE, from.getIntExtra(EXTRA_TYPE, UNKNOWN_VALUE));
            to.putExtra(EXTRA_PACKAGE_NAME, from.getStringExtra(EXTRA_PACKAGE_NAME));
        }
    }
}
/* * Copyright 2009-2013 University of Hildesheim, Software Systems Engineering * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.ssehub.easy.varModel.persistency; import java.io.BufferedWriter; import java.io.IOException; import java.io.Writer; import java.util.ArrayList; import java.util.List; import net.ssehub.easy.basics.logger.EASyLoggerFactory; import net.ssehub.easy.basics.logger.EASyLoggerFactory.EASyLogger; import net.ssehub.easy.basics.modelManagement.Version; import net.ssehub.easy.varModel.Bundle; import net.ssehub.easy.varModel.cst.ConstantValue; import net.ssehub.easy.varModel.cst.ConstraintSyntaxTree; import net.ssehub.easy.varModel.cst.IConstraintTreeVisitor; import net.ssehub.easy.varModel.cst.UnresolvedExpression; import net.ssehub.easy.varModel.model.AbstractVisitor; import net.ssehub.easy.varModel.model.AttributeAssignment; import net.ssehub.easy.varModel.model.Comment; import net.ssehub.easy.varModel.model.Constraint; import net.ssehub.easy.varModel.model.ContainableModelElement; import net.ssehub.easy.varModel.model.FreezeBlock; import net.ssehub.easy.varModel.model.IFreezable; import net.ssehub.easy.varModel.model.IModelElement; import net.ssehub.easy.varModel.model.IModelVisitor; import net.ssehub.easy.varModel.model.IPartialEvaluable; import net.ssehub.easy.varModel.model.ModelElement; import net.ssehub.easy.varModel.model.PartialEvaluationBlock; import net.ssehub.easy.varModel.model.Project; import 
net.ssehub.easy.varModel.model.ProjectInterface; import net.ssehub.easy.varModel.model.datatypes.Compound; import net.ssehub.easy.varModel.model.datatypes.OrderedEnum; import net.ssehub.easy.varModel.model.values.ConstraintValue; import net.ssehub.easy.varModel.model.values.IValueVisitor; import net.ssehub.easy.varModel.model.values.Value; /** * Super class for translating the variability model to a <code>StringBuffer</code>, which can be used for saving. * @author El-Sharkawy * * @see net.ssehub.easy.varModel.model.values.IValueVisitor * @see Project */ public abstract class AbstractVarModelWriter extends AbstractVisitor implements IValueVisitor, IConstraintTreeVisitor { /** * Defines a dummy model element for additional formatting. */ protected static final ModelElement DUMMY_PARENT = new ModelElement("dummy") { @Override public void accept(IModelVisitor visitor) { // do nothing } }; /** * The number of whitespaces per indentation (if {@link #useWhitespace} is enabled), otherwise one tab is inserted. * Currently we assume that one central configuration for the indentation is sufficient and that it will not be * changed during the output of this or subclasses. */ private static String indentStep = " "; /** * Use whitespaces or tabs. * Currently we assume that one central configuration for the indentation is sufficient and that it will not be * changed during the output of this or subclasses. */ private static boolean useWhitespace = true; /** * Detect, warn, report about and write to avoid OCL compliance problems. */ private static boolean oclCompliance = false; private String myIndentStep = indentStep; private boolean myUseWhitespace = useWhitespace; private int additionalIndentation = 0; /** * Stores the all parents of the currently visited element.<br/> * Can be an empty list in case of root elements. */ private List<IModelElement> parents; /** * This attribute should be used for storing the output. 
*/ private Writer out; private IModelElement expressionContext; /** * Defines default space locations. * * @author Holger Eichelberger */ protected enum DefaultSpace { /** * The default space for project. */ PROJECT } /** * Sole constructor for this class. * @param writer Writer which should be used for writing the output. */ protected AbstractVarModelWriter(Writer writer) { parents = new ArrayList<IModelElement>(); out = new BufferedWriter(writer); } /** * Changes the expression context. * * @param expressionContext the expression context */ protected void setExpressionContext(IModelElement expressionContext) { this.expressionContext = expressionContext; } /** * Returns the expression context. * * @return the expression context */ protected IModelElement getExpressionContext() { return expressionContext; } /** * Defines the number of spaces used in one indentation step if {@link #useWhitespace}. * * @param count the number of spaces (negative values are ignored) */ public static void setIndentStep(int count) { indentStep = deriveIndentStep(count); } /** * Defines the number of spaces used in one indentation step if {@link #useWhitespace} for this writer. * * @param count the number of spaces (negative values are ignored) */ public void setIndentationStep(int count) { if (count != myIndentStep.length()) { myIndentStep = deriveIndentStep(count); } } /** * Changes the OCL compliance setting. * * @param compliance operate with OCL compliance (<code>true</code>), allow both IVML + OCL (<code>false</code> */ public static void setOclCompliance(boolean compliance) { oclCompliance = compliance; } /** * Returns whether OCL compliance shall be considered. * * @return <code>true</code> for OCL compliance, <code>false</code> else */ public static boolean considerOclCompliance() { return oclCompliance; } /** * Defines whether whitespaces or tabs shall be used for indentation in this editor. 
* * @param useWhitespaces if <code>true</code> whitespaces, tabs if <code>false</code> */ public void setUseWhitespaces(boolean useWhitespaces) { this.myUseWhitespace = useWhitespaces; } /** * Creates the indentation step in terms of numbers of spaces to be used. * * @param count the number of spaces (negative values are ignored) * @return the indentation step */ private static String deriveIndentStep(int count) { count = Math.max(0, count); StringBuilder tmp = new StringBuilder(); for (int i = 0; i < count; i++) { tmp.append(" "); } return tmp.toString(); } /** * Return the indent-step for ivml. * @return indent step. */ public static String getIvmlIndentStep() { return indentStep; } /** * Defines whether whitespaces or tabs shall be used for indentation. * * @param use if <code>true</code> whitespaces, tabs if <code>false</code> */ public static void setUseIvmlWhitespace(boolean use) { useWhitespace = use; } /** * Defines whether whitespaces or tabs shall be used for indentation. * * @return useWhitespace if <code>true</code> whitespaces, tabs if <code>false</code> * */ public static boolean getUseIvmlWhitespace() { return useWhitespace; } /** * Changes the writer. (for reuse) * * @param writer the new writer */ public void setWriter(Writer writer) { out = writer; } /** * Returns the actual writer. * * @return the actual writer */ public Writer getWriter() { return out; } // /** // * Getter for the output writer. // * @return string buffer containing the output // */ // public BufferedWriter getOutput() { // return out; // } /** * Method to append the output. * @param appendableOutput String containing the output, which should be added. */ protected final void appendOutput(String appendableOutput) { try { out.write(appendableOutput); } catch (IOException e) { getLogger().exception(e); } } /** * Method to append the output. * @param appendableOutput char containing the output, which should be added. 
*/ protected final void appendOutput(char appendableOutput) { try { out.write(appendableOutput); } catch (IOException e) { getLogger().exception(e); } } /** * Method to append the current indentation. */ protected final void appendIndentation() { try { out.write(getIndentation().toString()); } catch (IOException e) { getLogger().exception(e); } } /** * Flushes the writer. Should be used if there aren't any more elements to read. * @throws IOException If an I/O error occurs */ public final void flush() throws IOException { out.flush(); } /** * Returns the current indentation for (nested) elements. * @return The correct indentation for nested elements or an empty StringBuffer in case of an top level element. */ protected StringBuffer getIndentation() { int depth = parents.size(); StringBuffer indent = new StringBuffer(); for (int i = 0; i < depth + additionalIndentation; i++) { if (myUseWhitespace) { indent.append(myIndentStep); } else { indent.append("\t"); } } return indent; } /** * Returns the direct parent of the current visited object. Ignors {@link #DUMMY_PARENT}. * @return The direct parent or <b>null</b> in case of an element from the top layer. */ protected IModelElement getParent() { IModelElement parent = null; if (parents.size() > 0) { int lastPosition = parents.size() - 1; while (lastPosition >= 0 && DUMMY_PARENT == parents.get(lastPosition)) { lastPosition--; } parent = parents.get(lastPosition); } return parent; } /** * Returns the latest parent with the given <code>type</code>. 
 *
 * @param <T> the actual type of the parent to be returned
 * @param type the type to search for
 * @return the latest parent with given <code>type</code> or {@link #getParent()} if <code>type==<b>null</b></code>
 */
protected <T extends IModelElement> T getParent(Class<T> type) {
    T result = null;
    // walk the parent stack from the innermost element outwards
    for (int p = parents.size() - 1; null == result && p >= 0; p--) {
        IModelElement tmp = parents.get(p);
        if (type.isInstance(tmp)) {
            result = type.cast(tmp);
        }
    }
    return result;
}

/**
 * Removes the last parent from the list of all parents.
 * This method should be used inside the <b>after</b> visit methods.
 */
protected void removeLastParent() {
    if (parents.size() > 0) {
        int lastPosition = parents.size() - 1;
        parents.remove(lastPosition);
    }
}

/**
 * Adds the given element to the list of all parents.
 *
 * @param parent the parent to be added
 */
protected void addParent(IModelElement parent) {
    parents.add(parent);
}

/**
 * Returns whether comments are emitted by this writer.
 *
 * @return <code>true</code> if comments are emitted, <code>false</code> else
 */
public abstract boolean emitComments();

/**
 * Prints default spaces.
 *
 * @param location the intended location of the space
 */
protected abstract void printDefaultSpace(DefaultSpace location);

/**
 * Processes a version information.
* * @param version the version to be processed */ protected abstract void processVersion(Version version); @Override public void visitProject(Project project) { //Write beginning appendIndentation(); startWritingProject(project); //Add project to list of parents (for writing nested elements) parents.add(project); if (project.getVersion() != null) { boolean defltSpace = true; Comment comment = project.getNestedComment(project.getVersion()); if (null != comment) { defltSpace = false; appendOutput(comment.getName()); } if (defltSpace) { printDefaultSpace(DefaultSpace.PROJECT); } processVersion(project.getVersion()); } else { printDefaultSpace(DefaultSpace.PROJECT); } int count; //Projects imports count = project.getImportsCount(); for (int p = 0; p < count; p++) { project.getImport(p).accept(this); } //Elements on top layer inside the project count = project.getElementCount(); for (int c = 0; c < count; c++) { ContainableModelElement element = project.getElement(c); if (element instanceof ProjectInterface) { project.getElement(c).accept(this); } } count = project.getAttributesCount(); for (int a = project.getAttributesCount(); a < count; a++) { project.getAttribute(a).accept(this); } //Elements on top layer inside the project count = project.getElementCount(); for (int c = 0; c < count; c++) { ContainableModelElement element = project.getElement(c); if (!(element instanceof ProjectInterface)) { project.getElement(c).accept(this); } } //No more nested elements, remove project from list of parents removeLastParent(); //write end of project. appendIndentation(); endWritingProject(project); //Project is closed, there shouldn't be any further elements to read. Thus, flush the writer. try { flush(); } catch (IOException e) { getLogger().exception(e); } } /** * Method for visiting an orded enum. * * @param eenum The enum which should be visited. 
 */
public void visitOrderedEnum(OrderedEnum eenum) {
    // ordered enums are written exactly like plain enums
    visitEnum(eenum);
}

@Override
public void visitCompound(Compound compound) {
    // Write beginning
    appendIndentation();
    startWritingCompound(compound);
    // Add compound to list of parents (for writing nested elements)
    parents.add(compound);
    // Nested DecisionVariableDeclarations
    int meCount = compound.getModelElementCount();
    for (int e = 0; e < meCount; e++) {
        ContainableModelElement elt = compound.getModelElement(e);
        // hook for emitting e.g. comments before the nested element
        beforeNestedElement(elt);
        elt.accept(this);
    }
    // Close Compound
    removeLastParent();
    // write end of project.
    appendIndentation();
    endWritingCompound(compound);
}

@Override
public void visitAttributeAssignment(AttributeAssignment assignment) {
    parents.add(assignment);
    for (int m = 0; m < assignment.getModelElementCount(); m++) {
        assignment.getModelElement(m).accept(this);
    }
    removeLastParent();
}

/**
 * Method for writing a compound, first part.<br/>
 * @param compound The compound which should be saved.
 */
protected abstract void startWritingCompound(Compound compound);

/**
 * Method for writing a compound, second part.<br/>
 * @param compound The compound which should be saved.
 */
protected abstract void endWritingCompound(Compound compound);

/**
 * Method for writing a project, first part.<br/>
 * @param project The project which should be saved.
 */
protected abstract void startWritingProject(Project project);

/**
 * Method for writing a project, second part.<br/>
 * @param project The project which should be saved.
 */
protected abstract void endWritingProject(Project project);

@Override
public void visitConstantValue(ConstantValue value) {
    Value val = value.getConstantValue();
    if (null != val) { // dispatch ;)
        val.accept(this);
    } else {
        appendOutput("<null>"); // debugging only!
    }
}

@Override
public void visitConstraintValue(ConstraintValue value) {
    ConstraintSyntaxTree val = value.getValue();
    if (null != val) {
        emitConstraintExpression(expressionContext, val);
    }
}

@Override
public void visitConstraint(Constraint constraint) {
    ConstraintSyntaxTree cst = constraint.getConsSyntax();
    if (null != cst) {
        emitConstraintExpression(constraint, constraint.getConsSyntax());
    }
}

/**
 * Visits the expression of a constraint (for extension).
 *
 * @param context the visiting context
 * @param constraint the (specified) constraint;
 */
protected void emitConstraintExpression(IModelElement context, ConstraintSyntaxTree constraint) {
    // expressionContext is only valid while the expression is being visited
    expressionContext = context;
    constraint.accept(this);
    expressionContext = null;
}

@Override
public void visitPartialEvaluationBlock(PartialEvaluationBlock block) {
    parents.add(block);
    // non-evaluable nested elements first ...
    for (int e = 0; e < block.getModelElementCount(); e++) {
        ContainableModelElement cme = block.getModelElement(e);
        if (!(cme instanceof IPartialEvaluable)) {
            block.getModelElement(e).accept(this);
        }
    }
    // ... then the evaluables themselves
    for (int e = 0; e < block.getEvaluableCount(); e++) {
        IPartialEvaluable evaluable = block.getEvaluable(e);
        beforeNestedElement(evaluable);
        evaluable.accept(this);
    }
    removeLastParent();
}

@Override
public void visitFreezeBlock(FreezeBlock freeze) {
    parents.add(freeze);
    for (int f = 0; f < freeze.getFreezableCount(); f++) {
        IFreezable freezable = freeze.getFreezable(f);
        beforeNestedElement(freezable);
        freezable.accept(this);
    }
    removeLastParent();
}

@Override
public void visitUnresolvedExpression(UnresolvedExpression expression) {
    if (expression.isLeaf()) {
        // nothing resolved yet: emit the raw leaf text
        appendOutput(expression.getUnresolvedLeaf());
    } else {
        ConstraintSyntaxTree actual = expression.getActualExpression();
        if (null != actual) {
            actual.accept(this);
        }
    }
}

/**
 * This method is called before a nested element is emitted. This enables the output of comments before the
 * <code>element</code> etc.
 *
 * @param element the element (no specific type needed, see {@link Comment}
 */
protected void beforeNestedElement(Object element) {
    // default: no output; subclasses may emit e.g. comments here
}

/**
 * Returns the number of hierarchically visited parents.
 *
 * @return the number of hierarchically visited parents
 */
protected int getParentCount() {
    return parents.size();
}

/**
 * Returns the specified hierarchically visited parent.
 *
 * @param index the 0-based index of the visited parent
 * @return the specified visited parent
 * @throws IndexOutOfBoundsException if <code>index &lt; 0 || index &gt;= {@link #getParentCount()}</code>
 */
protected IModelElement getParent(int index) {
    return parents.get(index);
}

/**
 * Increases the additional indentation.
 */
protected void increaseAdditionalIndentation() {
    additionalIndentation++;
}

/**
 * Decreases the additional indentation.
 */
protected void decreaseAdditionalIndentation() {
    if (additionalIndentation > 0) {
        additionalIndentation--;
    }
}

/**
 * Returns the actual logger instance.
 *
 * @return the logger instance
 */
protected EASyLogger getLogger() {
    return EASyLoggerFactory.INSTANCE.getLogger(getClass(), Bundle.ID);
}

}
/**
 * Copyright 2014-2017 Riccardo Massera (TheCoder4.Eu).
 *
 * This file is part of BootsFaces.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.bootsfaces.render;

import java.io.IOException;
import java.lang.annotation.Annotation;
import java.util.List;
import java.util.Map;

import javax.el.ValueExpression;
import javax.faces.FacesException;
import javax.faces.application.ProjectStage;
import javax.faces.component.EditableValueHolder;
import javax.faces.component.UIComponent;
import javax.faces.component.UIForm;
import javax.faces.component.UIInput;
import javax.faces.component.ValueHolder;
import javax.faces.component.behavior.ClientBehavior;
import javax.faces.component.behavior.ClientBehaviorHolder;
import javax.faces.context.FacesContext;
import javax.faces.context.ResponseWriter;
import javax.faces.convert.Converter;
import javax.faces.convert.ConverterException;
import javax.faces.render.Renderer;

import net.bootsfaces.beans.ELTools;
import net.bootsfaces.component.ajax.AJAXRenderer;
import net.bootsfaces.component.form.Form;
import net.bootsfaces.utils.BsfUtils;
import net.bootsfaces.utils.FacesMessages;

/**
 * Common base class of the BootsFaces renderers. Bundles attribute rendering,
 * error/required CSS class handling, behavior decoding and converter lookup.
 */
public class CoreRenderer extends Renderer {

    /**
     * Method that provide ability to render pass through attributes.
     *
     * @param context
     * @param component
     * @param attrs
     * @throws IOException
     */
    protected void renderPassThruAttributes(FacesContext context, UIComponent component, String[] attrs,
            boolean shouldRenderDataAttributes) throws IOException {
        ResponseWriter writer = context.getResponseWriter();
        if ((attrs == null || attrs.length <= 0) && shouldRenderDataAttributes == false)
            return;
        // pre-defined attributes
        for (String attribute : component.getAttributes().keySet()) {
            boolean attributeToRender = false;
            // data-* attributes pass through when requested
            if (shouldRenderDataAttributes && attribute.startsWith("data-")) {
                attributeToRender = true;
            }
            // otherwise only attributes from the white-list are rendered
            if (!attributeToRender && attrs != null) {
                for (String ca : attrs) {
                    if (attribute.equals(ca)) {
                        attributeToRender = true;
                        break;
                    }
                }
            }
            if (attributeToRender) {
                Object value = component.getAttributes().get(attribute);
                if (shouldRenderAttribute(value))
                    writer.writeAttribute(attribute, value.toString(), attribute);
            }
        }
    }

    /**
     * Renders every attribute of the given white-list (without data-* support).
     *
     * @param context the current FacesContext
     * @param component the component whose attributes are rendered
     * @param attrs the attribute names to pass through
     * @throws IOException if writing fails
     */
    protected void renderPassThruAttributes(FacesContext context, UIComponent component, String[] attrs)
            throws IOException {
        ResponseWriter writer = context.getResponseWriter();
        // pre-defined attributes
        if (attrs != null && attrs.length > 0) {
            for (String attribute : attrs) {
                Object value = component.getAttributes().get(attribute);
                if (shouldRenderAttribute(value))
                    writer.writeAttribute(attribute, value.toString(), attribute);
            }
        }
    }

    /**
     * @deprecated Use
     *             {@link CoreRenderer#generateErrorAndRequiredClass(javax.faces.component.UIInput, javax.faces.context.ResponseWriter, java.lang.String, java.lang.String, java.lang.String, java.lang.String) }
     *             instead
     *
     *             Renders the CSS pseudo classes for required fields and for the
     *             error levels.
     *
     * @param input
     * @param rw
     * @param clientId
     * @throws IOException
     */
    @Deprecated
    public void generateErrorAndRequiredClassForLabels(UIInput input, ResponseWriter rw, String clientId,
            String additionalClass) throws IOException {
        String styleClass = getErrorAndRequiredClass(input, clientId);
        if (null != additionalClass) {
            additionalClass = additionalClass.trim();
            if (additionalClass.trim().length() > 0) {
                styleClass += " " + additionalClass;
            }
        }
        UIForm currentForm = AJAXRenderer.getSurroundingForm((UIComponent) input, true);
        if (currentForm instanceof Form) {
            // horizontal forms need Bootstrap's control-label on labels
            if (((Form) currentForm).isHorizontal()) {
                styleClass += " control-label";
            }
        }
        if (input instanceof IResponsiveLabel) {
            String responsiveLabelClass = Responsive.getResponsiveLabelClass((IResponsiveLabel) input);
            if (null != responsiveLabelClass) {
                styleClass += " " + responsiveLabelClass;
            }
        }
        rw.writeAttribute("class", styleClass, "class");
    }

    /**
     * Renders the CSS pseudo classes for required fields and for the error levels.
     *
     * @param input
     * @param rw
     * @param clientId
     * @throws IOException
     */
    protected void generateErrorAndRequiredClass(UIInput input, ResponseWriter rw, String clientId,
            String additionalClass1, String additionalClass2, String additionalClass3) throws IOException {
        String styleClass = getErrorAndRequiredClass(input, clientId);
        if (null != additionalClass1) {
            additionalClass1 = additionalClass1.trim();
            if (additionalClass1.trim().length() > 0) {
                styleClass += " " + additionalClass1;
            }
        }
        if (null != additionalClass2) {
            additionalClass2 = additionalClass2.trim();
            if (additionalClass2.trim().length() > 0) {
                styleClass += " " + additionalClass2;
            }
        }
        if (null != additionalClass3) {
            additionalClass3 = additionalClass3.trim();
            if (additionalClass3.trim().length() > 0) {
                styleClass += " " + additionalClass3;
            }
        }
        rw.writeAttribute("class", styleClass, "class");
    }

    /**
     * Yields the value of the required and error level CSS class.
     *
     * @param input
     *            must not be null
     * @param clientId
     *            must not be null
     * @return can never be null
     */
    public String getErrorAndRequiredClass(UIInput input, String clientId) {
        String styleClass = "";
        if (BsfUtils.isLegacyFeedbackClassesEnabled()) {
            styleClass = FacesMessages.getErrorSeverityClass(clientId);
        }
        if (input.isRequired()) {
            styleClass += " bf-required";
        } else {
            // not explicitly required: check Bean Validation annotations on the bound property
            Annotation[] readAnnotations = ELTools.readAnnotations(input);
            if (null != readAnnotations && readAnnotations.length > 0) {
                for (Annotation a : readAnnotations) {
                    if ((a.annotationType().getName().endsWith("NotNull"))
                            || (a.annotationType().getName().endsWith("NotEmpty"))
                            || (a.annotationType().getName().endsWith("NotBlank"))) {
                        styleClass += " bf-required";
                        break;
                    }
                }
            }
        }
        return styleClass;
    }

    /**
     * Decides whether an attribute value is worth rendering. Numeric MIN_VALUE
     * markers are treated as "unset" and therefore suppressed.
     *
     * @param value the attribute value, may be null
     * @return true if the attribute should be written to the response
     */
    protected boolean shouldRenderAttribute(Object value) {
        if (value == null)
            return false;
        if (value instanceof Boolean) {
            return ((Boolean) value);
        } else if (value instanceof Number) {
            Number number = (Number) value;
            if (value instanceof Integer)
                return number.intValue() != Integer.MIN_VALUE;
            else if (value instanceof Double)
                return number.doubleValue() != Double.MIN_VALUE;
            else if (value instanceof Long)
                return number.longValue() != Long.MIN_VALUE;
            else if (value instanceof Byte)
                return number.byteValue() != Byte.MIN_VALUE;
            else if (value instanceof Float)
                return number.floatValue() != Float.MIN_VALUE;
            else if (value instanceof Short)
                return number.shortValue() != Short.MIN_VALUE;
        }
        return true;
    }

    /**
     * Decodes the client behaviors (e.g. AJAX behaviors) attached to the component
     * for the event/source transported in the request parameters.
     *
     * @param context the current FacesContext
     * @param component the component whose behaviors are decoded
     */
    protected void decodeBehaviors(FacesContext context, UIComponent component) {
        if (!(component instanceof ClientBehaviorHolder)) {
            return;
        }
        Map<String, List<ClientBehavior>> behaviors = ((ClientBehaviorHolder) component).getClientBehaviors();
        if (behaviors.isEmpty()) {
            return;
        }
        Map<String, String> params = context.getExternalContext().getRequestParameterMap();
        String behaviorEvent = params.get("javax.faces.behavior.event");
        if (null != behaviorEvent) {
            List<ClientBehavior> behaviorsForEvent = behaviors.get(behaviorEvent);
            if (behaviorsForEvent != null && !behaviorsForEvent.isEmpty()) {
                String behaviorSource = params.get("javax.faces.source");
                String clientId = component.getClientId(context);
                // only decode if this very component triggered the behavior
                if (behaviorSource != null && clientId.equals(behaviorSource)) {
                    for (ClientBehavior behavior : behaviorsForEvent) {
                        behavior.decode(context, component);
                    }
                }
            }
        }
    }

    /**
     * Tells whether the component carries a truthy "disabled" or "readonly" attribute.
     *
     * @param component the component to inspect
     * @return true if the component is disabled or read-only
     */
    public boolean componentIsDisabledOrReadonly(UIComponent component) {
        return Boolean.valueOf(String.valueOf(component.getAttributes().get("disabled")))
                || Boolean.valueOf(String.valueOf(component.getAttributes().get("readonly")));
    }

    /**
     * Makes a JSF client id usable in jQuery selectors.
     *
     * @param clientId the raw client id, may be null
     * @return the escaped id, or null if the input was null
     */
    protected String escapeClientId(String clientId) {
        if (clientId == null) {
            return null;
        }
        // replace colons by underscores to avoid problems with jQuery
        return clientId.replace(':', '_');
    }

    /**
     * @param rw
     *            ResponseWriter to be used
     * @param name
     *            Attribute name to be added
     * @param value
     *            Attribute value to be added
     * @param property
     *            Name of the property or attribute (if any) of the
     *            {@link UIComponent} associated with the containing element, to
     *            which this generated attribute corresponds
     * @throws IllegalStateException
     *             if this method is called when there is no currently open element
     * @throws IOException
     *             if an input/output error occurs
     * @throws NullPointerException
     *             if <code>name</code> is <code>null</code>
     */
    protected void writeAttribute(ResponseWriter rw, String name, Object value, String property) throws IOException {
        if (value == null) {
            return;
        }
        // empty strings are suppressed as well
        if (value instanceof String)
            if (((String) value).length() == 0)
                return;
        rw.writeAttribute(name, value, property);
    }

    /**
     * @param rw
     *            ResponseWriter to be used
     * @param name
     *            Attribute name to be added
     * @param value
     *            Attribute value to be added
     * @throws IllegalStateException
     *             if this method is called when there is no currently open element
     * @throws IOException
     *             if an input/output error occurs
     * @throws NullPointerException
     *             if <code>name</code> is <code>null</code>
     */
    protected void writeAttribute(ResponseWriter rw, String name, Object value) throws IOException {
        if (value == null) {
            return;
        }
        if (value instanceof String)
            if (((String) value).length() == 0)
                return;
        rw.writeAttribute(name, value, name);
    }

    /**
     * @param rw
     *            ResponseWriter to be used
     * @param text
     *            Text to be written
     * @param property
     *            Name of the property or attribute (if any) of the
     *            {@link UIComponent} associated with the containing element, to
     *            which this generated text corresponds
     * @throws IOException
     *             if an input/output error occurs
     * @throws NullPointerException
     *             if <code>text</code> is <code>null</code>
     */
    public void writeText(ResponseWriter rw, Object text, String property) throws IOException {
        if (text == null || text.equals("")) {
            return;
        }
        rw.writeText(text, property);
    }

    /**
     * @param rw
     *            ResponseWriter to be used
     * @param text
     *            Text to be written
     * @param component
     *            The {@link UIComponent} (if any) to which this element corresponds
     * @param property
     *            Name of the property or attribute (if any) of the
     *            {@link UIComponent} associated with the containing element, to
     *            which this generated text corresponds
     * @throws IOException
     *             if an input/output error occurs
     * @throws NullPointerException
     *             if <code>text</code> is <code>null</code>
     */
    public void writeText(ResponseWriter rw, Object text, UIComponent component, String property) throws IOException {
        if (text == null || text.equals("")) {
            return;
        }
        rw.writeText(text, property);
    }

    /**
     * @param rw
     *            ResponseWriter to be used
     * @param text
     *            Text to be written
     * @param off
     *            Starting offset (zero-relative)
     * @param len
     *            Number of characters to be written
     * @throws IndexOutOfBoundsException
     *             if the calculated starting or ending position is outside the
     *             bounds of the character array
     * @throws IOException
     *             if an input/output error occurs
     * @throws NullPointerException
     *             if <code>text</code> is <code>null</code>
     */
    public void writeText(ResponseWriter rw, char text[], int off, int len) throws IOException {
        if (text == null || text.length <= 0 || "".equals(String.valueOf(text))) {
            return;
        }
        rw.writeText(text, off, len);
    }

    /**
     * Convenience overload: warn (but do not throw) if the component is outside a form.
     *
     * @param component the component to check
     * @param msg the warning message
     */
    protected static void assertComponentIsInsideForm(UIComponent component, String msg) {
        assertComponentIsInsideForm(component, msg, false);
    }

    /**
     * Warns (or throws, if requested) when the component has no surrounding form.
     * The check is skipped in the Production project stage.
     *
     * @param component the component to check
     * @param msg the warning or exception message
     * @param throwException if true, a FacesException is raised instead of logging
     */
    public static void assertComponentIsInsideForm(UIComponent component, String msg, boolean throwException) {
        if (!FacesContext.getCurrentInstance().isProjectStage(ProjectStage.Production)) {
            UIComponent c = component;
            while ((c != null) && (!(c instanceof UIForm))) {
                c = c.getParent();
            }
            if (!(c instanceof UIForm)) {
                System.out.println("Warning: The BootsFaces component " + component.getClass()
                        + " works better if put inside a form. These capabilities get lost if not put in a form:");
                if (throwException) {
                    throw new FacesException(msg);
                } else {
                    System.out.println(msg);
                }
            }
        }
    }

    /**
     * Walks up the component tree to find the surrounding form.
     *
     * @param component the starting component
     * @param lenient if true, null is returned instead of throwing when no form exists
     * @return the surrounding form, or null (lenient mode only)
     */
    protected static UIForm getSurroundingForm(UIComponent component, boolean lenient) {
        UIComponent c = component;
        while ((c != null) && (!(c instanceof UIForm)) && (!(c instanceof Form))) {
            c = c.getParent();
        }
        if (!(c instanceof UIForm || c instanceof Form)) {
            if (lenient) {
                return null;
            } else {
                throw new FacesException(
                        "The component with the id " + component.getClientId() + " must be inside a form");
            }
        }
        return (UIForm) c;
    }

    /**
     * Tells whether the surrounding BootsFaces form is rendered horizontally.
     *
     * @param component the component whose surrounding form is inspected
     * @return true if inside a horizontal BootsFaces form
     */
    public static boolean isHorizontalForm(UIComponent component) {
        UIForm c = getSurroundingForm(component, true);
        if (null != c && c instanceof Form) {
            return ((Form) c).isHorizontal();
        }
        return false;
    }

    /**
     * Algorithm works as follows; - If it's an input component, submitted value is
     * checked first since it'd be the value to be used in case validation errors
     * terminates jsf lifecycle - Finally the value of the component is retrieved
     * from backing bean and if there's a converter, converted value is returned
     *
     * @param fc
     *            FacesContext instance
     * @param c
     *            UIComponent instance whose value will be returned
     * @return End text
     */
    public String getValue2Render(FacesContext fc, UIComponent c) {
        if (c instanceof ValueHolder) {
            if (c instanceof EditableValueHolder) {
                Object sv = ((EditableValueHolder) c).getSubmittedValue();
                if (sv != null) {
                    return sv.toString();
                }
            }
            ValueHolder vh = (ValueHolder) c;
            Object val = vh.getValue();
            // format the value as string
            if (val != null) {
                /*
                 * OLD Converter converter = getConverter(fc, vh);
                 */
                /* NEW */
                Converter converter = vh.getConverter();
                if (converter == null) {
                    Class<?> valueType = val.getClass();
                    // shortcut: plain strings without a registered String converter pass through
                    if (valueType == String.class && (null == fc.getApplication().createConverter(String.class))) {
                        return (String) val;
                    }
                    converter = fc.getApplication().createConverter(valueType);
                }
                /* END NEW */
                if (converter != null)
                    return converter.getAsString(fc, c, val);
                else
                    return val.toString(); // Use toString as a fallback if
                                           // there is no explicit or implicit
                                           // converter
            } else {
                // component is a value holder but has no value
                return null;
            }
        }
        // component it not a value holder
        return null;
    }

    /**
     * Finds the appropriate converter for a given value holder
     *
     * @param fc
     *            FacesContext instance
     * @param vh
     *            ValueHolder instance to look converter for
     * @return Converter
     */
    public static Converter getConverter(FacesContext fc, ValueHolder vh) {
        // explicit converter
        Converter converter = vh.getConverter();
        // try to find implicit converter
        if (converter == null) {
            ValueExpression expr = ((UIComponent) vh).getValueExpression("value");
            if (expr != null) {
                Class<?> valueType = expr.getType(fc.getELContext());
                if (valueType != null) {
                    converter = fc.getApplication().createConverter(valueType);
                }
            }
        }
        return converter;
    }

    /**
     * This method is called by the JSF framework to get the type-safe value of the
     * attribute. Do not delete this method.
     */
    @Override
    public Object getConvertedValue(FacesContext fc, UIComponent c, Object sval) throws ConverterException {
        Converter cnv = resolveConverter(fc, c, sval);
        if (cnv != null) {
            if (sval == null || sval instanceof String) {
                return cnv.getAsObject(fc, c, (String) sval);
            } else {
                return cnv.getAsObject(fc, c, String.valueOf(sval));
            }
        } else {
            return sval;
        }
    }

    /**
     * Resolves the converter of the component, either explicitly set or derived
     * from the type of the "value" value expression.
     *
     * @param context the current FacesContext
     * @param c the component to resolve the converter for
     * @param value the submitted value (used by the Mojarra workaround below)
     * @return the converter, or null if none applies
     */
    protected Converter resolveConverter(FacesContext context, UIComponent c, Object value) {
        if (!(c instanceof ValueHolder)) {
            return null;
        }
        Converter cnv = ((ValueHolder) c).getConverter();
        if (cnv != null) {
            return cnv;
        } else {
            ValueExpression ve = c.getValueExpression("value");
            if (ve != null) {
                Class<?> valType = ve.getType(context.getELContext());
                if (valType != null && (!valType.isPrimitive())) { // workaround for a Mojarra bug (#966)
                    return context.getApplication().createConverter(valType);
                } else if (valType != null && (value instanceof String)) {
                    // workaround for the workaround of the Mojarra bug (#977)
                    return context.getApplication().createConverter(valType);
                }
            }
            return null;
        }
    }

    /**
     * Returns request parameter value for the provided parameter name.
     *
     * @param context Faces context.
     * @param name Parameter name to get value for.
     *
     * @return Request parameter value for the provided parameter name.
     */
    public static String getRequestParameter(FacesContext context, String name) {
        return context.getExternalContext().getRequestParameterMap().get(name);
    }

    /**
     * Returns type of the value attribute's {@link ValueExpression} of the provided component.
     *
     * @param component Component to get attribute type for.
     *
     * @return Type of the value attribute's {@link ValueExpression} of the provided component.
     */
    public static Class<?> getValueType(UIComponent component) {
        return getAttributeType(component, "value");
    }

    /**
     * Returns type of the provided attribute name's {@link ValueExpression} of the provided component.
     *
     * @param component Component to get attribute type for.
     * @param attribute Attribute to get type for.
     *
     * @return Type of the provided attribute name's {@link ValueExpression} of the provided component.
     */
    public static Class<?> getAttributeType(UIComponent component, String attribute) {
        ValueExpression valueExpression = component.getValueExpression(attribute);
        return valueExpression == null ? null
                : valueExpression.getType(FacesContext.getCurrentInstance().getELContext());
    }

    /**
     * Closes the fieldset opened by {@link #beginDisabledFieldset(IContentDisabled, ResponseWriter)}.
     *
     * @param component the component carrying the contentDisabled flag
     * @param rw the response writer
     * @throws IOException if writing fails
     */
    public static void endDisabledFieldset(IContentDisabled component, ResponseWriter rw) throws IOException {
        if (component.isContentDisabled()) {
            rw.endElement("fieldset");
        }
    }

    /**
     * Renders the code disabling every input field and every button within a
     * container.
     *
     * @param component
     * @param rw
     * @return true if an element has been rendered
     * @throws IOException
     */
    public static boolean beginDisabledFieldset(IContentDisabled component, ResponseWriter rw) throws IOException {
        if (component.isContentDisabled()) {
            rw.startElement("fieldset", (UIComponent) component);
            rw.writeAttribute("disabled", "disabled", "null");
            return true;
        }
        return false;
    }

    /**
     * Get the main field container
     *
     * @deprecated Use
     *             {@link CoreInputRenderer#getWithFeedback(net.bootsfaces.render.CoreInputRenderer.InputMode, javax.faces.component.UIComponent)}
     *             instead
     *
     * @param additionalClass
     * @param clientId
     * @return
     */
    @Deprecated
    protected String getFormGroupWithFeedback(String additionalClass, String clientId) {
        if (BsfUtils.isLegacyFeedbackClassesEnabled()) {
            return additionalClass;
        }
        return additionalClass + " " + FacesMessages.getErrorSeverityClass(clientId);
    }

    /**
     * Opens a wrapper div carrying the responsive (col-*) classes, if any.
     *
     * @param component the potentially responsive component
     * @param responseWriter the response writer
     * @throws IOException if writing fails
     */
    protected void beginResponsiveWrapper(UIComponent component, ResponseWriter responseWriter) throws IOException {
        if (!(component instanceof IResponsive)) {
            return;
        }
        String responsiveStyleClass = Responsive.getResponsiveStyleClass((IResponsive) component, false);
        if (!"".equals(responsiveStyleClass)) {
            responseWriter.startElement("div", component);
            responseWriter.writeAttribute("class", responsiveStyleClass, null);
        }
    }

    /**
     * Closes the wrapper div opened by
     * {@link #beginResponsiveWrapper(UIComponent, ResponseWriter)}, if one was rendered.
     *
     * @param component the potentially responsive component
     * @param responseWriter the response writer
     * @throws IOException if writing fails
     */
    protected void endResponsiveWrapper(UIComponent component, ResponseWriter responseWriter) throws IOException {
        if (!(component instanceof IResponsive)) {
            return;
        }
        String responsiveStyleClass = Responsive.getResponsiveStyleClass((IResponsive) component, false);
        if (!"".equals(responsiveStyleClass)) {
            responseWriter.endElement("div");
        }
    }
}
/*******************************************************************************
 * Copyright (c) 2000, 2011 IBM Corporation and others.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *     IBM Corporation - initial API and implementation
 *******************************************************************************/
package org.eclipse.jdt.internal.ui.text.java.hover;

import java.util.Properties;

import org.eclipse.swt.SWT;
import org.eclipse.swt.widgets.Shell;

import org.eclipse.core.runtime.Assert;

import org.eclipse.core.resources.IStorage;

import org.eclipse.jface.action.Action;
import org.eclipse.jface.action.ToolBarManager;
import org.eclipse.jface.internal.text.html.HTMLPrinter;

import org.eclipse.jface.text.AbstractReusableInformationControlCreator;
import org.eclipse.jface.text.DefaultInformationControl;
import org.eclipse.jface.text.IInformationControl;
import org.eclipse.jface.text.IInformationControlCreator;
import org.eclipse.jface.text.IInformationControlExtension2;
import org.eclipse.jface.text.IRegion;
import org.eclipse.jface.text.ITextViewer;
import org.eclipse.jface.text.Region;

import org.eclipse.ui.IEditorPart;

import org.eclipse.ui.editors.text.EditorsUI;

import org.eclipse.jdt.core.IField;
import org.eclipse.jdt.core.IType;
import org.eclipse.jdt.core.ITypeRoot;
import org.eclipse.jdt.core.JavaModelException;
import org.eclipse.jdt.core.Signature;
import org.eclipse.jdt.core.dom.ASTNode;
import org.eclipse.jdt.core.dom.CompilationUnit;
import org.eclipse.jdt.core.dom.IBinding;
import org.eclipse.jdt.core.dom.NodeFinder;
import org.eclipse.jdt.core.dom.QualifiedName;
import org.eclipse.jdt.core.dom.SimpleName;
import org.eclipse.jdt.core.dom.StringLiteral;
import org.eclipse.jdt.core.dom.TypeDeclaration;

import org.eclipse.jdt.internal.corext.refactoring.nls.AccessorClassReference;
import org.eclipse.jdt.internal.corext.refactoring.nls.NLSHintHelper;

import org.eclipse.jdt.ui.SharedASTProvider;

import org.eclipse.jdt.internal.ui.JavaPluginImages;
import org.eclipse.jdt.internal.ui.javaeditor.JavaEditor;
import org.eclipse.jdt.internal.ui.javaeditor.NLSKeyHyperlink;
import org.eclipse.jdt.internal.ui.text.correction.ASTResolving;

/**
 * Provides externalized string as hover info for NLS key.
 *
 * @since 3.1
 */
public class NLSStringHover extends AbstractJavaEditorTextHover {

    /*
     * @see org.eclipse.jdt.internal.ui.text.java.hover.AbstractJavaEditorTextHover#getHoverRegion(org.eclipse.jface.text.ITextViewer, int)
     */
    @Override
    public IRegion getHoverRegion(ITextViewer textViewer, int offset) {
        if (!(getEditor() instanceof JavaEditor))
            return null;

        ITypeRoot je= getEditorInputJavaElement();
        if (je == null)
            return null;

        // Never wait for an AST in UI thread.
        CompilationUnit ast= SharedASTProvider.getAST(je, SharedASTProvider.WAIT_NO, null);
        if (ast == null)
            return null;

        // a width of 1 suffices to identify the node under the cursor
        ASTNode node= NodeFinder.perform(ast, offset, 1);
        if (node instanceof StringLiteral) {
            StringLiteral stringLiteral= (StringLiteral)node;
            return new Region(stringLiteral.getStartPosition(), stringLiteral.getLength());
        } else if (node instanceof SimpleName) {
            SimpleName simpleName= (SimpleName)node;
            return new Region(simpleName.getStartPosition(), simpleName.getLength());
        }

        return null;
    }

    /**
     * @deprecated see {@link org.eclipse.jface.text.ITextHover#getHoverInfo(ITextViewer, IRegion)}
     */
    public String getHoverInfo(ITextViewer textViewer, IRegion hoverRegion) {
        NLSHoverControlInput info= internalGetHoverInfo(textViewer, hoverRegion);
        return info == null ? null : info.fInformation;
    }

    /*
     * @see org.eclipse.jface.text.ITextHoverExtension2#getHoverInfo2(org.eclipse.jface.text.ITextViewer, org.eclipse.jface.text.IRegion)
     */
    @Override
    public Object getHoverInfo2(ITextViewer textViewer, IRegion hoverRegion) {
        return internalGetHoverInfo(textViewer, hoverRegion);
    }

    /**
     * Returns the hover input.
     *
     * @param textViewer the viewer on which the hover popup should be shown
     * @param hoverRegion the text range in the viewer which is used to determine the hover display
     *            information
     * @return the hover popup display input, or <code>null</code> if none available
     *
     * @see #getHoverInfo2(ITextViewer, IRegion)
     */
    private NLSHoverControlInput internalGetHoverInfo(ITextViewer textViewer, IRegion hoverRegion) {
        if (!(getEditor() instanceof JavaEditor))
            return null;

        ITypeRoot je= getEditorInputJavaElement();
        if (je == null)
            return null;

        CompilationUnit ast= SharedASTProvider.getAST(je, SharedASTProvider.WAIT_ACTIVE_ONLY, null);
        if (ast == null)
            return null;

        ASTNode node= NodeFinder.perform(ast, hoverRegion.getOffset(), hoverRegion.getLength());
        if (!(node instanceof StringLiteral) && !(node instanceof SimpleName))
            return null;

        // hovering over the qualifier (e.g. "Messages" in Messages.key) gives no key info
        if (node.getLocationInParent() == QualifiedName.QUALIFIER_PROPERTY)
            return null;

        // detect whether the accessor class is referenced via its fully qualified name
        boolean usedFullyQualifiedName= false;
        IBinding containingClassBinding= null;
        ASTNode containingClass= ASTResolving.findParentType(node);
        if (containingClass instanceof TypeDeclaration) {
            containingClassBinding= ((TypeDeclaration)containingClass).resolveBinding();
            ASTNode parentNode= node.getParent();
            if (parentNode instanceof QualifiedName) {
                IBinding qualifierBinding= (((QualifiedName)parentNode).getQualifier()).resolveBinding();
                if (qualifierBinding != null && containingClassBinding != null) {
                    usedFullyQualifiedName= qualifierBinding.getKey().equals(containingClassBinding.getKey());
                }
            }
        }

        AccessorClassReference ref= NLSHintHelper.getAccessorClassReference(ast, hoverRegion, usedFullyQualifiedName);
        if (ref == null)
            return null;

        // derive the NLS key from the literal, the simple name or the field's constant value
        String identifier= null;
        if (node instanceof StringLiteral) {
            identifier= ((StringLiteral)node).getLiteralValue();
        } else if (!usedFullyQualifiedName && node.getLocationInParent() == QualifiedName.NAME_PROPERTY) {
            identifier= ((SimpleName)node).getIdentifier();
        } else {
            try {
                if (containingClassBinding == null)
                    return null;
                IType parentType= (IType)containingClassBinding.getJavaElement();
                if (parentType == null)
                    return null;
                String varName= ((SimpleName)node).getIdentifier();
                IField field= parentType.getField(varName);
                if (!Signature.getSignatureSimpleName(field.getTypeSignature()).equals("String")) //$NON-NLS-1$
                    return null;
                Object obj= field.getConstant();
                // strip the surrounding quotes of the constant's source representation
                identifier= obj instanceof String ? ((String)obj).substring(1, ((String)obj).length() - 1) : null;
            } catch (JavaModelException e) {
                return null;
            }
        }
        if (identifier == null)
            return null;

        IStorage propertiesFile;
        try {
            propertiesFile= NLSHintHelper.getResourceBundle(je.getJavaProject(), ref);
            if (propertiesFile == null)
                return new NLSHoverControlInput(toHtml(JavaHoverMessages.NLSStringHover_NLSStringHover_PropertiesFileNotDetectedWarning, "", null, false), (IStorage)null, "", getEditor()); //$NON-NLS-1$ //$NON-NLS-2$
        } catch (JavaModelException ex) {
            return null;
        }

        final String propertiesFileName= propertiesFile.getName();

        Properties properties= null;
        try {
            properties= NLSHintHelper.getProperties(propertiesFile);
        } catch (IllegalArgumentException e) {
            return new NLSHoverControlInput(toHtml(propertiesFileName, JavaHoverMessages.NLSStringHover_NLSStringHover_PropertiesFileCouldNotBeReadWarning, e.getLocalizedMessage(), false), propertiesFile, identifier, getEditor());
        }
        if (properties == null)
            return null;
        if (properties.isEmpty())
            return new NLSHoverControlInput(toHtml(propertiesFileName, JavaHoverMessages.NLSStringHover_NLSStringHover_missingKeyWarning, null, false), propertiesFile, "", getEditor()); //$NON-NLS-1$

        String value= properties.getProperty(identifier, null);
        String buffer= toHtml(propertiesFileName, value, null, true);
        return new NLSHoverControlInput(buffer, propertiesFile, identifier, getEditor());
    }

    /**
     * Renders a small HTML page consisting of a header and the externalized value,
     * optionally pre-formatted, with a fallback paragraph when the key is missing.
     *
     * @param header the small header (usually the properties file name)
     * @param string the externalized value, or <code>null</code> if the key is missing
     * @param errorString an additional error message, or <code>null</code>
     * @param addPreFormatted if <code>true</code> the value is rendered pre-formatted
     * @return the assembled HTML
     */
    private String toHtml(String header, String string, String errorString, boolean addPreFormatted) {
        StringBuffer buffer= new StringBuffer();
        HTMLPrinter.addSmallHeader(buffer, header);
        if (string != null) {
            if (addPreFormatted) {
                HTMLPrinter.addParagraph(buffer, ""); //$NON-NLS-1$
                HTMLPrinter.addPreFormatted(buffer, HTMLPrinter.convertToHTMLContent(string));
            } else {
                HTMLPrinter.addParagraph(buffer, string);
            }
            if (errorString != null) {
                HTMLPrinter.addParagraph(buffer, errorString);
            }
        } else {
            HTMLPrinter.addParagraph(buffer, JavaHoverMessages.NLSStringHover_NLSStringHover_missingKeyWarning);
        }
        HTMLPrinter.insertPageProlog(buffer, 0);
        HTMLPrinter.addPageEpilog(buffer);
        return buffer.toString();
    }

    /**
     * The input for NLS hover.
     *
     * @since 3.5
     */
    private static class NLSHoverControlInput {

        private IStorage fpropertiesFile;

        private String fKeyName;

        private String fInformation;

        private IEditorPart fActiveEditor;

        /**
         * Creates the NLS hover input.
         *
         * @param information the hover info (string with simple HTML)
         * @param propertiesFile the properties file, or <code>null</code> if not found
         * @param key the NLS key
         * @param editor the active editor part
         */
        public NLSHoverControlInput(String information, IStorage propertiesFile, String key, IEditorPart editor) {
            fInformation= information;
            fpropertiesFile= propertiesFile;
            fKeyName= key;
            fActiveEditor= editor;
        }
    }

    /**
     * The NLS hover control.
     *
     * @since 3.5
     */
    static class NLSHoverControl extends DefaultInformationControl implements IInformationControlExtension2 {

        /**
         * The NLS control input.
         */
        private NLSHoverControlInput fInput;

        /**
         * Creates a resizable NLS hover control with the given shell as parent.
* * @param parent the parent shell * @param tbm the toolbar manager or <code>null</code> if toolbar is not desired */ public NLSHoverControl(Shell parent, ToolBarManager tbm) { super(parent, tbm); } /** * Creates an NLS hover control with the given shell as parent. * * @param parent the parent shell * @param tooltipAffordanceString the text to be used in the status field or * <code>null</code> to hide the status field */ public NLSHoverControl(Shell parent, String tooltipAffordanceString) { super(parent, tooltipAffordanceString); } /** * {@inheritDoc} This control can handle {@link NLSStringHover.NLSHoverControlInput}. */ public void setInput(Object input) { Assert.isLegal(input instanceof NLSHoverControlInput); NLSHoverControlInput info= (NLSHoverControlInput)input; setInformation(info.fInformation); fInput= info; } /** * Returns the control input. * * @return the control input */ public NLSHoverControlInput getInput() { return fInput; } } /** * Presenter control creator. * * @since 3.5 */ private static final class PresenterControlCreator extends AbstractReusableInformationControlCreator { /* * @see org.eclipse.jdt.internal.ui.text.java.hover.AbstractReusableInformationControlCreator#doCreateInformationControl(org.eclipse.swt.widgets.Shell) */ @Override public IInformationControl doCreateInformationControl(Shell parent) { ToolBarManager tbm= new ToolBarManager(SWT.FLAT); NLSHoverControl iControl= new NLSHoverControl(parent, tbm); OpenPropertiesFileAction openPropertiesFileAction= new OpenPropertiesFileAction(iControl); tbm.add(openPropertiesFileAction); tbm.update(true); return iControl; } } /** * Hover control creator. * * @since 3.5 */ private static final class HoverControlCreator extends AbstractReusableInformationControlCreator { /** * The presenter control creator. */ private final IInformationControlCreator fPresenterControlCreator; /** * Creates the hover control creator. 
* * @param presenterControlCreator the presenter control creator */ public HoverControlCreator(IInformationControlCreator presenterControlCreator) { fPresenterControlCreator= presenterControlCreator; } /* * @see org.eclipse.jdt.internal.ui.text.java.hover.AbstractReusableInformationControlCreator#doCreateInformationControl(org.eclipse.swt.widgets.Shell) */ @Override public IInformationControl doCreateInformationControl(Shell parent) { return new NLSHoverControl(parent, EditorsUI.getTooltipAffordanceString()) { /* * @see org.eclipse.jface.text.IInformationControlExtension5#getInformationPresenterControlCreator() */ @Override public IInformationControlCreator getInformationPresenterControlCreator() { return fPresenterControlCreator; } }; } } /** * The hover control creator. * * @since 3.5 */ private IInformationControlCreator fHoverControlCreator; /** * The presentation control creator. * * @since 3.5 */ private IInformationControlCreator fPresenterControlCreator; /* * @see ITextHoverExtension#getHoverControlCreator() * @since 3.5 */ @Override public IInformationControlCreator getHoverControlCreator() { if (fHoverControlCreator == null) fHoverControlCreator= new HoverControlCreator(getInformationPresenterControlCreator()); return fHoverControlCreator; } /* * @see org.eclipse.jface.text.information.IInformationProviderExtension2#getInformationPresenterControlCreator() * @since 3.5 */ @Override public IInformationControlCreator getInformationPresenterControlCreator() { if (fPresenterControlCreator == null) fPresenterControlCreator= new PresenterControlCreator(); return fPresenterControlCreator; } /** * Action that opens the current hover NLS string in properties file. * * @since 3.5 */ private static final class OpenPropertiesFileAction extends Action { /** * The NLS hover control. */ private NLSHoverControl fControl; /** * Creates the action for opening properties file. 
* * @param control the NLS hover control */ public OpenPropertiesFileAction(NLSHoverControl control) { fControl= control; setText(JavaHoverMessages.NLSStringHover_open_in_properties_file); JavaPluginImages.setLocalImageDescriptors(this, "goto_input.gif"); //$NON-NLS-1$ } /* * @see org.eclipse.jface.action.Action#run() */ @Override public void run() { NLSHoverControlInput input= fControl.getInput(); NLSKeyHyperlink.openKeyInPropertiesFile(input.fKeyName, input.fpropertiesFile, input.fActiveEditor); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package groovy.util;

import groovy.lang.Binding;
import groovy.lang.GroovyClassLoader;
import groovy.lang.GroovyCodeSource;
import groovy.lang.GroovyResourceLoader;
import groovy.lang.Script;
import org.codehaus.groovy.GroovyBugError;
import org.codehaus.groovy.ast.ClassHelper;
import org.codehaus.groovy.ast.ClassNode;
import org.codehaus.groovy.classgen.GeneratorContext;
import org.codehaus.groovy.control.ClassNodeResolver;
import org.codehaus.groovy.control.CompilationFailedException;
import org.codehaus.groovy.control.CompilationUnit;
import org.codehaus.groovy.control.CompilerConfiguration;
import org.codehaus.groovy.control.Phases;
import org.codehaus.groovy.control.SourceUnit;
import org.codehaus.groovy.runtime.IOGroovyMethods;
import org.codehaus.groovy.runtime.InvokerHelper;
import org.codehaus.groovy.tools.gse.DependencyTracker;
import org.codehaus.groovy.tools.gse.StringSetMap;
import org.codehaus.groovy.vmplugin.VMPluginFactory;

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.ref.WeakReference;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.security.CodeSource;
import java.security.PrivilegedAction;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Specific script engine able to reload modified scripts as well as dealing properly
 * with dependent scripts.
 */
public class GroovyScriptEngine implements ResourceConnector {

    // Sentinel classloader used by constructors that want "the classloader that
    // loaded GroovyScriptEngine" to be substituted in later (see private constructor).
    private static final ClassLoader CL_STUB = VMPluginFactory.getPlugin().doPrivileged((PrivilegedAction<ClassLoader>) () -> new ClassLoader() {});

    private static final URL[] EMPTY_URL_ARRAY = new URL[0];

    // Per-compilation scratch state shared between parseClass() and the
    // CompilationUnit customizations installed in createCompilationUnit().
    private static class LocalData {
        CompilationUnit cu;
        final StringSetMap dependencyCache = new StringSetMap();
        final Map<String, String> precompiledEntries = new HashMap<String, String>();
    }

    // Weakly held so the ThreadLocal can be collected when no compilation holds it.
    private static WeakReference<ThreadLocal<LocalData>> localData = new WeakReference<ThreadLocal<LocalData>>(null);

    // Returns the shared ThreadLocal holder, re-creating it if it was collected.
    private static synchronized ThreadLocal<LocalData> getLocalData() {
        ThreadLocal<LocalData> local = localData.get();
        if (local != null) return local;
        local = new ThreadLocal<LocalData>();
        localData = new WeakReference<ThreadLocal<LocalData>>(local);
        return local;
    }

    private final URL[] roots;
    private final ResourceConnector rc;
    private final ClassLoader parentLoader;
    private GroovyClassLoader groovyLoader;
    // Maps a script's external URL path to its cache entry.
    private final Map<String, ScriptCacheEntry> scriptCache = new ConcurrentHashMap<String, ScriptCacheEntry>();
    private CompilerConfiguration config;

    {
        config = new CompilerConfiguration(CompilerConfiguration.DEFAULT);
        config.setSourceEncoding(CompilerConfiguration.DEFAULT_SOURCE_ENCODING);
    }

    //TODO: more finals?

    // Immutable snapshot of a compiled script plus its freshness bookkeeping.
    private static class ScriptCacheEntry {
        private final Class scriptClass;
        private final long lastModified, lastCheck;
        private final Set<String> dependencies;
        private final boolean sourceNewer;

        public ScriptCacheEntry(Class clazz, long modified, long lastCheck, Set<String> depend, boolean sourceNewer) {
            this.scriptClass = clazz;
            this.lastModified = modified;
            this.lastCheck = lastCheck;
            this.dependencies = depend;
            this.sourceNewer = sourceNewer;
        }

        public ScriptCacheEntry(ScriptCacheEntry old, long lastCheck, boolean sourceNewer) {
            this(old.scriptClass, old.lastModified, lastCheck, old.dependencies, sourceNewer);
        }
    }

    // GroovyClassLoader specialization that resolves dependent scripts through the
    // engine's ResourceConnector and records inter-script dependencies while compiling.
    private class ScriptClassLoader extends GroovyClassLoader {

        public ScriptClassLoader(GroovyClassLoader loader) {
            super(loader);
        }

        public ScriptClassLoader(ClassLoader loader, CompilerConfiguration config) {
            super(loader, config, false);
            setResLoader();
        }

        // Installs a resource loader that first tries the engine's roots for each
        // configured script extension before delegating to the previous loader.
        private void setResLoader() {
            final GroovyResourceLoader rl = getResourceLoader();
            setResourceLoader(className -> {
                String filename;
                for (String extension : getConfig().getScriptExtensions()) {
                    filename = className.replace('.', File.separatorChar) + "." + extension;
                    try {
                        URLConnection dependentScriptConn = rc.getResourceConnection(filename);
                        return dependentScriptConn.getURL();
                    } catch (ResourceException e) {
                        //TODO: maybe do something here?
                    }
                }
                return rl.loadGroovySource(className);
            });
        }

        @Override
        protected CompilationUnit createCompilationUnit(CompilerConfiguration configuration, CodeSource source) {
            CompilationUnit cu = super.createCompilationUnit(configuration, source);
            LocalData local = getLocalData().get();
            local.cu = cu;
            final StringSetMap cache = local.dependencyCache;
            final Map<String, String> precompiledEntries = local.precompiledEntries;

            // "." is used to transfer compilation dependencies, which will be
            // recollected later during compilation
            for (String depSourcePath : cache.get(".")) {
                try {
                    cache.get(depSourcePath);
                    cu.addSource(getResourceConnection(depSourcePath).getURL());
                } catch (ResourceException e) {
                    /* ignore */
                }
            }

            // remove all old entries including the "." entry
            cache.clear();

            cu.addPhaseOperation((final SourceUnit sourceUnit, final GeneratorContext context, final ClassNode classNode) -> {
                // GROOVY-4013: If it is an inner class, tracking its dependencies doesn't really
                // serve any purpose and also interferes with the caching done to track dependencies
                if (classNode.getOuterClass() != null) return;
                DependencyTracker dt = new DependencyTracker(sourceUnit, cache, precompiledEntries);
                dt.visitClass(classNode);
            }, Phases.CLASS_GENERATION);

            cu.setClassNodeResolver(new ClassNodeResolver() {
                @Override
                public LookupResult findClassNode(String origName, CompilationUnit compilationUnit) {
                    CompilerConfiguration cc = compilationUnit.getConfiguration();
                    String name = origName.replace('.', '/');
                    for (String ext : cc.getScriptExtensions()) {
                        try {
                            String finalName = name + "." + ext;
                            URLConnection conn = rc.getResourceConnection(finalName);
                            URL url = conn.getURL();
                            String path = url.toExternalForm();
                            ScriptCacheEntry entry = scriptCache.get(path);
                            Class clazz = null;
                            if (entry != null) clazz = entry.scriptClass;
                            if (GroovyScriptEngine.this.isSourceNewer(entry)) {
                                // Source changed (or never compiled): feed it into this compilation.
                                try {
                                    SourceUnit su = compilationUnit.addSource(url);
                                    return new LookupResult(su, null);
                                } finally {
                                    forceClose(conn);
                                }
                            } else {
                                // Cached class is still current; remember its path for dependency tracking.
                                precompiledEntries.put(origName, path);
                            }
                            if (clazz != null) {
                                ClassNode cn = ClassHelper.make(clazz);
                                return new LookupResult(null, cn);
                            }
                        } catch (ResourceException re) {
                            // skip
                        }
                    }
                    return super.findClassNode(origName, compilationUnit);
                }
            });
            return cu;
        }

        @Override
        public Class parseClass(GroovyCodeSource codeSource, boolean shouldCacheSource) throws CompilationFailedException {
            synchronized (sourceCache) {
                return doParseClass(codeSource);
            }
        }

        private Class<?> doParseClass(GroovyCodeSource codeSource) {
            // local is kept as hard reference to avoid garbage collection
            ThreadLocal<LocalData> localTh = getLocalData();
            LocalData localData = new LocalData();
            localTh.set(localData);
            StringSetMap cache = localData.dependencyCache;
            Class<?> answer = null;
            try {
                updateLocalDependencyCache(codeSource, localData);
                answer = super.parseClass(codeSource, false);
                updateScriptCache(localData);
            } finally {
                cache.clear();
                localTh.remove();
            }
            return answer;
        }

        private void updateLocalDependencyCache(GroovyCodeSource codeSource, LocalData localData) {
            // we put the old dependencies into local cache so createCompilationUnit
            // can pick it up. We put that entry under the name "."
            ScriptCacheEntry origEntry = scriptCache.get(codeSource.getName());
            Set<String> origDep = null;
            if (origEntry != null) origDep = origEntry.dependencies;
            if (origDep != null) {
                Set<String> newDep = new HashSet<String>(origDep.size());
                for (String depName : origDep) {
                    ScriptCacheEntry dep = scriptCache.get(depName);
                    // Keep only the script itself and dependencies whose source changed.
                    if (origEntry == dep || GroovyScriptEngine.this.isSourceNewer(dep)) {
                        newDep.add(depName);
                    }
                }
                StringSetMap cache = localData.dependencyCache;
                cache.put(".", newDep);
            }
        }

        // Publishes freshly compiled classes and their (transitive) dependencies
        // into the engine-wide scriptCache.
        private void updateScriptCache(LocalData localData) {
            StringSetMap cache = localData.dependencyCache;
            cache.makeTransitiveHull();
            long time = getCurrentTime();
            Set<String> entryNames = new HashSet<String>();
            for (Map.Entry<String, Set<String>> entry : cache.entrySet()) {
                String className = entry.getKey();
                Class clazz = getClassCacheEntry(className);
                if (clazz == null) continue;

                String entryName = getPath(clazz, localData.precompiledEntries);
                if (entryNames.contains(entryName)) continue;
                entryNames.add(entryName);
                Set<String> value = convertToPaths(entry.getValue(), localData.precompiledEntries);
                long lastModified;
                try {
                    lastModified = getLastModified(entryName);
                } catch (ResourceException e) {
                    // Resource disappeared mid-compilation: fall back to "now".
                    lastModified = time;
                }
                ScriptCacheEntry cacheEntry = new ScriptCacheEntry(clazz, lastModified, time, value, false);
                scriptCache.put(entryName, cacheEntry);
            }
        }

        // Maps a class to the path of its source: either from the current
        // compilation unit or from the precompiled-entries map.
        private String getPath(Class clazz, Map<String, String> precompiledEntries) {
            CompilationUnit cu = getLocalData().get().cu;
            String name = clazz.getName();
            ClassNode classNode = cu.getClassNode(name);
            if (classNode == null) {
                // this is a precompiled class!
                String path = precompiledEntries.get(name);
                if (path == null) throw new GroovyBugError("Precompiled class " + name + " should be available in precompiled entries map, but was not.");
                return path;
            } else {
                return classNode.getModule().getContext().getName();
            }
        }

        // Converts a set of class names into their source paths, skipping
        // classes that are not (or no longer) in the class cache.
        private Set<String> convertToPaths(Set<String> orig, Map<String, String> precompiledEntries) {
            Set<String> ret = new HashSet<String>();
            for (String className : orig) {
                Class clazz = getClassCacheEntry(className);
                if (clazz == null) continue;
                ret.add(getPath(clazz, precompiledEntries));
            }
            return ret;
        }
    }

    /**
     * Simple testing harness for the GSE. Enter script roots as arguments and
     * then input script names to run them.
     *
     * @param urls an array of URLs
     * @throws Exception if something goes wrong
     */
    public static void main(String[] urls) throws Exception {
        GroovyScriptEngine gse = new GroovyScriptEngine(urls);
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        String line;
        while (true) {
            System.out.print("groovy> ");
            if ((line = br.readLine()) == null || line.equals("quit")) {
                break;
            }
            try {
                System.out.println(gse.run(line, new Binding()));
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Initialize a new GroovyClassLoader with a default or
     * constructor-supplied parentClassLoader.
     *
     * @return the class loader that will be used to load scripts, with all
     *         roots registered as URLs
     */
    private GroovyClassLoader initGroovyLoader() {
        GroovyClassLoader groovyClassLoader =
                VMPluginFactory.getPlugin().doPrivileged((PrivilegedAction<ScriptClassLoader>) () -> {
                    if (parentLoader instanceof GroovyClassLoader) {
                        return new ScriptClassLoader((GroovyClassLoader) parentLoader);
                    } else {
                        return new ScriptClassLoader(parentLoader, config);
                    }
                });
        for (URL root : roots) groovyClassLoader.addURL(root);
        return groovyClassLoader;
    }

    /**
     * Get a resource connection as a <code>URLConnection</code> to retrieve a script
     * from the <code>ResourceConnector</code>.
     *
     * @param resourceName name of the resource to be retrieved
     * @return a URLConnection to the resource
     * @throws ResourceException if the resource cannot be found under any root
     */
    @Override
    public URLConnection getResourceConnection(String resourceName) throws ResourceException {
        // Get the URLConnection
        URLConnection groovyScriptConn = null;

        ResourceException se = null;
        for (URL root : roots) {
            URL scriptURL = null;
            try {
                scriptURL = new URL(root, resourceName);
                groovyScriptConn = openConnection(scriptURL);

                break; // Now this is a bit unusual
            } catch (MalformedURLException e) {
                String message = "Malformed URL: " + root + ", " + resourceName;
                // Chain the failures so all attempted roots are reported.
                if (se == null) {
                    se = new ResourceException(message);
                } else {
                    se = new ResourceException(message, se);
                }
            } catch (IOException e1) {
                String message = "Cannot open URL: " + root + resourceName;
                groovyScriptConn = null;
                if (se == null) {
                    se = new ResourceException(message);
                } else {
                    se = new ResourceException(message, se);
                }
            }
        }

        if (se == null) se = new ResourceException("No resource for " + resourceName + " was found");

        // If we didn't find anything, report on all the exceptions that occurred.
        if (groovyScriptConn == null) throw se;

        return groovyScriptConn;
    }

    private static URLConnection openConnection(URL scriptURL) throws IOException {
        URLConnection urlConnection = scriptURL.openConnection();
        verifyInputStream(urlConnection);

        // NOTE: a second connection is opened on purpose — verifyInputStream()
        // above consumed and closed the first connection's stream, so callers
        // get a fresh, readable connection.
        return scriptURL.openConnection();
    }

    /**
     * This method closes a {@link URLConnection} by getting its {@link InputStream} and calling the
     * {@link InputStream#close()} method on it. The {@link URLConnection} doesn't have a close() method
     * and relies on garbage collection to close the underlying connection to the file.
     * Relying on garbage collection could lead to the application exhausting the number of files the
     * user is allowed to have open at any one point in time and cause the application to crash
     * ({@link java.io.FileNotFoundException} (Too many open files)).
     * Hence the need for this method to explicitly close the underlying connection to the file.
     *
     * @param urlConnection the {@link URLConnection} to be "closed" to close the underlying file descriptors.
     */
    private static void forceClose(URLConnection urlConnection) {
        if (urlConnection != null) {
            // We need to get the input stream and close it to force the open
            // file descriptor to be released. Otherwise, we will reach the limit
            // for number of files open at one time.

            try {
                verifyInputStream(urlConnection);
            } catch (Exception e) {
                // Do nothing: We were not going to use it anyway.
            }
        }
    }

    // Opens and immediately closes the connection's stream; verifies the resource
    // is readable and releases the underlying file descriptor.
    private static void verifyInputStream(URLConnection urlConnection) throws IOException {
        try (InputStream in = urlConnection.getInputStream()) {
        }
    }

    /**
     * The groovy script engine will run groovy scripts and reload them and
     * their dependencies when they are modified. This is useful for embedding
     * groovy in other containers like games and application servers.
     *
     * @param roots This is an array of URLs where Groovy scripts will be stored. They should
     *              be laid out using their package structure like Java classes
     */
    private GroovyScriptEngine(URL[] roots, ClassLoader parent, ResourceConnector rc) {
        if (roots == null) roots = EMPTY_URL_ARRAY;
        this.roots = roots;
        if (rc == null) rc = this;
        this.rc = rc;
        // CL_STUB marks "no explicit parent given": use this class's own loader.
        if (parent == CL_STUB) parent = this.getClass().getClassLoader();
        this.parentLoader = parent;
        this.groovyLoader = initGroovyLoader();
    }

    public GroovyScriptEngine(URL[] roots) {
        this(roots, CL_STUB, null);
    }

    public GroovyScriptEngine(URL[] roots, ClassLoader parentClassLoader) {
        this(roots, parentClassLoader, null);
    }

    public GroovyScriptEngine(String[] urls) throws IOException {
        this(createRoots(urls), CL_STUB, null);
    }

    // Converts the given strings into root URLs, treating strings without a
    // "://" scheme separator as file paths.
    private static URL[] createRoots(String[] urls) throws MalformedURLException {
        if (urls == null) return null;
        URL[] roots = new URL[urls.length];
        for (int i = 0; i < roots.length; i++) {
            if (urls[i].contains("://")) {
                roots[i] = new URL(urls[i]);
            } else {
                roots[i] = new File(urls[i]).toURI().toURL();
            }
        }
        return roots;
    }

    public GroovyScriptEngine(String[] urls, ClassLoader parentClassLoader) throws IOException {
        this(createRoots(urls), parentClassLoader, null);
    }

    public GroovyScriptEngine(String url) throws IOException {
        this(new String[]{url});
    }

    public GroovyScriptEngine(String url, ClassLoader parentClassLoader) throws IOException {
        this(new String[]{url}, parentClassLoader);
    }

    public GroovyScriptEngine(ResourceConnector rc) {
        this(null, CL_STUB, rc);
    }

    public GroovyScriptEngine(ResourceConnector rc, ClassLoader parentClassLoader) {
        this(null, parentClassLoader, rc);
    }

    /**
     * Get the <code>ClassLoader</code> that will serve as the parent ClassLoader of the
     * {@link GroovyClassLoader} in which scripts will be executed. By default, this is the
     * ClassLoader that loaded the <code>GroovyScriptEngine</code> class.
     *
     * @return the parent classloader used to load scripts
     */
    public ClassLoader getParentClassLoader() {
        return parentLoader;
    }

    /**
     * Get the class of the scriptName in question, so that you can instantiate
     * Groovy objects with caching and reloading.
     *
     * @param scriptName resource name pointing to the script
     * @return the loaded scriptName as a compiled class
     * @throws ResourceException if there is a problem accessing the script
     * @throws ScriptException if there is a problem parsing the script
     */
    public Class loadScriptByName(String scriptName) throws ResourceException, ScriptException {
        URLConnection conn = rc.getResourceConnection(scriptName);
        String path = conn.getURL().toExternalForm();
        ScriptCacheEntry entry = scriptCache.get(path);
        Class clazz = null;
        if (entry != null) clazz = entry.scriptClass;
        try {
            // Recompile only when the source (or a dependency) changed.
            if (isSourceNewer(entry)) {
                try {
                    String encoding = conn.getContentEncoding() != null ? conn.getContentEncoding() : config.getSourceEncoding();
                    String content = IOGroovyMethods.getText(conn.getInputStream(), encoding);
                    clazz = groovyLoader.parseClass(content, path);
                } catch (IOException e) {
                    throw new ResourceException(e);
                }
            }
        } finally {
            forceClose(conn);
        }
        return clazz;
    }

    /**
     * Run a script identified by name with a single argument.
     *
     * @param scriptName name of the script to run
     * @param argument a single argument passed as a variable named <code>arg</code> in the binding
     * @return a <code>toString()</code> representation of the result of the execution of the script
     * @throws ResourceException if there is a problem accessing the script
     * @throws ScriptException if there is a problem parsing the script
     */
    public String run(String scriptName, String argument) throws ResourceException, ScriptException {
        Binding binding = new Binding();
        binding.setVariable("arg", argument);
        Object result = run(scriptName, binding);
        return result == null ? "" : result.toString();
    }

    /**
     * Run a script identified by name with a given binding.
     *
     * @param scriptName name of the script to run
     * @param binding the binding to pass to the script
     * @return an object
     * @throws ResourceException if there is a problem accessing the script
     * @throws ScriptException if there is a problem parsing the script
     */
    public Object run(String scriptName, Binding binding) throws ResourceException, ScriptException {
        return createScript(scriptName, binding).run();
    }

    /**
     * Creates a Script with a given scriptName and binding.
     *
     * @param scriptName name of the script to run
     * @param binding the binding to pass to the script
     * @return the script object
     * @throws ResourceException if there is a problem accessing the script
     * @throws ScriptException if there is a problem parsing the script
     */
    public Script createScript(String scriptName, Binding binding) throws ResourceException, ScriptException {
        return InvokerHelper.createScript(loadScriptByName(scriptName), binding);
    }

    // Returns the last-modified timestamp of the named script resource.
    private long getLastModified(String scriptName) throws ResourceException {
        URLConnection conn = rc.getResourceConnection(scriptName);
        long lastMod = 0;
        try {
            lastMod = conn.getLastModified();
        } finally {
            // getResourceConnection() opening the inputstream, let's ensure all streams are closed
            forceClose(conn);
        }
        return lastMod;
    }

    // Decides whether the script behind the cache entry (or one of its
    // dependencies) must be recompiled; updates dependency entries as a side effect.
    protected boolean isSourceNewer(ScriptCacheEntry entry) {
        if (entry == null) return true;

        long mainEntryLastCheck = entry.lastCheck;
        long now = 0;

        boolean returnValue = false;
        for (String scriptName : entry.dependencies) {
            ScriptCacheEntry depEntry = scriptCache.get(scriptName);
            if (depEntry.sourceNewer) return true;

            // check if maybe dependency was recompiled, but this one here not
            if (mainEntryLastCheck < depEntry.lastModified) {
                returnValue = true;
                continue;
            }

            // Throttle on-disk checks via the minimum recompilation interval.
            if (now == 0) now = getCurrentTime();
            long nextSourceCheck = depEntry.lastCheck + config.getMinimumRecompilationInterval();
            if (nextSourceCheck > now) continue;

            long lastMod;
            try {
                lastMod = getLastModified(scriptName);
            } catch (ResourceException e) {
                /*
                Class A depends on class B and they both are compiled once. If class A
                is then loaded again from loadScriptByName(scriptName) after class B
                and all references to it have been deleted from the root, this
                exception will occur. It is still valid and necessary to attempt
                a recompile of class A.
                */
                return true;
            }
            if (depEntry.lastModified < lastMod) {
                depEntry = new ScriptCacheEntry(depEntry, lastMod, true);
                scriptCache.put(scriptName, depEntry);
                returnValue = true;
            } else {
                depEntry = new ScriptCacheEntry(depEntry, now, false);
                scriptCache.put(scriptName, depEntry);
            }
        }

        return returnValue;
    }

    /**
     * Returns the GroovyClassLoader associated with this script engine instance.
     * Useful if you need to pass the class loader to another library.
     *
     * @return the GroovyClassLoader
     */
    public GroovyClassLoader getGroovyClassLoader() {
        return groovyLoader;
    }

    /**
     * @return a non null compiler configuration
     */
    public CompilerConfiguration getConfig() {
        return config;
    }

    /**
     * sets a compiler configuration
     *
     * @param config - the compiler configuration
     * @throws NullPointerException if config is null
     */
    public void setConfig(CompilerConfiguration config) {
        if (config == null) throw new NullPointerException("configuration cannot be null");
        this.config = config;
        // A new configuration requires a freshly initialized class loader.
        this.groovyLoader = initGroovyLoader();
    }

    // Overridable for tests that need a deterministic clock.
    protected long getCurrentTime() {
        return System.currentTimeMillis();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.core.fs;

import org.apache.flink.core.testutils.CheckedThread;
import org.apache.flink.util.AbstractAutoCloseableRegistry;
import org.apache.flink.util.ExceptionUtils;

import org.junit.After;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

import java.io.Closeable;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;

/** Tests for the {@link SafetyNetCloseableRegistry}. */
public class SafetyNetCloseableRegistryTest
        extends AbstractAutoCloseableRegistryTest<
                Closeable,
                WrappingProxyCloseable<? extends Closeable>,
                SafetyNetCloseableRegistry.PhantomDelegatingCloseableRef> {

    @Rule public final TemporaryFolder tmpFolder = new TemporaryFolder();

    // Wraps the plain Closeable into a WrappingProxyCloseable so it can be
    // registered with the safety-net registry under test.
    @Override
    protected void registerCloseable(final Closeable closeable) throws IOException {
        final WrappingProxyCloseable<Closeable> wrappingProxyCloseable =
                new WrappingProxyCloseable<Closeable>() {

                    @Override
                    public void close() throws IOException {
                        closeable.close();
                    }

                    @Override
                    public Closeable getWrappedDelegate() {
                        return closeable;
                    }
                };
        closeableRegistry.registerCloseable(wrappingProxyCloseable);
    }

    @Override
    protected AbstractAutoCloseableRegistry<
                    Closeable,
                    WrappingProxyCloseable<? extends Closeable>,
                    SafetyNetCloseableRegistry.PhantomDelegatingCloseableRef,
                    IOException>
            createRegistry() {
        // SafetyNetCloseableRegistry has a global reaper thread to reclaim leaking resources,
        // in normal cases, that thread will be interrupted in closing of last active registry
        // and then shutdown in background. But in testing codes, some assertions need leaking
        // resources reclaimed, so we override reaper thread to join itself on interrupt. Thus,
        // after close of last active registry, we can assert post-close-invariants safely.
        return new SafetyNetCloseableRegistry(JoinOnInterruptReaperThread::new);
    }

    @Override
    protected AbstractAutoCloseableRegistryTest.ProducerThread<
                    Closeable,
                    WrappingProxyCloseable<? extends Closeable>,
                    SafetyNetCloseableRegistry.PhantomDelegatingCloseableRef>
            createProducerThread(
                    AbstractAutoCloseableRegistry<
                                    Closeable,
                                    WrappingProxyCloseable<? extends Closeable>,
                                    SafetyNetCloseableRegistry.PhantomDelegatingCloseableRef,
                                    IOException>
                            registry,
                    AtomicInteger unclosedCounter,
                    int maxStreams) {

        return new AbstractAutoCloseableRegistryTest.ProducerThread<
                Closeable,
                WrappingProxyCloseable<? extends Closeable>,
                SafetyNetCloseableRegistry.PhantomDelegatingCloseableRef>(
                registry, unclosedCounter, maxStreams) {

            int count = 0;

            @Override
            protected void createAndRegisterStream() throws IOException {
                String debug = Thread.currentThread().getName() + " " + count;
                TestStream testStream = new TestStream(refCount);

                // this method automatically registers the stream with the given registry.
                @SuppressWarnings("unused")
                ClosingFSDataInputStream pis =
                        ClosingFSDataInputStream.wrapSafe(
                                testStream,
                                (SafetyNetCloseableRegistry) registry,
                                debug); // reference dies here
                ++count;
            }
        };
    }

    // Every test must leave the global reaper thread stopped.
    @After
    public void tearDown() {
        Assert.assertFalse(SafetyNetCloseableRegistry.isReaperThreadRunning());
    }

    @Test
    public void testCorrectScopesForSafetyNet() throws Exception {
        CheckedThread t1 =
                new CheckedThread() {

                    @Override
                    public void go() throws Exception {
                        try {
                            FileSystem fs1 = FileSystem.getLocalFileSystem();
                            // ensure no safety net in place
                            Assert.assertFalse(fs1 instanceof SafetyNetWrapperFileSystem);
                            FileSystemSafetyNet.initializeSafetyNetForThread();
                            fs1 = FileSystem.getLocalFileSystem();
                            // ensure safety net is in place now
                            Assert.assertTrue(fs1 instanceof SafetyNetWrapperFileSystem);

                            Path tmp =
                                    new Path(tmpFolder.newFolder().toURI().toString(), "test_file");

                            try (FSDataOutputStream stream =
                                    fs1.create(tmp, FileSystem.WriteMode.NO_OVERWRITE)) {
                                CheckedThread t2 =
                                        new CheckedThread() {
                                            @Override
                                            public void go() {
                                                FileSystem fs2 = FileSystem.getLocalFileSystem();
                                                // ensure the safety net does not leak here
                                                Assert.assertFalse(
                                                        fs2 instanceof SafetyNetWrapperFileSystem);
                                                FileSystemSafetyNet.initializeSafetyNetForThread();
                                                fs2 = FileSystem.getLocalFileSystem();
                                                // ensure we can bring another safety net in place
                                                Assert.assertTrue(
                                                        fs2 instanceof SafetyNetWrapperFileSystem);
                                                FileSystemSafetyNet
                                                        .closeSafetyNetAndGuardedResourcesForThread();
                                                fs2 = FileSystem.getLocalFileSystem();
                                                // and that we can remove it again
                                                Assert.assertFalse(
                                                        fs2 instanceof SafetyNetWrapperFileSystem);
                                            }
                                        };
                                t2.start();
                                t2.sync();

                                // ensure stream is still open and was never closed by any
                                // interferences
                                stream.write(42);
                                FileSystemSafetyNet.closeSafetyNetAndGuardedResourcesForThread();

                                // ensure leaking stream was closed
                                try {
                                    stream.write(43);
                                    Assert.fail();
                                } catch (IOException ignore) {
                                }

                                fs1 = FileSystem.getLocalFileSystem();
                                // ensure safety net was removed
                                Assert.assertFalse(fs1 instanceof SafetyNetWrapperFileSystem);
                            } finally {
                                fs1.delete(tmp, false);
                            }
                        } catch (Exception e) {
                            Assert.fail(ExceptionUtils.stringifyException(e));
                        }
                    }
                };
        t1.start();
        t1.sync();
    }

    @Test
    public void testSafetyNetClose() throws Exception {
        setup(20);
        startThreads();

        joinThreads();

        // Give the phantom-reference reaper a few GC cycles to reclaim leaked streams.
        for (int i = 0; i < 5 && unclosedCounter.get() > 0; ++i) {
            System.gc();
            Thread.sleep(50);
        }

        Assert.assertEquals(0, unclosedCounter.get());
        closeableRegistry.close();
    }

    @Test
    public void testReaperThreadSpawnAndStop() throws Exception {
        Assert.assertFalse(SafetyNetCloseableRegistry.isReaperThreadRunning());

        // The reaper thread is shared: it lives as long as at least one registry is open.
        try (SafetyNetCloseableRegistry ignored = new SafetyNetCloseableRegistry()) {
            Assert.assertTrue(SafetyNetCloseableRegistry.isReaperThreadRunning());

            try (SafetyNetCloseableRegistry ignored2 = new SafetyNetCloseableRegistry()) {
                Assert.assertTrue(SafetyNetCloseableRegistry.isReaperThreadRunning());
            }
            Assert.assertTrue(SafetyNetCloseableRegistry.isReaperThreadRunning());
        }
        Assert.assertFalse(SafetyNetCloseableRegistry.isReaperThreadRunning());
    }

    /**
     * Test whether failure to start thread in {@link SafetyNetCloseableRegistry} constructor can
     * lead to failure of subsequent state check.
     */
    @Test
    public void testReaperThreadStartFailed() throws Exception {

        try {
            new SafetyNetCloseableRegistry(() -> new OutOfMemoryReaperThread());
        } catch (java.lang.OutOfMemoryError error) {
            // expected: the injected reaper thread fails to start
        }
        Assert.assertFalse(SafetyNetCloseableRegistry.isReaperThreadRunning());

        // the OOM error will not lead to failure of subsequent constructor call.
SafetyNetCloseableRegistry closeableRegistry = new SafetyNetCloseableRegistry(); Assert.assertTrue(SafetyNetCloseableRegistry.isReaperThreadRunning()); closeableRegistry.close(); } private static class JoinOnInterruptReaperThread extends SafetyNetCloseableRegistry.CloseableReaperThread { @Override public void interrupt() { super.interrupt(); try { join(); } catch (InterruptedException ex) { Thread.currentThread().interrupt(); } } } private static class OutOfMemoryReaperThread extends SafetyNetCloseableRegistry.CloseableReaperThread { @Override public synchronized void start() { throw new java.lang.OutOfMemoryError(); } } }
// This file was generated by Mendix Business Modeler.
//
// WARNING: Code you write here will be lost the next time you deploy the project.

package recaptcha.proxies;

import com.mendix.core.Core;
import com.mendix.core.CoreException;
import com.mendix.systemwideinterfaces.core.IContext;
import com.mendix.systemwideinterfaces.core.IMendixIdentifier;
import com.mendix.systemwideinterfaces.core.IMendixObject;

/**
 * This key is used to display the recaptcha widget on your site. By default the Google test key is used.
 *
 * <p>Generated proxy around the {@code reCAPTCHA.SiteKey} Mendix entity; all getters/setters
 * delegate to the wrapped {@link IMendixObject}.
 */
public class SiteKey {
    // The wrapped Mendix runtime object that actually stores the attribute values.
    private final IMendixObject siteKeyMendixObject;

    // Context captured at construction; used by the no-context get/set/commit/delete overloads.
    private final IContext context;

    /**
     * Internal name of this entity
     */
    public static final String entityName = "reCAPTCHA.SiteKey";

    /**
     * Enum describing members of this entity
     */
    public enum MemberNames {
        sitekey("sitekey");

        // Name of the member as known to the Mendix meta-model.
        private String metaName;

        MemberNames(String s) {
            metaName = s;
        }

        @Override
        public String toString() {
            return metaName;
        }
    }

    /**
     * Creates a proxy around a freshly instantiated {@code reCAPTCHA.SiteKey} object.
     */
    public SiteKey(IContext context) {
        this(context, Core.instantiate(context, "reCAPTCHA.SiteKey"));
    }

    /**
     * Wraps an existing runtime object; rejects null and objects of a different entity type.
     */
    protected SiteKey(IContext context, IMendixObject siteKeyMendixObject) {
        if (siteKeyMendixObject == null)
            throw new IllegalArgumentException("The given object cannot be null.");
        if (!Core.isSubClassOf("reCAPTCHA.SiteKey", siteKeyMendixObject.getType()))
            throw new IllegalArgumentException("The given object is not a reCAPTCHA.SiteKey");

        this.siteKeyMendixObject = siteKeyMendixObject;
        this.context = context;
    }

    /**
     * @deprecated Use 'SiteKey.load(IContext, IMendixIdentifier)' instead.
     */
    @Deprecated
    public static recaptcha.proxies.SiteKey initialize(IContext context, IMendixIdentifier mendixIdentifier) throws CoreException {
        return recaptcha.proxies.SiteKey.load(context, mendixIdentifier);
    }

    /**
     * Initialize a proxy using context (recommended). This context will be used for security checking when the get- and set-methods without context parameters are called.
     * The get- and set-methods with context parameter should be used when for instance sudo access is necessary (IContext.getSudoContext() can be used to obtain sudo access).
     */
    public static recaptcha.proxies.SiteKey initialize(IContext context, IMendixObject mendixObject) {
        return new recaptcha.proxies.SiteKey(context, mendixObject);
    }

    /**
     * Loads the object with the given identifier from the Mendix runtime and wraps it.
     */
    public static recaptcha.proxies.SiteKey load(IContext context, IMendixIdentifier mendixIdentifier) throws CoreException {
        IMendixObject mendixObject = Core.retrieveId(context, mendixIdentifier);
        return recaptcha.proxies.SiteKey.initialize(context, mendixObject);
    }

    /**
     * Retrieves all {@code reCAPTCHA.SiteKey} objects matching the given XPath constraint.
     */
    public static java.util.List<recaptcha.proxies.SiteKey> load(IContext context, String xpathConstraint) throws CoreException {
        java.util.List<recaptcha.proxies.SiteKey> result = new java.util.ArrayList<recaptcha.proxies.SiteKey>();
        for (IMendixObject obj : Core.retrieveXPathQuery(context, "//reCAPTCHA.SiteKey" + xpathConstraint))
            result.add(recaptcha.proxies.SiteKey.initialize(context, obj));
        return result;
    }

    /**
     * Commit the changes made on this proxy object.
     */
    public final void commit() throws CoreException {
        Core.commit(context, getMendixObject());
    }

    /**
     * Commit the changes made on this proxy object using the specified context.
     */
    public final void commit(IContext context) throws CoreException {
        Core.commit(context, getMendixObject());
    }

    /**
     * Delete the object.
     */
    public final void delete() {
        Core.delete(context, getMendixObject());
    }

    /**
     * Delete the object using the specified context.
     */
    public final void delete(IContext context) {
        Core.delete(context, getMendixObject());
    }

    /**
     * @return value of sitekey
     */
    public final String getsitekey() {
        return getsitekey(getContext());
    }

    /**
     * @param context
     * @return value of sitekey
     */
    public final String getsitekey(IContext context) {
        return (String) getMendixObject().getValue(context, MemberNames.sitekey.toString());
    }

    /**
     * Set value of sitekey
     * @param sitekey
     */
    public final void setsitekey(String sitekey) {
        setsitekey(getContext(), sitekey);
    }

    /**
     * Set value of sitekey
     * @param context
     * @param sitekey
     */
    public final void setsitekey(IContext context, String sitekey) {
        getMendixObject().setValue(context, MemberNames.sitekey.toString(), sitekey);
    }

    /**
     * @return the IMendixObject instance of this proxy for use in the Core interface.
     */
    public final IMendixObject getMendixObject() {
        return siteKeyMendixObject;
    }

    /**
     * @return the IContext instance of this proxy, or null if no IContext instance was specified at initialization.
     */
    public final IContext getContext() {
        return context;
    }

    /**
     * Two proxies are equal when they are of the same proxy class and wrap the same runtime object.
     */
    @Override
    public boolean equals(Object obj) {
        if (obj == this)
            return true;

        if (obj != null && getClass().equals(obj.getClass())) {
            final recaptcha.proxies.SiteKey that = (recaptcha.proxies.SiteKey) obj;
            return getMendixObject().equals(that.getMendixObject());
        }
        return false;
    }

    @Override
    public int hashCode() {
        // Consistent with equals: delegates to the wrapped runtime object.
        return getMendixObject().hashCode();
    }

    /**
     * @return String name of this class
     */
    public static String getType() {
        return "reCAPTCHA.SiteKey";
    }

    /**
     * @return String GUID from this object, format: ID_0000000000
     * @deprecated Use getMendixObject().getId().toLong() to get a unique identifier for this object.
     */
    @Deprecated
    public String getGUID() {
        return "ID_" + getMendixObject().getId().toLong();
    }
}
/**
 * Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.analytics.financial.instrument.future;

import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertFalse;
import static org.testng.AssertJUnit.assertTrue;

import org.testng.annotations.Test;
import org.threeten.bp.ZoneOffset;
import org.threeten.bp.ZonedDateTime;

import com.opengamma.analytics.financial.instrument.index.IndexON;
import com.opengamma.analytics.financial.instrument.index.IndexONMaster;
import com.opengamma.analytics.financial.interestrate.future.derivative.FederalFundsFutureSecurity;
import com.opengamma.analytics.financial.interestrate.future.derivative.FederalFundsFutureTransaction;
import com.opengamma.analytics.financial.schedule.ScheduleCalculator;
import com.opengamma.financial.convention.calendar.Calendar;
import com.opengamma.financial.convention.calendar.MondayToFridayCalendar;
import com.opengamma.timeseries.precise.zdt.ImmutableZonedDateTimeDoubleTimeSeries;
import com.opengamma.timeseries.precise.zdt.ZonedDateTimeDoubleTimeSeries;
import com.opengamma.util.test.TestGroup;
import com.opengamma.util.time.DateUtils;

/**
 * Tests related to the construction of Federal Funds Futures transactions.
 */
@Test(groups = TestGroup.UNIT)
public class FederalFundsFutureTransactionDefinitionTest {

  // Business-day calendar used by the underlying security definition.
  private static final Calendar NYC = new MondayToFridayCalendar("NYC");
  private static final IndexON INDEX_FEDFUND = IndexONMaster.getInstance().getIndex("FED FUND");
  private static final ZonedDateTime MARCH_1 = DateUtils.getUTCDate(2012, 3, 1);
  private static final double NOTIONAL = 5000000;
  // NOTE(review): "ACCURAL" is a typo for "ACCRUAL"; kept unchanged to match existing usage.
  private static final double PAYMENT_ACCURAL_FACTOR = 1.0 / 12.0;
  private static final String NAME = "FFH2";
  private static final ZonedDateTime TRADE_DATE = DateUtils.getUTCDate(2012, 2, 1);
  private static final double TRADE_PRICE = 0.99900;
  private static final int QUANTITY = 12;
  private static final FederalFundsFutureSecurityDefinition FUTURE_SECURITY_DEFINITION =
      FederalFundsFutureSecurityDefinition.from(MARCH_1, INDEX_FEDFUND, NOTIONAL, PAYMENT_ACCURAL_FACTOR, NAME, NYC);
  private static final FederalFundsFutureTransactionDefinition FUTURE_TRANSACTION_DEFINITION =
      new FederalFundsFutureTransactionDefinition(FUTURE_SECURITY_DEFINITION, QUANTITY, TRADE_DATE, TRADE_PRICE);
  private static final String CURVE_NAME = "OIS";

  /**
   * A null underlying security must be rejected by the constructor.
   */
  @Test(expectedExceptions = IllegalArgumentException.class)
  public void nullUnderlying() {
    new FederalFundsFutureTransactionDefinition(null, QUANTITY, TRADE_DATE, TRADE_PRICE);
  }

  /**
   * A null trade date must be rejected by the constructor.
   */
  @Test(expectedExceptions = IllegalArgumentException.class)
  public void nullTradeDate() {
    new FederalFundsFutureTransactionDefinition(FUTURE_SECURITY_DEFINITION, QUANTITY, null, TRADE_PRICE);
  }

  /**
   * Tests the getter methods.
   */
  @Test
  public void getter() {
    assertEquals("Fed fund future transaction definition: getter", FUTURE_SECURITY_DEFINITION, FUTURE_TRANSACTION_DEFINITION.getUnderlyingSecurity());
    assertEquals("Fed fund future transaction definition: getter", QUANTITY, FUTURE_TRANSACTION_DEFINITION.getQuantity());
    assertEquals("Fed fund future transaction definition: getter", TRADE_DATE, FUTURE_TRANSACTION_DEFINITION.getTradeDate());
    assertEquals("Fed fund future transaction definition: getter", TRADE_PRICE, FUTURE_TRANSACTION_DEFINITION.getTradePrice());
  }

  /**
   * Tests the equal and hashCode methods.
   */
  @Test
  public void equalHash() {
    assertTrue(FUTURE_TRANSACTION_DEFINITION.equals(FUTURE_TRANSACTION_DEFINITION));
    final FederalFundsFutureTransactionDefinition other =
        new FederalFundsFutureTransactionDefinition(FUTURE_SECURITY_DEFINITION, QUANTITY, TRADE_DATE, TRADE_PRICE);
    assertTrue(FUTURE_TRANSACTION_DEFINITION.equals(other));
    assertTrue(FUTURE_TRANSACTION_DEFINITION.hashCode() == other.hashCode());
    FederalFundsFutureTransactionDefinition modifiedFuture;
    // Changing any single field must break equality.
    final FederalFundsFutureSecurityDefinition otherSecurity =
        FederalFundsFutureSecurityDefinition.from(MARCH_1, INDEX_FEDFUND, NOTIONAL, PAYMENT_ACCURAL_FACTOR, "Other", NYC);
    modifiedFuture = new FederalFundsFutureTransactionDefinition(otherSecurity, QUANTITY, TRADE_DATE, TRADE_PRICE);
    assertFalse(FUTURE_TRANSACTION_DEFINITION.equals(modifiedFuture));
    modifiedFuture = new FederalFundsFutureTransactionDefinition(FUTURE_SECURITY_DEFINITION, QUANTITY + 1, TRADE_DATE, TRADE_PRICE);
    assertFalse(FUTURE_TRANSACTION_DEFINITION.equals(modifiedFuture));
    modifiedFuture = new FederalFundsFutureTransactionDefinition(FUTURE_SECURITY_DEFINITION, QUANTITY, TRADE_DATE.minusDays(1), TRADE_PRICE);
    assertFalse(FUTURE_TRANSACTION_DEFINITION.equals(modifiedFuture));
    modifiedFuture = new FederalFundsFutureTransactionDefinition(FUTURE_SECURITY_DEFINITION, QUANTITY, TRADE_DATE, TRADE_PRICE + 0.0001);
    assertFalse(FUTURE_TRANSACTION_DEFINITION.equals(modifiedFuture));
    // Not equal to an object of a different type, or to null.
    assertFalse(FUTURE_TRANSACTION_DEFINITION.equals(TRADE_DATE));
    assertFalse(FUTURE_TRANSACTION_DEFINITION.equals(null));
  }

  /**
   * Tests the toDerivative method before the security first fixing date - trade date.
   * Expects the reference price to be the trade price.
   */
  @SuppressWarnings("deprecation")
  @Test
  public void toDerivativeNoFixingTradeDateDeprecated() {
    final ZonedDateTime referenceDate = TRADE_DATE;
    final ZonedDateTime[] closingDate = new ZonedDateTime[] {TRADE_DATE.minusDays(2), TRADE_DATE.minusDays(1)};
    final double[] closingPrice = new double[] {0.99895, 0.99905};
    final ZonedDateTimeDoubleTimeSeries closingTS = ImmutableZonedDateTimeDoubleTimeSeries.of(closingDate, closingPrice, ZoneOffset.UTC);
    final ZonedDateTime[] fixingDate = new ZonedDateTime[] {TRADE_DATE.minusDays(2), TRADE_DATE.minusDays(1)};
    final double[] fixingRate = new double[] {0.0010, 0.0011};
    final ZonedDateTimeDoubleTimeSeries fixingTS = ImmutableZonedDateTimeDoubleTimeSeries.of(fixingDate, fixingRate, ZoneOffset.UTC);
    // Convention for the data array: index 0 = index fixings, index 1 = futures closing prices.
    final ZonedDateTimeDoubleTimeSeries[] data = new ZonedDateTimeDoubleTimeSeries[] {fixingTS, closingTS};
    final FederalFundsFutureSecurity securityConverted = FUTURE_SECURITY_DEFINITION.toDerivative(referenceDate, fixingTS, CURVE_NAME);
    final FederalFundsFutureTransaction transactionExpected = new FederalFundsFutureTransaction(securityConverted, QUANTITY, TRADE_PRICE);
    final FederalFundsFutureTransaction transactionConverted = FUTURE_TRANSACTION_DEFINITION.toDerivative(referenceDate, data, CURVE_NAME);
    assertEquals("Fed fund future transaction definition: toDerivative", transactionExpected, transactionConverted);
  }

  /**
   * Tests the toDerivative method before the security first fixing date - after trade date.
   * Expects the reference price to be the last available closing price.
   */
  @SuppressWarnings("deprecation")
  @Test
  public void toDerivativeNoFixingAfterTradeDateDeprecated() {
    final ZonedDateTime referenceDate = ScheduleCalculator.getAdjustedDate(TRADE_DATE, 1, NYC);
    final ZonedDateTime[] closingDate = new ZonedDateTime[] {TRADE_DATE.minusDays(2), TRADE_DATE.minusDays(1), TRADE_DATE};
    final double[] closingPrice = new double[] {0.99895, 0.99905, 0.99915};
    final ZonedDateTimeDoubleTimeSeries closingTS = ImmutableZonedDateTimeDoubleTimeSeries.of(closingDate, closingPrice, ZoneOffset.UTC);
    final ZonedDateTime[] fixingDate = new ZonedDateTime[] {TRADE_DATE.minusDays(2), TRADE_DATE.minusDays(1), TRADE_DATE};
    final double[] fixingRate = new double[] {0.0010, 0.0011, 0.0009};
    final ZonedDateTimeDoubleTimeSeries fixingTS = ImmutableZonedDateTimeDoubleTimeSeries.of(fixingDate, fixingRate, ZoneOffset.UTC);
    final ZonedDateTimeDoubleTimeSeries[] data = new ZonedDateTimeDoubleTimeSeries[] {fixingTS, closingTS};
    final FederalFundsFutureSecurity securityConverted = FUTURE_SECURITY_DEFINITION.toDerivative(referenceDate, fixingTS, CURVE_NAME);
    final FederalFundsFutureTransaction transactionExpected = new FederalFundsFutureTransaction(securityConverted, QUANTITY, closingPrice[2]);
    final FederalFundsFutureTransaction transactionConverted = FUTURE_TRANSACTION_DEFINITION.toDerivative(referenceDate, data, CURVE_NAME);
    assertEquals("Fed fund future transaction definition: toDerivative", transactionExpected, transactionConverted);
  }

  /**
   * Tests the toDerivative method after the security first fixing date, fixing for the
   * reference date not yet published - after trade date.
   */
  @SuppressWarnings("deprecation")
  @Test
  public void toDerivativeFixingStartedBeforePublicationAfterTradeDateDeprecated() {
    final ZonedDateTime referenceDate = DateUtils.getUTCDate(2012, 3, 7);
    final ZonedDateTime[] closingDate = new ZonedDateTime[] {TRADE_DATE.minusDays(2), TRADE_DATE.minusDays(1), TRADE_DATE, DateUtils.getUTCDate(2012, 3, 6)};
    final double[] closingPrice = new double[] {0.99895, 0.99905, 0.99915, 0.99925};
    final ZonedDateTimeDoubleTimeSeries closingTS = ImmutableZonedDateTimeDoubleTimeSeries.of(closingDate, closingPrice, ZoneOffset.UTC);
    // Fixings stop at 2012-03-06: the rate for the reference date (03-07) is not yet known.
    final ZonedDateTime[] fixingDate = new ZonedDateTime[] {DateUtils.getUTCDate(2012, 3, 1), DateUtils.getUTCDate(2012, 3, 2),
        DateUtils.getUTCDate(2012, 3, 5), DateUtils.getUTCDate(2012, 3, 6)};
    final double[] fixingRate = new double[] {0.0010, 0.0011, 0.0012, 0.0013};
    final ZonedDateTimeDoubleTimeSeries fixingTS = ImmutableZonedDateTimeDoubleTimeSeries.of(fixingDate, fixingRate, ZoneOffset.UTC);
    final ZonedDateTimeDoubleTimeSeries[] data = new ZonedDateTimeDoubleTimeSeries[] {fixingTS, closingTS};
    final FederalFundsFutureSecurity securityConverted = FUTURE_SECURITY_DEFINITION.toDerivative(referenceDate, fixingTS, CURVE_NAME);
    final FederalFundsFutureTransaction transactionExpected = new FederalFundsFutureTransaction(securityConverted, QUANTITY, closingPrice[3]);
    final FederalFundsFutureTransaction transactionConverted = FUTURE_TRANSACTION_DEFINITION.toDerivative(referenceDate, data, CURVE_NAME);
    assertEquals("Fed fund future transaction definition: toDerivative", transactionExpected, transactionConverted);
  }

  /**
   * Tests the toDerivative method after the security first fixing date, fixing for the
   * reference date already published - after trade date.
   */
  @SuppressWarnings("deprecation")
  @Test
  public void toDerivativeFixingStartedAfterPublicationAfterTradeDateDeprecated() {
    final ZonedDateTime referenceDate = DateUtils.getUTCDate(2012, 3, 7);
    final ZonedDateTime[] closingDate = new ZonedDateTime[] {TRADE_DATE.minusDays(2), TRADE_DATE.minusDays(1), TRADE_DATE, DateUtils.getUTCDate(2012, 3, 6)};
    final double[] closingPrice = new double[] {0.99895, 0.99905, 0.99915, 0.99925};
    final ZonedDateTimeDoubleTimeSeries closingTS = ImmutableZonedDateTimeDoubleTimeSeries.of(closingDate, closingPrice, ZoneOffset.UTC);
    // Fixings include the reference date (2012-03-07) itself.
    final ZonedDateTime[] fixingDate = new ZonedDateTime[] {DateUtils.getUTCDate(2012, 3, 1), DateUtils.getUTCDate(2012, 3, 2),
        DateUtils.getUTCDate(2012, 3, 5), DateUtils.getUTCDate(2012, 3, 6), DateUtils.getUTCDate(2012, 3, 7)};
    final double[] fixingRate = new double[] {0.0010, 0.0011, 0.0012, 0.0013, 0.0014};
    final ZonedDateTimeDoubleTimeSeries fixingTS = ImmutableZonedDateTimeDoubleTimeSeries.of(fixingDate, fixingRate, ZoneOffset.UTC);
    final ZonedDateTimeDoubleTimeSeries[] data = new ZonedDateTimeDoubleTimeSeries[] {fixingTS, closingTS};
    final FederalFundsFutureSecurity securityConverted = FUTURE_SECURITY_DEFINITION.toDerivative(referenceDate, fixingTS, CURVE_NAME);
    final FederalFundsFutureTransaction transactionExpected = new FederalFundsFutureTransaction(securityConverted, QUANTITY, closingPrice[3]);
    final FederalFundsFutureTransaction transactionConverted = FUTURE_TRANSACTION_DEFINITION.toDerivative(referenceDate, data, CURVE_NAME);
    assertEquals("Fed fund future transaction definition: toDerivative", transactionExpected, transactionConverted);
  }

  /**
   * Tests the toDerivative method after the security first fixing date, fixing for the
   * reference date already published - on trade date (expects the trade price back).
   */
  @SuppressWarnings("deprecation")
  @Test
  public void toDerivativeFixingStartedAfterPublicationTradeDateDeprecated() {
    final ZonedDateTime referenceDate = DateUtils.getUTCDate(2012, 3, 7);
    final ZonedDateTime[] closingDate = new ZonedDateTime[] {TRADE_DATE.minusDays(2), TRADE_DATE.minusDays(1), TRADE_DATE, DateUtils.getUTCDate(2012, 3, 6)};
    final double[] closingPrice = new double[] {0.99895, 0.99905, 0.99915, 0.99925};
    final ZonedDateTimeDoubleTimeSeries closingTS = ImmutableZonedDateTimeDoubleTimeSeries.of(closingDate, closingPrice, ZoneOffset.UTC);
    final ZonedDateTime[] fixingDate = new ZonedDateTime[] {DateUtils.getUTCDate(2012, 3, 1), DateUtils.getUTCDate(2012, 3, 2),
        DateUtils.getUTCDate(2012, 3, 5), DateUtils.getUTCDate(2012, 3, 6), DateUtils.getUTCDate(2012, 3, 7)};
    final double[] fixingRate = new double[] {0.0010, 0.0011, 0.0012, 0.0013, 0.0014};
    final ZonedDateTimeDoubleTimeSeries fixingTS = ImmutableZonedDateTimeDoubleTimeSeries.of(fixingDate, fixingRate, ZoneOffset.UTC);
    final ZonedDateTimeDoubleTimeSeries[] data = new ZonedDateTimeDoubleTimeSeries[] {fixingTS, closingTS};
    // Transaction traded on the reference date itself, so the trade price is used directly.
    final FederalFundsFutureTransactionDefinition futureTransactionDefinition =
        new FederalFundsFutureTransactionDefinition(FUTURE_SECURITY_DEFINITION, QUANTITY, referenceDate, TRADE_PRICE);
    final FederalFundsFutureSecurity securityConverted = FUTURE_SECURITY_DEFINITION.toDerivative(referenceDate, fixingTS, CURVE_NAME);
    final FederalFundsFutureTransaction transactionExpected = new FederalFundsFutureTransaction(securityConverted, QUANTITY, TRADE_PRICE);
    final FederalFundsFutureTransaction transactionConverted = futureTransactionDefinition.toDerivative(referenceDate, data, CURVE_NAME);
    assertEquals("Fed fund future transaction definition: toDerivative", transactionExpected, transactionConverted);
  }

  /**
   * Tests the toDerivative method before the security first fixing date - trade date.
   * Same scenario as the deprecated variant above, using the curve-name-free overload.
   */
  @Test
  public void toDerivativeNoFixingTradeDate() {
    final ZonedDateTime referenceDate = TRADE_DATE;
    final ZonedDateTime[] closingDate = new ZonedDateTime[] {TRADE_DATE.minusDays(2), TRADE_DATE.minusDays(1)};
    final double[] closingPrice = new double[] {0.99895, 0.99905};
    final ZonedDateTimeDoubleTimeSeries closingTS = ImmutableZonedDateTimeDoubleTimeSeries.of(closingDate, closingPrice, ZoneOffset.UTC);
    final ZonedDateTime[] fixingDate = new ZonedDateTime[] {TRADE_DATE.minusDays(2), TRADE_DATE.minusDays(1)};
    final double[] fixingRate = new double[] {0.0010, 0.0011};
    final ZonedDateTimeDoubleTimeSeries fixingTS = ImmutableZonedDateTimeDoubleTimeSeries.of(fixingDate, fixingRate, ZoneOffset.UTC);
    final ZonedDateTimeDoubleTimeSeries[] data = new ZonedDateTimeDoubleTimeSeries[] {fixingTS, closingTS};
    final FederalFundsFutureSecurity securityConverted = FUTURE_SECURITY_DEFINITION.toDerivative(referenceDate, fixingTS);
    final FederalFundsFutureTransaction transactionExpected = new FederalFundsFutureTransaction(securityConverted, QUANTITY, TRADE_PRICE);
    final FederalFundsFutureTransaction transactionConverted = FUTURE_TRANSACTION_DEFINITION.toDerivative(referenceDate, data);
    assertEquals("Fed fund future transaction definition: toDerivative", transactionExpected, transactionConverted);
  }

  /**
   * Tests the toDerivative method before the security first fixing date - after trade date.
   * Expects the reference price to be the last available closing price.
   */
  @Test
  public void toDerivativeNoFixingAfterTradeDate() {
    final ZonedDateTime referenceDate = ScheduleCalculator.getAdjustedDate(TRADE_DATE, 1, NYC);
    final ZonedDateTime[] closingDate = new ZonedDateTime[] {TRADE_DATE.minusDays(2), TRADE_DATE.minusDays(1), TRADE_DATE};
    final double[] closingPrice = new double[] {0.99895, 0.99905, 0.99915};
    final ZonedDateTimeDoubleTimeSeries closingTS = ImmutableZonedDateTimeDoubleTimeSeries.of(closingDate, closingPrice, ZoneOffset.UTC);
    final ZonedDateTime[] fixingDate = new ZonedDateTime[] {TRADE_DATE.minusDays(2), TRADE_DATE.minusDays(1), TRADE_DATE};
    final double[] fixingRate = new double[] {0.0010, 0.0011, 0.0009};
    final ZonedDateTimeDoubleTimeSeries fixingTS = ImmutableZonedDateTimeDoubleTimeSeries.of(fixingDate, fixingRate, ZoneOffset.UTC);
    final ZonedDateTimeDoubleTimeSeries[] data = new ZonedDateTimeDoubleTimeSeries[] {fixingTS, closingTS};
    final FederalFundsFutureSecurity securityConverted = FUTURE_SECURITY_DEFINITION.toDerivative(referenceDate, fixingTS);
    final FederalFundsFutureTransaction transactionExpected = new FederalFundsFutureTransaction(securityConverted, QUANTITY, closingPrice[2]);
    final FederalFundsFutureTransaction transactionConverted = FUTURE_TRANSACTION_DEFINITION.toDerivative(referenceDate, data);
    assertEquals("Fed fund future transaction definition: toDerivative", transactionExpected, transactionConverted);
  }

  /**
   * Tests the toDerivative method after the security first fixing date, fixing for the
   * reference date not yet published - after trade date.
   */
  @Test
  public void toDerivativeFixingStartedBeforePublicationAfterTradeDate() {
    final ZonedDateTime referenceDate = DateUtils.getUTCDate(2012, 3, 7);
    final ZonedDateTime[] closingDate = new ZonedDateTime[] {TRADE_DATE.minusDays(2), TRADE_DATE.minusDays(1), TRADE_DATE, DateUtils.getUTCDate(2012, 3, 6)};
    final double[] closingPrice = new double[] {0.99895, 0.99905, 0.99915, 0.99925};
    final ZonedDateTimeDoubleTimeSeries closingTS = ImmutableZonedDateTimeDoubleTimeSeries.of(closingDate, closingPrice, ZoneOffset.UTC);
    final ZonedDateTime[] fixingDate = new ZonedDateTime[] {DateUtils.getUTCDate(2012, 3, 1), DateUtils.getUTCDate(2012, 3, 2),
        DateUtils.getUTCDate(2012, 3, 5), DateUtils.getUTCDate(2012, 3, 6)};
    final double[] fixingRate = new double[] {0.0010, 0.0011, 0.0012, 0.0013};
    final ZonedDateTimeDoubleTimeSeries fixingTS = ImmutableZonedDateTimeDoubleTimeSeries.of(fixingDate, fixingRate, ZoneOffset.UTC);
    final ZonedDateTimeDoubleTimeSeries[] data = new ZonedDateTimeDoubleTimeSeries[] {fixingTS, closingTS};
    final FederalFundsFutureSecurity securityConverted = FUTURE_SECURITY_DEFINITION.toDerivative(referenceDate, fixingTS);
    final FederalFundsFutureTransaction transactionExpected = new FederalFundsFutureTransaction(securityConverted, QUANTITY, closingPrice[3]);
    final FederalFundsFutureTransaction transactionConverted = FUTURE_TRANSACTION_DEFINITION.toDerivative(referenceDate, data);
    assertEquals("Fed fund future transaction definition: toDerivative", transactionExpected, transactionConverted);
  }

  /**
   * Tests the toDerivative method after the security first fixing date, fixing for the
   * reference date already published - after trade date.
   */
  @Test
  public void toDerivativeFixingStartedAfterPublicationAfterTradeDate() {
    final ZonedDateTime referenceDate = DateUtils.getUTCDate(2012, 3, 7);
    final ZonedDateTime[] closingDate = new ZonedDateTime[] {TRADE_DATE.minusDays(2), TRADE_DATE.minusDays(1), TRADE_DATE, DateUtils.getUTCDate(2012, 3, 6)};
    final double[] closingPrice = new double[] {0.99895, 0.99905, 0.99915, 0.99925};
    final ZonedDateTimeDoubleTimeSeries closingTS = ImmutableZonedDateTimeDoubleTimeSeries.of(closingDate, closingPrice, ZoneOffset.UTC);
    final ZonedDateTime[] fixingDate = new ZonedDateTime[] {DateUtils.getUTCDate(2012, 3, 1), DateUtils.getUTCDate(2012, 3, 2),
        DateUtils.getUTCDate(2012, 3, 5), DateUtils.getUTCDate(2012, 3, 6), DateUtils.getUTCDate(2012, 3, 7)};
    final double[] fixingRate = new double[] {0.0010, 0.0011, 0.0012, 0.0013, 0.0014};
    final ZonedDateTimeDoubleTimeSeries fixingTS = ImmutableZonedDateTimeDoubleTimeSeries.of(fixingDate, fixingRate, ZoneOffset.UTC);
    final ZonedDateTimeDoubleTimeSeries[] data = new ZonedDateTimeDoubleTimeSeries[] {fixingTS, closingTS};
    final FederalFundsFutureSecurity securityConverted = FUTURE_SECURITY_DEFINITION.toDerivative(referenceDate, fixingTS);
    final FederalFundsFutureTransaction transactionExpected = new FederalFundsFutureTransaction(securityConverted, QUANTITY, closingPrice[3]);
    final FederalFundsFutureTransaction transactionConverted = FUTURE_TRANSACTION_DEFINITION.toDerivative(referenceDate, data);
    assertEquals("Fed fund future transaction definition: toDerivative", transactionExpected, transactionConverted);
  }

  /**
   * Tests the toDerivative method after the security first fixing date, fixing for the
   * reference date already published - on trade date (expects the trade price back).
   */
  @Test
  public void toDerivativeFixingStartedAfterPublicationTradeDate() {
    final ZonedDateTime referenceDate = DateUtils.getUTCDate(2012, 3, 7);
    final ZonedDateTime[] closingDate = new ZonedDateTime[] {TRADE_DATE.minusDays(2), TRADE_DATE.minusDays(1), TRADE_DATE, DateUtils.getUTCDate(2012, 3, 6)};
    final double[] closingPrice = new double[] {0.99895, 0.99905, 0.99915, 0.99925};
    final ZonedDateTimeDoubleTimeSeries closingTS = ImmutableZonedDateTimeDoubleTimeSeries.of(closingDate, closingPrice, ZoneOffset.UTC);
    final ZonedDateTime[] fixingDate = new ZonedDateTime[] {DateUtils.getUTCDate(2012, 3, 1), DateUtils.getUTCDate(2012, 3, 2),
        DateUtils.getUTCDate(2012, 3, 5), DateUtils.getUTCDate(2012, 3, 6), DateUtils.getUTCDate(2012, 3, 7)};
    final double[] fixingRate = new double[] {0.0010, 0.0011, 0.0012, 0.0013, 0.0014};
    final ZonedDateTimeDoubleTimeSeries fixingTS = ImmutableZonedDateTimeDoubleTimeSeries.of(fixingDate, fixingRate, ZoneOffset.UTC);
    final ZonedDateTimeDoubleTimeSeries[] data = new ZonedDateTimeDoubleTimeSeries[] {fixingTS, closingTS};
    // Transaction traded on the reference date itself, so the trade price is used directly.
    final FederalFundsFutureTransactionDefinition futureTransactionDefinition =
        new FederalFundsFutureTransactionDefinition(FUTURE_SECURITY_DEFINITION, QUANTITY, referenceDate, TRADE_PRICE);
    final FederalFundsFutureSecurity securityConverted = FUTURE_SECURITY_DEFINITION.toDerivative(referenceDate, fixingTS);
    final FederalFundsFutureTransaction transactionExpected = new FederalFundsFutureTransaction(securityConverted, QUANTITY, TRADE_PRICE);
    final FederalFundsFutureTransaction transactionConverted = futureTransactionDefinition.toDerivative(referenceDate, data);
    assertEquals("Fed fund future transaction definition: toDerivative", transactionExpected, transactionConverted);
  }
}
/* * #%L * BroadleafCommerce Framework * %% * Copyright (C) 2009 - 2013 Broadleaf Commerce * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.broadleafcommerce.core.payment.service; import org.apache.commons.lang3.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.broadleafcommerce.common.payment.PaymentAdditionalFieldType; import org.broadleafcommerce.common.payment.PaymentGatewayType; import org.broadleafcommerce.common.payment.PaymentType; import org.broadleafcommerce.common.payment.dto.AddressDTO; import org.broadleafcommerce.common.payment.dto.GatewayCustomerDTO; import org.broadleafcommerce.common.payment.dto.PaymentResponseDTO; import org.broadleafcommerce.common.payment.service.PaymentGatewayCheckoutService; import org.broadleafcommerce.common.payment.service.PaymentGatewayConfiguration; import org.broadleafcommerce.common.web.payment.controller.PaymentGatewayAbstractController; import org.broadleafcommerce.core.checkout.service.CheckoutService; import org.broadleafcommerce.core.checkout.service.exception.CheckoutException; import org.broadleafcommerce.core.checkout.service.workflow.CheckoutResponse; import org.broadleafcommerce.core.order.domain.FulfillmentGroup; import org.broadleafcommerce.core.order.domain.NullOrderImpl; import org.broadleafcommerce.core.order.domain.Order; import org.broadleafcommerce.core.order.service.FulfillmentGroupService; import 
org.broadleafcommerce.core.order.service.OrderService; import org.broadleafcommerce.core.order.service.type.OrderStatus; import org.broadleafcommerce.core.payment.domain.OrderPayment; import org.broadleafcommerce.core.payment.domain.PaymentTransaction; import org.broadleafcommerce.profile.core.domain.Address; import org.broadleafcommerce.profile.core.domain.Country; import org.broadleafcommerce.profile.core.domain.Customer; import org.broadleafcommerce.profile.core.domain.Phone; import org.broadleafcommerce.profile.core.domain.State; import org.broadleafcommerce.profile.core.service.AddressService; import org.broadleafcommerce.profile.core.service.CountryService; import org.broadleafcommerce.profile.core.service.PhoneService; import org.broadleafcommerce.profile.core.service.StateService; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Service; import java.util.ArrayList; import java.util.List; import java.util.Map.Entry; import javax.annotation.Resource; /** * Core framework implementation of the {@link PaymentGatewayCheckoutService}. 
* * @see {@link PaymentGatewayAbstractController} * @author Phillip Verheyden (phillipuniverse) */ @Service("blPaymentGatewayCheckoutService") public class DefaultPaymentGatewayCheckoutService implements PaymentGatewayCheckoutService { private static final Log LOG = LogFactory.getLog(DefaultPaymentGatewayCheckoutService.class); @Resource(name = "blOrderService") protected OrderService orderService; @Resource(name = "blOrderPaymentService") protected OrderPaymentService orderPaymentService; @Resource(name = "blCheckoutService") protected CheckoutService checkoutService; @Resource(name = "blAddressService") protected AddressService addressService; @Resource(name = "blStateService") protected StateService stateService; @Resource(name = "blCountryService") protected CountryService countryService; @Resource(name = "blPhoneService") protected PhoneService phoneService; @Resource(name = "blFulfillmentGroupService") protected FulfillmentGroupService fulfillmentGroupService; @Value("${default.payment.gateway.checkout.useGatewayBillingAddress}") protected boolean useBillingAddressFromGateway = true; @Override public Long applyPaymentToOrder(PaymentResponseDTO responseDTO, PaymentGatewayConfiguration config) { //Payments can ONLY be parsed into Order Payments if they are 'valid' if (!responseDTO.isValid()) { throw new IllegalArgumentException("Invalid payment responses cannot be parsed into the order payment domain"); } if (config == null) { throw new IllegalArgumentException("Config service cannot be null"); } Long orderId = Long.parseLong(responseDTO.getOrderId()); Order order = orderService.findOrderById(orderId); if (!OrderStatus.IN_PROCESS.equals(order.getStatus()) && !OrderStatus.CSR_OWNED.equals(order.getStatus())) { throw new IllegalArgumentException("Cannot apply another payment to an Order that is not IN_PROCESS or CSR_OWNED"); } Customer customer = order.getCustomer(); if (customer.isAnonymous()) { GatewayCustomerDTO<PaymentResponseDTO> gatewayCustomer = 
responseDTO.getCustomer(); if (StringUtils.isEmpty(customer.getFirstName()) && gatewayCustomer != null) { customer.setFirstName(gatewayCustomer.getFirstName()); } if (StringUtils.isEmpty(customer.getLastName()) && gatewayCustomer != null) { customer.setLastName(gatewayCustomer.getLastName()); } if (StringUtils.isEmpty(customer.getEmailAddress()) && gatewayCustomer != null) { customer.setEmailAddress(gatewayCustomer.getEmail()); } } // If the gateway sends back an email address and the order does not contain one, set it. GatewayCustomerDTO<PaymentResponseDTO> gatewayCustomer = responseDTO.getCustomer(); if (order.getEmailAddress() == null && gatewayCustomer != null) { order.setEmailAddress(gatewayCustomer.getEmail()); } // If the gateway sends back Shipping Information, we will save that to the first shippable fulfillment group. populateShippingInfo(responseDTO, order); // ALWAYS create a new order payment for the payment that comes in. Invalid payments should be cleaned up by // invoking {@link #markPaymentAsInvalid}. OrderPayment payment = orderPaymentService.create(); payment.setType(responseDTO.getPaymentType()); payment.setPaymentGatewayType(responseDTO.getPaymentGatewayType()); payment.setAmount(responseDTO.getAmount()); // If this gateway does not support multiple payments then mark all of the existing payments // as invalid before adding the new one List<OrderPayment> paymentsToInvalidate = new ArrayList<OrderPayment>(); Address tempBillingAddress = null; if (!config.handlesMultiplePayments()) { PaymentGatewayType gateway = config.getGatewayType(); for (OrderPayment p : order.getPayments()) { // A Payment on the order will be invalidated if: // - It's a temporary order payment: There may be a temporary Order Payment on the Order (e.g. to save the billing address) // - The payment being added is a Final Payment and there already exists a Final Payment // - The payment being added has the same gateway type of an existing one. 
if (PaymentGatewayType.TEMPORARY.equals(p.getGatewayType()) || (p.isFinalPayment() && payment.isFinalPayment()) || (p.getGatewayType() != null && p.getGatewayType().equals(gateway))) { paymentsToInvalidate.add(p); if (PaymentType.CREDIT_CARD.equals(p.getType()) && PaymentGatewayType.TEMPORARY.equals(p.getGatewayType()) ) { tempBillingAddress = p.getBillingAddress(); } } } } for (OrderPayment invalid : paymentsToInvalidate) { order.getPayments().remove(invalid); markPaymentAsInvalid(invalid.getId()); } // The billing address that will be saved on the order will be parsed off the // Response DTO sent back from the Gateway as it may have Address Verification or Standardization. // If you do not wish to use the Billing Address coming back from the Gateway, you can override the // populateBillingInfo() method or set the useBillingAddressFromGateway property. populateBillingInfo(responseDTO, payment, tempBillingAddress); // Create the transaction for the payment PaymentTransaction transaction = orderPaymentService.createTransaction(); transaction.setAmount(responseDTO.getAmount()); transaction.setRawResponse(responseDTO.getRawResponse()); transaction.setSuccess(responseDTO.isSuccessful()); transaction.setType(responseDTO.getPaymentTransactionType()); for (Entry<String, String> entry : responseDTO.getResponseMap().entrySet()) { transaction.getAdditionalFields().put(entry.getKey(), entry.getValue()); } //Set the Credit Card Info on the Additional Fields Map if (PaymentType.CREDIT_CARD.equals(responseDTO.getPaymentType()) && responseDTO.getCreditCard().creditCardPopulated()) { transaction.getAdditionalFields().put(PaymentAdditionalFieldType.NAME_ON_CARD.getType(), responseDTO.getCreditCard().getCreditCardHolderName()); transaction.getAdditionalFields().put(PaymentAdditionalFieldType.CARD_TYPE.getType(), responseDTO.getCreditCard().getCreditCardType()); transaction.getAdditionalFields().put(PaymentAdditionalFieldType.EXP_DATE.getType(), 
responseDTO.getCreditCard().getCreditCardExpDate()); transaction.getAdditionalFields().put(PaymentAdditionalFieldType.LAST_FOUR.getType(), responseDTO.getCreditCard().getCreditCardLastFour()); } //TODO: validate that this particular type of transaction can be added to the payment (there might already // be an AUTHORIZE transaction, for instance) //Persist the order payment as well as its transaction payment.setOrder(order); transaction.setOrderPayment(payment); payment.addTransaction(transaction); payment = orderPaymentService.save(payment); if (transaction.getSuccess()) { orderService.addPaymentToOrder(order, payment, null); } else { // We will have to mark the entire payment as invalid and boot the user to re-enter their // billing info and payment information as there may be an error either with the billing address/or credit card handleUnsuccessfulTransaction(payment); } return payment.getId(); } protected void populateBillingInfo(PaymentResponseDTO responseDTO, OrderPayment payment, Address tempBillingAddress) { Address billingAddress = tempBillingAddress; if (responseDTO.getBillTo() != null && isUseBillingAddressFromGateway()) { billingAddress = addressService.create(); AddressDTO<PaymentResponseDTO> billToDTO = responseDTO.getBillTo(); billingAddress.setFirstName(billToDTO.getAddressFirstName()); billingAddress.setLastName(billToDTO.getAddressLastName()); billingAddress.setAddressLine1(billToDTO.getAddressLine1()); billingAddress.setAddressLine2(billToDTO.getAddressLine2()); billingAddress.setCity(billToDTO.getAddressCityLocality()); //TODO: what happens if State and Country cannot be found? 
State state = null; if(billToDTO.getAddressStateRegion() != null) { state = stateService.findStateByAbbreviation(billToDTO.getAddressStateRegion()); } if (state == null) { LOG.warn("The given state from the response: " + billToDTO.getAddressStateRegion() + " could not be found" + " as a state abbreviation in BLC_STATE"); } billingAddress.setState(state); billingAddress.setPostalCode(billToDTO.getAddressPostalCode()); Country country = null; if (billToDTO.getAddressCountryCode() != null) { country = countryService.findCountryByAbbreviation(billToDTO.getAddressCountryCode()); } if (country == null) { LOG.warn("The given country from the response: " + billToDTO.getAddressCountryCode() + " could not be found" + " as a country abbreviation in BLC_COUNTRY"); } billingAddress.setCountry(country); if (billToDTO.getAddressPhone() != null) { Phone billingPhone = phoneService.create(); billingPhone.setPhoneNumber(billToDTO.getAddressPhone()); billingAddress.setPhonePrimary(billingPhone); } } payment.setBillingAddress(billingAddress); } protected void populateShippingInfo(PaymentResponseDTO responseDTO, Order order) { FulfillmentGroup shippableFulfillmentGroup = fulfillmentGroupService.getFirstShippableFulfillmentGroup(order); Address shippingAddress = null; if (responseDTO.getShipTo() != null && shippableFulfillmentGroup != null) { shippingAddress = addressService.create(); AddressDTO<PaymentResponseDTO> shipToDTO = responseDTO.getShipTo(); shippingAddress.setFirstName(shipToDTO.getAddressFirstName()); shippingAddress.setLastName(shipToDTO.getAddressLastName()); shippingAddress.setAddressLine1(shipToDTO.getAddressLine1()); shippingAddress.setAddressLine2(shipToDTO.getAddressLine2()); shippingAddress.setCity(shipToDTO.getAddressCityLocality()); State state = null; if(shipToDTO.getAddressStateRegion() != null) { state = stateService.findStateByAbbreviation(shipToDTO.getAddressStateRegion()); } if (state == null) { LOG.warn("The given state from the response: " + 
shipToDTO.getAddressStateRegion() + " could not be found" + " as a state abbreviation in BLC_STATE"); } shippingAddress.setState(state); shippingAddress.setPostalCode(shipToDTO.getAddressPostalCode()); Country country = null; if (shipToDTO.getAddressCountryCode() != null) { country = countryService.findCountryByAbbreviation(shipToDTO.getAddressCountryCode()); } if (country == null) { LOG.warn("The given country from the response: " + shipToDTO.getAddressCountryCode() + " could not be found" + " as a country abbreviation in BLC_COUNTRY"); } shippingAddress.setCountry(country); if (shipToDTO.getAddressPhone() != null) { Phone shippingPhone = phoneService.create(); shippingPhone.setPhoneNumber(shipToDTO.getAddressPhone()); shippingAddress.setPhonePrimary(shippingPhone); } shippableFulfillmentGroup = fulfillmentGroupService.findFulfillmentGroupById(shippableFulfillmentGroup.getId()); if (shippableFulfillmentGroup != null) { shippableFulfillmentGroup.setAddress(shippingAddress); fulfillmentGroupService.save(shippableFulfillmentGroup); } } } /** * This default implementation will mark the entire payment as invalid and boot the user to re-enter their * billing info and payment information as there may be an error with either the billing address or credit card. * This is the safest method, because depending on the implementation of the Gateway, we may not know exactly where * the error occurred (e.g. Address Verification enabled, etc...) So, we will assume that the error invalidates * the entire Order Payment, and the customer will have to re-enter their billing and credit card information to be * processed again. 
* * @param payment */ protected void handleUnsuccessfulTransaction(OrderPayment payment) { markPaymentAsInvalid(payment.getId()); } @Override public void markPaymentAsInvalid(Long orderPaymentId) { OrderPayment payment = orderPaymentService.readPaymentById(orderPaymentId); if (payment == null) { throw new IllegalArgumentException("Could not find payment with id " + orderPaymentId); } // Do not do an actual delete here, otherwise Hibernate will screw up the relationships by setting parent transactions // to null because of the cascades. This manifests itself when you have an AUTHORIZE_AND_CAPTURE transaction and // then an immediate VOID (like if there is an exception in the checkout workflow). The VOID transaction should // have its parent set to the AUTHORIZE_AND_CAPTURE transaction which works up until we call Hibernate's delete // on the payment. By cascading down to the transaction, Hibernate goes and removes the parentTransaction relationship // from the VOID transaction // The fix is to set archived statuses manually and not rely on Hibernate's @SqlDelete payment.setArchived('Y'); for (PaymentTransaction transaction : payment.getTransactions()) { transaction.setArchived('Y'); } } @Override public String initiateCheckout(Long orderId) throws Exception{ Order order = orderService.findOrderById(orderId, true); if (order == null || order instanceof NullOrderImpl) { throw new IllegalArgumentException("Could not order with id " + orderId); } CheckoutResponse response; try { response = checkoutService.performCheckout(order); } catch (CheckoutException e) { throw new Exception(e); } if (response.getOrder().getOrderNumber() == null) { LOG.error("Order Number for Order ID: " + order.getId() + " is null."); } return response.getOrder().getOrderNumber(); } @Override public String lookupOrderNumberFromOrderId(PaymentResponseDTO responseDTO) { Order order = orderService.findOrderById(Long.parseLong(responseDTO.getOrderId()), true); if (order == null) { throw new 
IllegalArgumentException("An order with ID " + responseDTO.getOrderId() + " cannot be found for the" + " given payment response."); } return order.getOrderNumber(); } public boolean isUseBillingAddressFromGateway() { return useBillingAddressFromGateway; } public void setUseBillingAddressFromGateway(boolean useBillingAddressFromGateway) { this.useBillingAddressFromGateway = useBillingAddressFromGateway; } }
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/monitoring/v3/service.proto package com.google.monitoring.v3; /** * * * <pre> * A `TimeSeriesRatio` specifies two `TimeSeries` to use for computing the * `good_service / total_service` ratio. The specified `TimeSeries` must have * `ValueType = DOUBLE` or `ValueType = INT64` and must have `MetricKind = * DELTA` or `MetricKind = CUMULATIVE`. The `TimeSeriesRatio` must specify * exactly two of good, bad, and total, and the relationship `good_service + * bad_service = total_service` will be assumed. * </pre> * * Protobuf type {@code google.monitoring.v3.TimeSeriesRatio} */ public final class TimeSeriesRatio extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.monitoring.v3.TimeSeriesRatio) TimeSeriesRatioOrBuilder { private static final long serialVersionUID = 0L; // Use TimeSeriesRatio.newBuilder() to construct. 
  // Builder-based constructor invoked by Builder.buildPartial(); field plumbing handled by the superclass.
  private TimeSeriesRatio(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default-instance constructor: all three filter fields start out as the empty string.
  private TimeSeriesRatio() {
    goodServiceFilter_ = "";
    badServiceFilter_ = "";
    totalServiceFilter_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new TimeSeriesRatio();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor. Tags 34/42/50 are the length-delimited encodings of field
  // numbers 4/5/6 (good_service_filter, bad_service_filter, total_service_filter); any other
  // field is preserved in unknownFields rather than dropped.
  private TimeSeriesRatio(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0: // tag 0 signals end of the input stream
            done = true;
            break;
          case 34:
            {
              java.lang.String s = input.readStringRequireUtf8();

              goodServiceFilter_ = s;
              break;
            }
          case 42:
            {
              java.lang.String s = input.readStringRequireUtf8();

              badServiceFilter_ = s;
              break;
            }
          case 50:
            {
              java.lang.String s = input.readStringRequireUtf8();

              totalServiceFilter_ = s;
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      // Attach the partially-parsed message so callers can inspect what was read before the failure
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.monitoring.v3.ServiceMonitoringProto
        .internal_static_google_monitoring_v3_TimeSeriesRatio_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.monitoring.v3.ServiceMonitoringProto
        .internal_static_google_monitoring_v3_TimeSeriesRatio_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.monitoring.v3.TimeSeriesRatio.class,
            com.google.monitoring.v3.TimeSeriesRatio.Builder.class);
  }

  public static final int GOOD_SERVICE_FILTER_FIELD_NUMBER = 4;
  // Holds either a String or a ByteString; a decoded UTF-8 String is cached back into the field
  // on first access (standard protobuf lazy string representation).
  private volatile java.lang.Object goodServiceFilter_;
  /**
   *
   *
   * <pre>
   * A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
   * specifying a `TimeSeries` quantifying good service provided. Must have
   * `ValueType = DOUBLE` or `ValueType = INT64` and must have `MetricKind =
   * DELTA` or `MetricKind = CUMULATIVE`.
   * </pre>
   *
   * <code>string good_service_filter = 4;</code>
   *
   * @return The goodServiceFilter.
   */
  @java.lang.Override
  public java.lang.String getGoodServiceFilter() {
    java.lang.Object ref = goodServiceFilter_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      goodServiceFilter_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
   * specifying a `TimeSeries` quantifying good service provided. Must have
   * `ValueType = DOUBLE` or `ValueType = INT64` and must have `MetricKind =
   * DELTA` or `MetricKind = CUMULATIVE`.
   * </pre>
   *
   * <code>string good_service_filter = 4;</code>
   *
   * @return The bytes for goodServiceFilter.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getGoodServiceFilterBytes() {
    java.lang.Object ref = goodServiceFilter_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      goodServiceFilter_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int BAD_SERVICE_FILTER_FIELD_NUMBER = 5;
  // Same lazy String/ByteString representation as goodServiceFilter_.
  private volatile java.lang.Object badServiceFilter_;
  /**
   *
   *
   * <pre>
   * A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
   * specifying a `TimeSeries` quantifying bad service, either demanded service
   * that was not provided or demanded service that was of inadequate quality.
   * Must have `ValueType = DOUBLE` or `ValueType = INT64` and must have
   * `MetricKind = DELTA` or `MetricKind = CUMULATIVE`.
   * </pre>
   *
   * <code>string bad_service_filter = 5;</code>
   *
   * @return The badServiceFilter.
   */
  @java.lang.Override
  public java.lang.String getBadServiceFilter() {
    java.lang.Object ref = badServiceFilter_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      badServiceFilter_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
   * specifying a `TimeSeries` quantifying bad service, either demanded service
   * that was not provided or demanded service that was of inadequate quality.
   * Must have `ValueType = DOUBLE` or `ValueType = INT64` and must have
   * `MetricKind = DELTA` or `MetricKind = CUMULATIVE`.
   * </pre>
   *
   * <code>string bad_service_filter = 5;</code>
   *
   * @return The bytes for badServiceFilter.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getBadServiceFilterBytes() {
    java.lang.Object ref = badServiceFilter_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      badServiceFilter_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int TOTAL_SERVICE_FILTER_FIELD_NUMBER = 6;
  // Same lazy String/ByteString representation as goodServiceFilter_.
  private volatile java.lang.Object totalServiceFilter_;
  /**
   *
   *
   * <pre>
   * A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
   * specifying a `TimeSeries` quantifying total demanded service. Must have
   * `ValueType = DOUBLE` or `ValueType = INT64` and must have `MetricKind =
   * DELTA` or `MetricKind = CUMULATIVE`.
   * </pre>
   *
   * <code>string total_service_filter = 6;</code>
   *
   * @return The totalServiceFilter.
   */
  @java.lang.Override
  public java.lang.String getTotalServiceFilter() {
    java.lang.Object ref = totalServiceFilter_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      totalServiceFilter_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
   * specifying a `TimeSeries` quantifying total demanded service. Must have
   * `ValueType = DOUBLE` or `ValueType = INT64` and must have `MetricKind =
   * DELTA` or `MetricKind = CUMULATIVE`.
   * </pre>
   *
   * <code>string total_service_filter = 6;</code>
   *
   * @return The bytes for totalServiceFilter.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getTotalServiceFilterBytes() {
    java.lang.Object ref = totalServiceFilter_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      totalServiceFilter_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Memoized initialization state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes only non-empty filter fields (proto3 default-value elision), then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(goodServiceFilter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, goodServiceFilter_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(badServiceFilter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 5, badServiceFilter_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(totalServiceFilter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 6, totalServiceFilter_);
    }
    unknownFields.writeTo(output);
  }

  // Computes and memoizes the serialized byte size; mirrors the field elision in writeTo().
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(goodServiceFilter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, goodServiceFilter_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(badServiceFilter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, badServiceFilter_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(totalServiceFilter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, totalServiceFilter_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
@java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.monitoring.v3.TimeSeriesRatio)) { return super.equals(obj); } com.google.monitoring.v3.TimeSeriesRatio other = (com.google.monitoring.v3.TimeSeriesRatio) obj; if (!getGoodServiceFilter().equals(other.getGoodServiceFilter())) return false; if (!getBadServiceFilter().equals(other.getBadServiceFilter())) return false; if (!getTotalServiceFilter().equals(other.getTotalServiceFilter())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + GOOD_SERVICE_FILTER_FIELD_NUMBER; hash = (53 * hash) + getGoodServiceFilter().hashCode(); hash = (37 * hash) + BAD_SERVICE_FILTER_FIELD_NUMBER; hash = (53 * hash) + getBadServiceFilter().hashCode(); hash = (37 * hash) + TOTAL_SERVICE_FILTER_FIELD_NUMBER; hash = (53 * hash) + getTotalServiceFilter().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.monitoring.v3.TimeSeriesRatio parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.monitoring.v3.TimeSeriesRatio parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.monitoring.v3.TimeSeriesRatio parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.monitoring.v3.TimeSeriesRatio parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.monitoring.v3.TimeSeriesRatio parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.monitoring.v3.TimeSeriesRatio parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.monitoring.v3.TimeSeriesRatio parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.monitoring.v3.TimeSeriesRatio parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.monitoring.v3.TimeSeriesRatio parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.monitoring.v3.TimeSeriesRatio parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.monitoring.v3.TimeSeriesRatio parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.monitoring.v3.TimeSeriesRatio parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.monitoring.v3.TimeSeriesRatio prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * A `TimeSeriesRatio` specifies two `TimeSeries` to use for computing the * `good_service / total_service` ratio. The specified `TimeSeries` must have * `ValueType = DOUBLE` or `ValueType = INT64` and must have `MetricKind = * DELTA` or `MetricKind = CUMULATIVE`. The `TimeSeriesRatio` must specify * exactly two of good, bad, and total, and the relationship `good_service + * bad_service = total_service` will be assumed. 
   * </pre>
   *
   * Protobuf type {@code google.monitoring.v3.TimeSeriesRatio}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.monitoring.v3.TimeSeriesRatio)
      com.google.monitoring.v3.TimeSeriesRatioOrBuilder {

    // Descriptor for this message type; shared with the enclosing message class.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.monitoring.v3.ServiceMonitoringProto
          .internal_static_google_monitoring_v3_TimeSeriesRatio_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.monitoring.v3.ServiceMonitoringProto
          .internal_static_google_monitoring_v3_TimeSeriesRatio_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.monitoring.v3.TimeSeriesRatio.class,
              com.google.monitoring.v3.TimeSeriesRatio.Builder.class);
    }

    // Construct using com.google.monitoring.v3.TimeSeriesRatio.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // No repeated/message fields here, so nothing to force-initialize; the body is
    // intentionally empty when alwaysUseFieldBuilders is set.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    // Resets every field to its proto3 default (empty string for all three filters).
    @java.lang.Override
    public Builder clear() {
      super.clear();
      goodServiceFilter_ = "";
      badServiceFilter_ = "";
      totalServiceFilter_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.monitoring.v3.ServiceMonitoringProto
          .internal_static_google_monitoring_v3_TimeSeriesRatio_descriptor;
    }

    @java.lang.Override
    public com.google.monitoring.v3.TimeSeriesRatio getDefaultInstanceForType() {
      return com.google.monitoring.v3.TimeSeriesRatio.getDefaultInstance();
    }

    // Builds the message, throwing if any required invariant is unmet
    // (proto3 messages are always initialized, so this cannot throw in practice).
    @java.lang.Override
    public com.google.monitoring.v3.TimeSeriesRatio build() {
      com.google.monitoring.v3.TimeSeriesRatio result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    // Copies the builder's current field values into a new immutable message.
    @java.lang.Override
    public com.google.monitoring.v3.TimeSeriesRatio buildPartial() {
      com.google.monitoring.v3.TimeSeriesRatio result =
          new com.google.monitoring.v3.TimeSeriesRatio(this);
      result.goodServiceFilter_ = goodServiceFilter_;
      result.badServiceFilter_ = badServiceFilter_;
      result.totalServiceFilter_ = totalServiceFilter_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    // Dynamic merge entry point: dispatches to the typed overload when possible.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.monitoring.v3.TimeSeriesRatio) {
        return mergeFrom((com.google.monitoring.v3.TimeSeriesRatio) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Typed merge: non-empty fields of {@code other} overwrite this builder's values.
    public Builder mergeFrom(com.google.monitoring.v3.TimeSeriesRatio other) {
      if (other == com.google.monitoring.v3.TimeSeriesRatio.getDefaultInstance()) return this;
      if (!other.getGoodServiceFilter().isEmpty()) {
        goodServiceFilter_ = other.goodServiceFilter_;
        onChanged();
      }
      if (!other.getBadServiceFilter().isEmpty()) {
        badServiceFilter_ = other.badServiceFilter_;
        onChanged();
      }
      if (!other.getTotalServiceFilter().isEmpty()) {
        totalServiceFilter_ = other.totalServiceFilter_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format merge. Note the finally block: even when parsing fails part-way,
    // whatever was successfully parsed is merged before the exception propagates.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.monitoring.v3.TimeSeriesRatio parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.monitoring.v3.TimeSeriesRatio) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    // Stored as Object: either a String or a ByteString, lazily converted and cached.
    private java.lang.Object goodServiceFilter_ = "";
    /**
     *
     *
     * <pre>
     * A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
     * specifying a `TimeSeries` quantifying good service provided. Must have
     * `ValueType = DOUBLE` or `ValueType = INT64` and must have `MetricKind =
     * DELTA` or `MetricKind = CUMULATIVE`.
     * </pre>
     *
     * <code>string good_service_filter = 4;</code>
     *
     * @return The goodServiceFilter.
     */
    public java.lang.String getGoodServiceFilter() {
      java.lang.Object ref = goodServiceFilter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        goodServiceFilter_ = s;  // cache the decoded String for subsequent calls
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
     * specifying a `TimeSeries` quantifying good service provided. Must have
     * `ValueType = DOUBLE` or `ValueType = INT64` and must have `MetricKind =
     * DELTA` or `MetricKind = CUMULATIVE`.
     * </pre>
     *
     * <code>string good_service_filter = 4;</code>
     *
     * @return The bytes for goodServiceFilter.
     */
    public com.google.protobuf.ByteString getGoodServiceFilterBytes() {
      java.lang.Object ref = goodServiceFilter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        goodServiceFilter_ = b;  // cache the encoded ByteString for subsequent calls
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
     * specifying a `TimeSeries` quantifying good service provided. Must have
     * `ValueType = DOUBLE` or `ValueType = INT64` and must have `MetricKind =
     * DELTA` or `MetricKind = CUMULATIVE`.
     * </pre>
     *
     * <code>string good_service_filter = 4;</code>
     *
     * @param value The goodServiceFilter to set.
     * @return This builder for chaining.
     * @throws NullPointerException if {@code value} is null.
     */
    public Builder setGoodServiceFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      goodServiceFilter_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
     * specifying a `TimeSeries` quantifying good service provided. Must have
     * `ValueType = DOUBLE` or `ValueType = INT64` and must have `MetricKind =
     * DELTA` or `MetricKind = CUMULATIVE`.
     * </pre>
     *
     * <code>string good_service_filter = 4;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearGoodServiceFilter() {
      goodServiceFilter_ = getDefaultInstance().getGoodServiceFilter();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
     * specifying a `TimeSeries` quantifying good service provided. Must have
     * `ValueType = DOUBLE` or `ValueType = INT64` and must have `MetricKind =
     * DELTA` or `MetricKind = CUMULATIVE`.
     * </pre>
     *
     * <code>string good_service_filter = 4;</code>
     *
     * @param value The bytes for goodServiceFilter to set.
     * @return This builder for chaining.
     * @throws NullPointerException if {@code value} is null.
     */
    public Builder setGoodServiceFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      goodServiceFilter_ = value;
      onChanged();
      return this;
    }

    // Stored as Object: either a String or a ByteString, lazily converted and cached.
    private java.lang.Object badServiceFilter_ = "";
    /**
     *
     *
     * <pre>
     * A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
     * specifying a `TimeSeries` quantifying bad service, either demanded service
     * that was not provided or demanded service that was of inadequate quality.
     * Must have `ValueType = DOUBLE` or `ValueType = INT64` and must have
     * `MetricKind = DELTA` or `MetricKind = CUMULATIVE`.
     * </pre>
     *
     * <code>string bad_service_filter = 5;</code>
     *
     * @return The badServiceFilter.
     */
    public java.lang.String getBadServiceFilter() {
      java.lang.Object ref = badServiceFilter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        badServiceFilter_ = s;  // cache the decoded String for subsequent calls
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
     * specifying a `TimeSeries` quantifying bad service, either demanded service
     * that was not provided or demanded service that was of inadequate quality.
     * Must have `ValueType = DOUBLE` or `ValueType = INT64` and must have
     * `MetricKind = DELTA` or `MetricKind = CUMULATIVE`.
     * </pre>
     *
     * <code>string bad_service_filter = 5;</code>
     *
     * @return The bytes for badServiceFilter.
     */
    public com.google.protobuf.ByteString getBadServiceFilterBytes() {
      java.lang.Object ref = badServiceFilter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        badServiceFilter_ = b;  // cache the encoded ByteString for subsequent calls
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
     * specifying a `TimeSeries` quantifying bad service, either demanded service
     * that was not provided or demanded service that was of inadequate quality.
     * Must have `ValueType = DOUBLE` or `ValueType = INT64` and must have
     * `MetricKind = DELTA` or `MetricKind = CUMULATIVE`.
     * </pre>
     *
     * <code>string bad_service_filter = 5;</code>
     *
     * @param value The badServiceFilter to set.
     * @return This builder for chaining.
     * @throws NullPointerException if {@code value} is null.
     */
    public Builder setBadServiceFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      badServiceFilter_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
     * specifying a `TimeSeries` quantifying bad service, either demanded service
     * that was not provided or demanded service that was of inadequate quality.
     * Must have `ValueType = DOUBLE` or `ValueType = INT64` and must have
     * `MetricKind = DELTA` or `MetricKind = CUMULATIVE`.
     * </pre>
     *
     * <code>string bad_service_filter = 5;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearBadServiceFilter() {
      badServiceFilter_ = getDefaultInstance().getBadServiceFilter();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
     * specifying a `TimeSeries` quantifying bad service, either demanded service
     * that was not provided or demanded service that was of inadequate quality.
     * Must have `ValueType = DOUBLE` or `ValueType = INT64` and must have
     * `MetricKind = DELTA` or `MetricKind = CUMULATIVE`.
     * </pre>
     *
     * <code>string bad_service_filter = 5;</code>
     *
     * @param value The bytes for badServiceFilter to set.
     * @return This builder for chaining.
     * @throws NullPointerException if {@code value} is null.
     */
    public Builder setBadServiceFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      badServiceFilter_ = value;
      onChanged();
      return this;
    }

    // Stored as Object: either a String or a ByteString, lazily converted and cached.
    private java.lang.Object totalServiceFilter_ = "";
    /**
     *
     *
     * <pre>
     * A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
     * specifying a `TimeSeries` quantifying total demanded service. Must have
     * `ValueType = DOUBLE` or `ValueType = INT64` and must have `MetricKind =
     * DELTA` or `MetricKind = CUMULATIVE`.
     * </pre>
     *
     * <code>string total_service_filter = 6;</code>
     *
     * @return The totalServiceFilter.
     */
    public java.lang.String getTotalServiceFilter() {
      java.lang.Object ref = totalServiceFilter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        totalServiceFilter_ = s;  // cache the decoded String for subsequent calls
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
     * specifying a `TimeSeries` quantifying total demanded service. Must have
     * `ValueType = DOUBLE` or `ValueType = INT64` and must have `MetricKind =
     * DELTA` or `MetricKind = CUMULATIVE`.
     * </pre>
     *
     * <code>string total_service_filter = 6;</code>
     *
     * @return The bytes for totalServiceFilter.
     */
    public com.google.protobuf.ByteString getTotalServiceFilterBytes() {
      java.lang.Object ref = totalServiceFilter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        totalServiceFilter_ = b;  // cache the encoded ByteString for subsequent calls
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
     * specifying a `TimeSeries` quantifying total demanded service. Must have
     * `ValueType = DOUBLE` or `ValueType = INT64` and must have `MetricKind =
     * DELTA` or `MetricKind = CUMULATIVE`.
     * </pre>
     *
     * <code>string total_service_filter = 6;</code>
     *
     * @param value The totalServiceFilter to set.
     * @return This builder for chaining.
     * @throws NullPointerException if {@code value} is null.
     */
    public Builder setTotalServiceFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      totalServiceFilter_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
     * specifying a `TimeSeries` quantifying total demanded service. Must have
     * `ValueType = DOUBLE` or `ValueType = INT64` and must have `MetricKind =
     * DELTA` or `MetricKind = CUMULATIVE`.
     * </pre>
     *
     * <code>string total_service_filter = 6;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearTotalServiceFilter() {
      totalServiceFilter_ = getDefaultInstance().getTotalServiceFilter();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
     * specifying a `TimeSeries` quantifying total demanded service. Must have
     * `ValueType = DOUBLE` or `ValueType = INT64` and must have `MetricKind =
     * DELTA` or `MetricKind = CUMULATIVE`.
     * </pre>
     *
     * <code>string total_service_filter = 6;</code>
     *
     * @param value The bytes for totalServiceFilter to set.
     * @return This builder for chaining.
     * @throws NullPointerException if {@code value} is null.
     */
    public Builder setTotalServiceFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      totalServiceFilter_ = value;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.monitoring.v3.TimeSeriesRatio)
  }

  // @@protoc_insertion_point(class_scope:google.monitoring.v3.TimeSeriesRatio)
  // Shared immutable default instance; all empty-field reads funnel through it.
  private static final com.google.monitoring.v3.TimeSeriesRatio DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.monitoring.v3.TimeSeriesRatio();
  }

  public static com.google.monitoring.v3.TimeSeriesRatio getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<TimeSeriesRatio> PARSER =
      new com.google.protobuf.AbstractParser<TimeSeriesRatio>() {
        @java.lang.Override
        public TimeSeriesRatio parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new TimeSeriesRatio(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<TimeSeriesRatio> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<TimeSeriesRatio> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.monitoring.v3.TimeSeriesRatio getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University // Copyright (c) 2011, 2012 Open Networking Foundation // Copyright (c) 2012, 2013 Big Switch Networks, Inc. // This library was generated by the LoxiGen Compiler. // See the file LICENSE.txt which should have been included in the source distribution // Automatically generated by LOXI from template of_class.java // Do not modify package org.projectfloodlight.openflow.protocol.ver12; import org.projectfloodlight.openflow.protocol.*; import org.projectfloodlight.openflow.protocol.action.*; import org.projectfloodlight.openflow.protocol.actionid.*; import org.projectfloodlight.openflow.protocol.bsntlv.*; import org.projectfloodlight.openflow.protocol.errormsg.*; import org.projectfloodlight.openflow.protocol.meterband.*; import org.projectfloodlight.openflow.protocol.instruction.*; import org.projectfloodlight.openflow.protocol.instructionid.*; import org.projectfloodlight.openflow.protocol.match.*; import org.projectfloodlight.openflow.protocol.stat.*; import org.projectfloodlight.openflow.protocol.oxm.*; import org.projectfloodlight.openflow.protocol.oxs.*; import org.projectfloodlight.openflow.protocol.queueprop.*; import org.projectfloodlight.openflow.types.*; import org.projectfloodlight.openflow.util.*; import org.projectfloodlight.openflow.exceptions.*; import java.util.Set; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import io.netty.buffer.ByteBuf; import com.google.common.hash.PrimitiveSink; import com.google.common.hash.Funnel; class OFBsnSetPktinSuppressionReplyVer12 implements OFBsnSetPktinSuppressionReply { private static final Logger logger = LoggerFactory.getLogger(OFBsnSetPktinSuppressionReplyVer12.class); // version: 1.2 final static byte WIRE_VERSION = 3; final static int LENGTH = 20; private final static long DEFAULT_XID = 0x0L; private final static long DEFAULT_STATUS = 0x0L; // OF message fields private final long xid; private final long status; // // 
Immutable default instance final static OFBsnSetPktinSuppressionReplyVer12 DEFAULT = new OFBsnSetPktinSuppressionReplyVer12( DEFAULT_XID, DEFAULT_STATUS ); // package private constructor - used by readers, builders, and factory OFBsnSetPktinSuppressionReplyVer12(long xid, long status) { this.xid = U32.normalize(xid); this.status = U32.normalize(status); } // Accessors for OF message fields @Override public OFVersion getVersion() { return OFVersion.OF_12; } @Override public OFType getType() { return OFType.EXPERIMENTER; } @Override public long getXid() { return xid; } @Override public long getExperimenter() { return 0x5c16c7L; } @Override public long getSubtype() { return 0x19L; } @Override public long getStatus() { return status; } public OFBsnSetPktinSuppressionReply.Builder createBuilder() { return new BuilderWithParent(this); } static class BuilderWithParent implements OFBsnSetPktinSuppressionReply.Builder { final OFBsnSetPktinSuppressionReplyVer12 parentMessage; // OF message fields private boolean xidSet; private long xid; private boolean statusSet; private long status; BuilderWithParent(OFBsnSetPktinSuppressionReplyVer12 parentMessage) { this.parentMessage = parentMessage; } @Override public OFVersion getVersion() { return OFVersion.OF_12; } @Override public OFType getType() { return OFType.EXPERIMENTER; } @Override public long getXid() { return xid; } @Override public OFBsnSetPktinSuppressionReply.Builder setXid(long xid) { this.xid = xid; this.xidSet = true; return this; } @Override public long getExperimenter() { return 0x5c16c7L; } @Override public long getSubtype() { return 0x19L; } @Override public long getStatus() { return status; } @Override public OFBsnSetPktinSuppressionReply.Builder setStatus(long status) { this.status = status; this.statusSet = true; return this; } @Override public OFBsnSetPktinSuppressionReply build() { long xid = this.xidSet ? this.xid : parentMessage.xid; long status = this.statusSet ? 
this.status : parentMessage.status; // return new OFBsnSetPktinSuppressionReplyVer12( xid, status ); } } static class Builder implements OFBsnSetPktinSuppressionReply.Builder { // OF message fields private boolean xidSet; private long xid; private boolean statusSet; private long status; @Override public OFVersion getVersion() { return OFVersion.OF_12; } @Override public OFType getType() { return OFType.EXPERIMENTER; } @Override public long getXid() { return xid; } @Override public OFBsnSetPktinSuppressionReply.Builder setXid(long xid) { this.xid = xid; this.xidSet = true; return this; } @Override public long getExperimenter() { return 0x5c16c7L; } @Override public long getSubtype() { return 0x19L; } @Override public long getStatus() { return status; } @Override public OFBsnSetPktinSuppressionReply.Builder setStatus(long status) { this.status = status; this.statusSet = true; return this; } // @Override public OFBsnSetPktinSuppressionReply build() { long xid = this.xidSet ? this.xid : DEFAULT_XID; long status = this.statusSet ? 
this.status : DEFAULT_STATUS; return new OFBsnSetPktinSuppressionReplyVer12( xid, status ); } } final static Reader READER = new Reader(); static class Reader implements OFMessageReader<OFBsnSetPktinSuppressionReply> { @Override public OFBsnSetPktinSuppressionReply readFrom(ByteBuf bb) throws OFParseError { int start = bb.readerIndex(); // fixed value property version == 3 byte version = bb.readByte(); if(version != (byte) 0x3) throw new OFParseError("Wrong version: Expected=OFVersion.OF_12(3), got="+version); // fixed value property type == 4 byte type = bb.readByte(); if(type != (byte) 0x4) throw new OFParseError("Wrong type: Expected=OFType.EXPERIMENTER(4), got="+type); int length = U16.f(bb.readShort()); if(length != 20) throw new OFParseError("Wrong length: Expected=20(20), got="+length); if(bb.readableBytes() + (bb.readerIndex() - start) < length) { // Buffer does not have all data yet bb.readerIndex(start); return null; } if(logger.isTraceEnabled()) logger.trace("readFrom - length={}", length); long xid = U32.f(bb.readInt()); // fixed value property experimenter == 0x5c16c7L int experimenter = bb.readInt(); if(experimenter != 0x5c16c7) throw new OFParseError("Wrong experimenter: Expected=0x5c16c7L(0x5c16c7L), got="+experimenter); // fixed value property subtype == 0x19L int subtype = bb.readInt(); if(subtype != 0x19) throw new OFParseError("Wrong subtype: Expected=0x19L(0x19L), got="+subtype); long status = U32.f(bb.readInt()); OFBsnSetPktinSuppressionReplyVer12 bsnSetPktinSuppressionReplyVer12 = new OFBsnSetPktinSuppressionReplyVer12( xid, status ); if(logger.isTraceEnabled()) logger.trace("readFrom - read={}", bsnSetPktinSuppressionReplyVer12); return bsnSetPktinSuppressionReplyVer12; } } public void putTo(PrimitiveSink sink) { FUNNEL.funnel(this, sink); } final static OFBsnSetPktinSuppressionReplyVer12Funnel FUNNEL = new OFBsnSetPktinSuppressionReplyVer12Funnel(); static class OFBsnSetPktinSuppressionReplyVer12Funnel implements 
Funnel<OFBsnSetPktinSuppressionReplyVer12> { private static final long serialVersionUID = 1L; @Override public void funnel(OFBsnSetPktinSuppressionReplyVer12 message, PrimitiveSink sink) { // fixed value property version = 3 sink.putByte((byte) 0x3); // fixed value property type = 4 sink.putByte((byte) 0x4); // fixed value property length = 20 sink.putShort((short) 0x14); sink.putLong(message.xid); // fixed value property experimenter = 0x5c16c7L sink.putInt(0x5c16c7); // fixed value property subtype = 0x19L sink.putInt(0x19); sink.putLong(message.status); } } public void writeTo(ByteBuf bb) { WRITER.write(bb, this); } final static Writer WRITER = new Writer(); static class Writer implements OFMessageWriter<OFBsnSetPktinSuppressionReplyVer12> { @Override public void write(ByteBuf bb, OFBsnSetPktinSuppressionReplyVer12 message) { // fixed value property version = 3 bb.writeByte((byte) 0x3); // fixed value property type = 4 bb.writeByte((byte) 0x4); // fixed value property length = 20 bb.writeShort((short) 0x14); bb.writeInt(U32.t(message.xid)); // fixed value property experimenter = 0x5c16c7L bb.writeInt(0x5c16c7); // fixed value property subtype = 0x19L bb.writeInt(0x19); bb.writeInt(U32.t(message.status)); } } @Override public String toString() { StringBuilder b = new StringBuilder("OFBsnSetPktinSuppressionReplyVer12("); b.append("xid=").append(xid); b.append(", "); b.append("status=").append(status); b.append(")"); return b.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; OFBsnSetPktinSuppressionReplyVer12 other = (OFBsnSetPktinSuppressionReplyVer12) obj; if( xid != other.xid) return false; if( status != other.status) return false; return true; } @Override public boolean equalsIgnoreXid(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; OFBsnSetPktinSuppressionReplyVer12 
other = (OFBsnSetPktinSuppressionReplyVer12) obj; // ignore XID if( status != other.status) return false; return true; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * (int) (xid ^ (xid >>> 32)); result = prime * (int) (status ^ (status >>> 32)); return result; } @Override public int hashCodeIgnoreXid() { final int prime = 31; int result = 1; // ignore XID result = prime * (int) (status ^ (status >>> 32)); return result; } }
package org.apereo.cas.adaptors.duo.config;

import org.apereo.cas.adaptors.duo.DuoHealthIndicator;
import org.apereo.cas.adaptors.duo.authn.DuoCredential;
import org.apereo.cas.adaptors.duo.authn.DuoDirectCredential;
import org.apereo.cas.adaptors.duo.authn.DuoMultifactorAuthenticationProvider;
import org.apereo.cas.adaptors.duo.authn.DuoSecurityAuthenticationHandler;
import org.apereo.cas.adaptors.duo.authn.DuoSecurityMultifactorAuthenticationProviderFactory;
import org.apereo.cas.adaptors.duo.web.DuoSecurityPingEndpoint;
import org.apereo.cas.adaptors.duo.web.DuoSecurityUserAccountStatusEndpoint;
import org.apereo.cas.adaptors.duo.web.flow.action.DuoSecurityDetermineUserAccountAction;
import org.apereo.cas.adaptors.duo.web.flow.action.DuoSecurityPrepareWebLoginFormAction;
import org.apereo.cas.adaptors.duo.web.flow.config.DuoSecurityMultifactorWebflowConfigurer;
import org.apereo.cas.authentication.AuthenticationEventExecutionPlanConfigurer;
import org.apereo.cas.authentication.AuthenticationHandler;
import org.apereo.cas.authentication.AuthenticationMetaDataPopulator;
import org.apereo.cas.authentication.MultifactorAuthenticationProviderBean;
import org.apereo.cas.authentication.MultifactorAuthenticationProviderFactoryBean;
import org.apereo.cas.authentication.bypass.ChainingMultifactorAuthenticationProviderBypass;
import org.apereo.cas.authentication.handler.ByCredentialTypeAuthenticationHandlerResolver;
import org.apereo.cas.authentication.metadata.AuthenticationContextAttributeMetaDataPopulator;
import org.apereo.cas.authentication.principal.PrincipalFactory;
import org.apereo.cas.authentication.principal.PrincipalFactoryUtils;
import org.apereo.cas.configuration.CasConfigurationProperties;
import org.apereo.cas.configuration.model.support.mfa.DuoSecurityMultifactorProperties;
import org.apereo.cas.services.ServicesManager;
import org.apereo.cas.util.http.HttpClient;
import org.apereo.cas.web.flow.CasWebflowConfigurer;
import org.apereo.cas.web.flow.CasWebflowExecutionPlan;
import org.apereo.cas.web.flow.CasWebflowExecutionPlanConfigurer;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.BeanCreationException;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.actuate.autoconfigure.endpoint.condition.ConditionalOnEnabledEndpoint;
import org.springframework.boot.actuate.autoconfigure.health.ConditionalOnEnabledHealthIndicator;
import org.springframework.boot.actuate.health.HealthIndicator;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.DependsOn;
import org.springframework.web.context.support.GenericWebApplicationContext;
import org.springframework.webflow.definition.registry.FlowDefinitionRegistry;
import org.springframework.webflow.engine.builder.support.FlowBuilderServices;
import org.springframework.webflow.execution.Action;

import java.util.Collection;
import java.util.stream.Collectors;

/**
 * This is {@link DuoSecurityAuthenticationEventExecutionPlanConfiguration}.
 * Wires the Duo Security multifactor authentication machinery into CAS:
 * authentication handlers, metadata populators, the Duo webflow configurer,
 * and optional actuator endpoints / health indicator.
 *
 * @author Misagh Moayyed
 * @author Dmitriy Kopylenko
 * @since 5.1.0
 */
@Configuration("duoSecurityAuthenticationEventExecutionPlanConfiguration")
@EnableConfigurationProperties(CasConfigurationProperties.class)
@Slf4j
public class DuoSecurityAuthenticationEventExecutionPlanConfiguration implements CasWebflowExecutionPlanConfigurer {
    @Autowired
    private GenericWebApplicationContext applicationContext;

    @Autowired
    private CasConfigurationProperties casProperties;

    @Autowired
    @Qualifier("loginFlowRegistry")
    private ObjectProvider<FlowDefinitionRegistry> loginFlowDefinitionRegistry;

    @Autowired
    private ObjectProvider<FlowBuilderServices> flowBuilderServices;

    @Autowired
    @Qualifier("noRedirectHttpClient")
    private ObjectProvider<HttpClient> httpClient;

    @Autowired
    @Qualifier("servicesManager")
    private ObjectProvider<ServicesManager> servicesManager;

    @Autowired
    @Qualifier("duoSecurityBypassEvaluator")
    private ObjectProvider<ChainingMultifactorAuthenticationProviderBypass> duoSecurityBypassEvaluator;

    /** Principal factory used by the Duo handlers; overridable via a bean of the same name. */
    @ConditionalOnMissingBean(name = "duoPrincipalFactory")
    @Bean
    public PrincipalFactory duoPrincipalFactory() {
        return PrincipalFactoryUtils.newPrincipalFactory();
    }

    /** Webflow action that prepares the Duo web login form. */
    @Bean
    public Action prepareDuoWebLoginFormAction() {
        return new DuoSecurityPrepareWebLoginFormAction();
    }

    /** Webflow action that determines the user's Duo account status. */
    @ConditionalOnMissingBean(name = "determineDuoUserAccountAction")
    @Bean
    public Action determineDuoUserAccountAction() {
        return new DuoSecurityDetermineUserAccountAction();
    }

    /**
     * Factory producing Duo multifactor providers from configuration,
     * using the no-redirect HTTP client and the Duo bypass evaluator chain.
     */
    @ConditionalOnMissingBean(name = "duoProviderFactory")
    @Bean
    @RefreshScope
    public MultifactorAuthenticationProviderFactoryBean<DuoMultifactorAuthenticationProvider, DuoSecurityMultifactorProperties> duoProviderFactory() {
        return new DuoSecurityMultifactorAuthenticationProviderFactory(httpClient.getIfAvailable(),
            duoSecurityBypassEvaluator.getIfAvailable());
    }

    /** Registers one Duo provider per configured Duo instance (cas.authn.mfa.duo[...]). */
    @ConditionalOnMissingBean(name = "duoProviderBean")
    @Bean
    @RefreshScope
    public MultifactorAuthenticationProviderBean<DuoMultifactorAuthenticationProvider, DuoSecurityMultifactorProperties> duoProviderBean() {
        return new MultifactorAuthenticationProviderBean(duoProviderFactory(),
            applicationContext.getDefaultListableBeanFactory(),
            casProperties.getAuthn().getMfa().getDuo());
    }

    /**
     * One authentication handler per fully-configured Duo instance. A Duo instance is
     * considered configured only when all of application key, API host, integration key
     * and secret key are non-blank; if none qualify, bean creation fails fast.
     *
     * @throws BeanCreationException when no Duo instance has a complete configuration.
     */
    @RefreshScope
    @Bean
    public Collection<AuthenticationHandler> duoAuthenticationHandler() {
        val duos = casProperties.getAuthn().getMfa().getDuo()
            .stream()
            .filter(d -> StringUtils.isNotBlank(d.getDuoApplicationKey()) && StringUtils.isNotBlank(d.getDuoApiHost())
                && StringUtils.isNotBlank(d.getDuoIntegrationKey()) && StringUtils.isNotBlank(d.getDuoSecretKey()))
            .collect(Collectors.toList());
        if (duos.isEmpty()) {
            throw new BeanCreationException("No configuration/settings could be found for Duo Security. Review settings and ensure the correct syntax is used");
        }
        return duos.stream()
            .map(d -> new DuoSecurityAuthenticationHandler(d.getId(),
                servicesManager.getIfAvailable(),
                duoPrincipalFactory(),
                duoProviderBean().getProvider(d.getId()),
                d.getOrder())
            ).collect(Collectors.toList());
    }

    /**
     * Duo webflow configurer; device registration is toggled by the
     * trusted-device registration setting.
     */
    @ConditionalOnMissingBean(name = "duoMultifactorWebflowConfigurer")
    @Bean
    @DependsOn("defaultWebflowConfigurer")
    public CasWebflowConfigurer duoMultifactorWebflowConfigurer() {
        val deviceRegistrationEnabled = casProperties.getAuthn().getMfa().getTrusted().isDeviceRegistrationEnabled();
        return new DuoSecurityMultifactorWebflowConfigurer(flowBuilderServices.getIfAvailable(),
            loginFlowDefinitionRegistry.getIfAvailable(),
            deviceRegistrationEnabled, applicationContext, casProperties);
    }

    // Populates the configured authentication-context attribute with the id of the
    // Duo provider matching the given handler's name.
    private AuthenticationMetaDataPopulator duoAuthenticationMetaDataPopulator(final AuthenticationHandler authenticationHandler) {
        return new AuthenticationContextAttributeMetaDataPopulator(
            casProperties.getAuthn().getMfa().getAuthenticationContextAttribute(),
            authenticationHandler,
            duoProviderBean().getProvider(authenticationHandler.getName()).getId()
        );
    }

    /**
     * Registers every Duo handler plus its metadata populator with the authentication
     * plan, and resolves handlers by Duo credential types.
     */
    @ConditionalOnMissingBean(name = "duoSecurityAuthenticationEventExecutionPlanConfigurer")
    @Bean
    public AuthenticationEventExecutionPlanConfigurer duoSecurityAuthenticationEventExecutionPlanConfigurer() {
        return plan -> {
            duoAuthenticationHandler()
                .forEach(dh -> {
                    plan.registerAuthenticationHandler(dh);
                    plan.registerAuthenticationMetadataPopulator(duoAuthenticationMetaDataPopulator(dh));
                });
            plan.registerAuthenticationHandlerResolver(new ByCredentialTypeAuthenticationHandlerResolver(DuoCredential.class, DuoDirectCredential.class));
        };
    }

    @Override
    public void configureWebflowExecutionPlan(final CasWebflowExecutionPlan plan) {
        plan.registerWebflowConfigurer(duoMultifactorWebflowConfigurer());
    }

    /** Actuator health indicator for Duo, enabled via management health settings. */
    @Bean
    @ConditionalOnEnabledHealthIndicator("duoSecurityHealthIndicator")
    public HealthIndicator duoSecurityHealthIndicator() {
        return new DuoHealthIndicator(applicationContext);
    }

    /** Actuator endpoint reporting Duo ping/connectivity status. */
    @Bean
    @ConditionalOnEnabledEndpoint
    public DuoSecurityPingEndpoint duoPingEndpoint() {
        return new DuoSecurityPingEndpoint(casProperties, applicationContext);
    }

    /** Actuator endpoint reporting Duo user account status. */
    @Bean
    @ConditionalOnEnabledEndpoint
    public DuoSecurityUserAccountStatusEndpoint duoAccountStatusEndpoint() {
        return new DuoSecurityUserAccountStatusEndpoint(casProperties, applicationContext);
    }
}
package com.romanov_v.parser;

import com.romanov_v.parser.grammar.Grammar;
import com.romanov_v.parser.grammar.Rule;
import com.romanov_v.parser.grammar.Term;

import java.util.*;
import java.util.stream.Collectors;

/**
 * Recursive-descent parser for a simplified BNF notation.
 *
 * <p>Input is a multi-line string where each rule has the form
 * {@code <name> ::= expression | expression ...}. Lines that start with
 * {@code |}, or that follow a line ending with {@code |}, are treated as
 * continuations of the previous rule. Terminals are quoted with {@code "}
 * or {@code '} and are expanded into one {@code Term} per character.
 *
 * <p>Instances are single-use: one instance parses one (re-joined) rule
 * statement. The only public entry point is {@link #parse(String)}.
 *
 * Created by vlad on 11/03/16.
 */
public class SimpleBnfParser {

    // Full text of the single rule statement being parsed.
    private String statement;

    // Cursor into {@link #statement}; always the index of the next
    // unconsumed character.
    private int position = 0;

    private SimpleBnfParser(String statement) {
        this.statement = statement;
    }

    // <rule> | <rule> <syntax>
    /**
     * Parses a complete grammar description.
     *
     * @param statement multi-line BNF text
     * @return the parsed {@link Grammar}
     * @throws ParserException if any rule is malformed
     */
    public static Grammar parse(String statement) throws ParserException {
        String[] statements = statement.split("\n");
        // Drop blank lines and surrounding whitespace.
        List<String> started = Arrays.stream(statements)
                .map(String::trim)
                .filter(s -> !s.isEmpty())
                .collect(Collectors.toList());

        // Re-join continuation lines into single rule statements: a line
        // ending with '|' continues on the next line, and a line starting
        // with '|' continues the previous one.
        List<String> list = new ArrayList<>();
        String prev = "";
        for (String str : started) {
            if (!prev.isEmpty() && prev.charAt(prev.length() - 1) == '|') {
                prev += str;
                continue;
            }
            if (str.charAt(0) == '|' || prev.isEmpty()) {
                prev += str;
                continue;
            }
            list.add(prev);
            prev = str;
        }
        if (!prev.isEmpty()) {
            list.add(prev);
        }

        List<Rule> rules = new ArrayList<>();
        for (String stmnt : list) {
            stmnt = stmnt.trim();
            if (stmnt.isEmpty()) {
                continue;
            }
            // One single-use parser instance per rule statement.
            SimpleBnfParser simpleBnfParser = new SimpleBnfParser(stmnt);
            rules.addAll(simpleBnfParser.rule());
        }
        return new Grammar(rules);
    }

    // <opt-whitespace> "<" <rule-name> ">" <opt-whitespace> "::=" <opt-whitespace> <expression> <line-end>
    /**
     * Parses one rule statement; alternatives ({@code |}) yield one
     * {@link Rule} each, all sharing the same rule name.
     */
    private List<Rule> rule() throws ParserException {
        // Rule name
        skipWhitespaces();
        List<Rule> rules = new ArrayList<>();
        String ruleName = getTextBetween('<', '>');
        skipWhitespaces();
        match("::=");
        skipWhitespaces();
        rules.addAll(list(ruleName));
        return rules;
    }

    // <list> | <list> <opt-whitespace> "|" <opt-whitespace> <expression>
    /** Parses '|'-separated alternatives, producing one Rule per alternative. */
    private List<Rule> list(String name) throws ParserException {
        List<Rule> expressions = new ArrayList<>();
        expressions.add(new Rule(name, expression()));
        skipWhitespaces();
        while (hasNext() && statement.charAt(position) == '|') {
            next(); // consume '|'
            skipWhitespaces();
            expressions.add(new Rule(name, expression()));
            skipWhitespaces();
        }
        return expressions;
    }

    // <term> | <term> <opt-whitespace> <expression>
    /** Parses a sequence of terms up to the next '|' or end of input. */
    private List<Term> expression() throws ParserException {
        List<Term> expression = new ArrayList<>();
        expression.addAll(term());
        skipWhitespaces();
        while (hasNext() && statement.charAt(position) != '|') {
            expression.addAll(term());
            skipWhitespaces();
        }
        return expression;
    }

    // <literal> | "<" <rule-name> ">"
    /**
     * Parses one term: either a non-terminal reference {@code <name>} or a
     * quoted literal, which is expanded into one text Term per character.
     */
    private List<Term> term() throws ParserException {
        char current = statement.charAt(position);
        List<Term> terms = new ArrayList<>();
        switch (current) {
            case '<':
                String ruleName = getTextBetween('<', '>');
                terms.add(Term.createRuleTerm(ruleName));
                break;
            case '"':
            case '\'':
                String text = literal();
                for (int i = 0; i < text.length(); i++) {
                    terms.add(Term.createTextTerm(text.charAt(i)));
                }
                break;
            default:
                throw createParserException("Unexpected symbol '" + current + "'");
        }
        return terms;
    }

    // '"' <text> '"' | "'" <text> "'"
    /** Parses a quoted literal; the closing quote must match the opening one. */
    private String literal() throws ParserException {
        char start = statement.charAt(position);
        return getTextBetween(start, start);
    }

    /** @return true while there are unconsumed characters */
    private boolean hasNext() {
        return statement.length() > position;
    }

    /**
     * Consumes and returns the next character.
     *
     * @throws ParserException at end of input (position is left unchanged)
     */
    private char next() throws ParserException {
        try {
            return statement.charAt(position++);
        } catch (StringIndexOutOfBoundsException e) {
            position--; // undo the speculative advance
            throw createParserException("Unexpected end");
        }
    }

    /** Consumes {@code from}, then all text up to (and including) {@code to}. */
    private String getTextBetween(char from, char to) throws ParserException {
        match(from);
        StringBuilder stringBuilder = new StringBuilder();
        for (char c = next(); c != to; c = next()) {
            stringBuilder.append(c);
        }
        return stringBuilder.toString();
    }

    /**
     * Consumes {@code str} at the current position.
     *
     * <p>Fix: the original implementation compared with a non-short-circuit
     * {@code &=} loop, so it kept consuming after a mismatch and reported a
     * misleading "Unexpected end" (from {@link #next()}) when the input ran
     * out mid-match. We now validate up front and always report the intended
     * "'str' expected" message at the position where matching began.
     */
    private void match(String str) throws ParserException {
        int start = position;
        if (start + str.length() > statement.length()
                || !statement.startsWith(str, start)) {
            throw new ParserException("'" + str + "' expected", statement, start);
        }
        position = start + str.length();
    }

    /** Consumes the single character {@code c} or fails. */
    private void match(char c) throws ParserException {
        if (next() != c) {
            throw createParserException("'" + c + "' expected");
        }
    }

    /** Advances the cursor past any whitespace (no-op at end of input). */
    private void skipWhitespaces() {
        while (position < statement.length()
                && Character.isWhitespace(statement.charAt(position))) {
            position++;
        }
    }

    /** Builds a ParserException carrying the statement and current position. */
    private ParserException createParserException(String message) {
        return new ParserException(message, statement, position);
    }
}
/*
 * The MIT License (MIT)
 *
 * Copyright (c) 2007-2015 Broad Institute
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

package org.broad.igv.sam;

import org.apache.log4j.Logger;
import org.broad.igv.feature.Range;
import org.broad.igv.feature.Strand;

import java.util.*;

/**
 * Packs alignments such that there is no overlap
 *
 * @author jrobinso
 */
public class AlignmentPacker {

    private static final Logger log = Logger.getLogger(AlignmentPacker.class);

    /**
     * Minimum gap between the end of one alignment and start of another.
     */
    public static final int MIN_ALIGNMENT_SPACING = 2;

    /**
     * Orders alignments longest-first within a bucket.
     *
     * <p>Fix: the original second operand was {@code row1.getEnd() - row2.getStart()},
     * mixing the two alignments' coordinates and producing an inconsistent
     * ordering. Use overflow-safe {@code Integer.compare} on the two lengths.
     */
    private static final Comparator<Alignment> lengthComparator = new Comparator<Alignment>() {
        public int compare(Alignment row1, Alignment row2) {
            return Integer.compare(row2.getEnd() - row2.getStart(),
                    row1.getEnd() - row1.getStart());
        }
    };

    // Key used for alignments whose group value is null.
    private static final String NULL_GROUP_VALUE = "";

    // Window size (bp) below which the dense bucket implementation is used.
    public static final int tenMB = 10000000;

    /**
     * Allocates each alignment to row such that there is no overlap.
     *
     * @param interval      the interval whose alignments should be packed
     * @param renderOptions options controlling grouping, pairing and linking
     * @return the packed rows, one row list per group (single "" group when
     * no grouping option is set)
     */
    public PackedAlignments packAlignments(
            AlignmentInterval interval,
            AlignmentTrack.RenderOptions renderOptions) {

        LinkedHashMap<String, List<Row>> packedAlignments = new LinkedHashMap<String, List<Row>>();
        List<Alignment> alList = interval.getAlignments();

        // TODO -- means to undo this
        if (renderOptions.isLinkedReads()) {
            alList = linkByTag(alList, renderOptions.getLinkByTag());
        }

        if (renderOptions.groupByOption == null) {
            List<Row> alignmentRows = new ArrayList<>(10000);
            pack(alList, renderOptions, alignmentRows);
            packedAlignments.put("", alignmentRows);
        } else {
            // Separate alignments into groups.
            Map<String, List<Alignment>> groupedAlignments = new HashMap<String, List<Alignment>>();
            Iterator<Alignment> iter = alList.iterator();
            while (iter.hasNext()) {
                Alignment alignment = iter.next();
                String groupKey = getGroupValue(alignment, renderOptions);
                if (groupKey == null) {
                    groupKey = NULL_GROUP_VALUE;
                }
                List<Alignment> groupList = groupedAlignments.get(groupKey);
                if (groupList == null) {
                    groupList = new ArrayList<>(1000);
                    groupedAlignments.put(groupKey, groupList);
                }
                groupList.add(alignment);
            }

            // Now alphabetize (sort) and pack the groups
            List<String> keys = new ArrayList<String>(groupedAlignments.keySet());
            Comparator<String> groupComparator = getGroupComparator(renderOptions.groupByOption);
            Collections.sort(keys, groupComparator);

            for (String key : keys) {
                List<Row> alignmentRows = new ArrayList<>(10000);
                List<Alignment> group = groupedAlignments.get(key);
                pack(group, renderOptions, alignmentRows);
                packedAlignments.put(key, alignmentRows);
            }
        }

        List<AlignmentInterval> tmp = new ArrayList<AlignmentInterval>();
        tmp.add(interval);
        return new PackedAlignments(tmp, packedAlignments);
    }

    /**
     * Packs one group of alignments into non-overlapping rows, appending the
     * rows to {@code alignmentRows}. Optionally merges read pairs into
     * {@link PairedAlignment} objects first.
     */
    private void pack(List<Alignment> alList, AlignmentTrack.RenderOptions renderOptions,
                      List<Row> alignmentRows) {

        Map<String, PairedAlignment> pairs = null;
        boolean isPairedAlignments = renderOptions.isViewPairs();
        if (isPairedAlignments) {
            pairs = new HashMap<>(1000);
        }

        // Allocate alignemnts to buckets for each range.
        // We use priority queues to keep the buckets sorted by alignment length. However this is probably a needless
        // complication, any collection type would do.
        int totalCount = 0;

        if (alList == null || alList.size() == 0) return;

        Range curRange = getAlignmentListRange(alList);

        BucketCollection bucketCollection;

        // Use dense buckets for < 10,000,000 bp windows sparse otherwise
        int bpLength = curRange.getLength();
        if (bpLength < tenMB) {
            bucketCollection = new DenseBucketCollection(bpLength, curRange);
        } else {
            bucketCollection = new SparseBucketCollection(curRange);
        }

        int curRangeStart = curRange.getStart();
        for (Alignment al : alList) {
            if (al.isMapped()) {
                Alignment alignment = al;

                // Pair alignments -- do not pair secondaryalignments
                if (isPairedAlignments && isPairable(al)) {
                    String readName = al.getReadName();
                    PairedAlignment pair = pairs.get(readName);
                    if (pair == null) {
                        pair = new PairedAlignment(al);
                        pairs.put(readName, pair);
                        alignment = pair;
                    } else {
                        // Add second alignment to pair.
                        pair.setSecondAlignment(al);
                        pairs.remove(readName);
                        continue;
                    }
                }

                // Allocate to bucket.
                // Negative "bucketNumbers" can arise with soft clips at the left edge of the chromosome. Allocate
                // these alignments to the first bucket.
                int bucketNumber = Math.max(0, al.getStart() - curRangeStart);
                if (bucketNumber < bucketCollection.getBucketCount()) {
                    PriorityQueue<Alignment> bucket = bucketCollection.get(bucketNumber);
                    if (bucket == null) {
                        bucket = new PriorityQueue<Alignment>(5, lengthComparator);
                        bucketCollection.set(bucketNumber, bucket);
                    }
                    bucket.add(alignment);
                    totalCount++;
                } else {
                    log.debug("Alignment out of bounds. name: " + alignment.getReadName()
                            + " startPos:" + alignment.getStart());
                }
            }
        }
        bucketCollection.finishedAdding();

        // Now allocate alignments to rows.
        long t0 = System.currentTimeMillis();
        int allocatedCount = 0;
        Row currentRow = new Row();
        while (allocatedCount < totalCount) {
            curRange = bucketCollection.getRange();
            curRangeStart = curRange.getStart();
            int nextStart = curRangeStart;
            List<Integer> emptyBuckets = new ArrayList<Integer>(100);
            while (true) {
                int bucketNumber = nextStart - curRangeStart;
                PriorityQueue<Alignment> bucket = bucketCollection.getNextBucket(bucketNumber, emptyBuckets);

                // Pull the next alignment out of the bucket and add to the current row
                if (bucket != null) {
                    Alignment alignment = bucket.remove();
                    currentRow.addAlignment(alignment);
                    allocatedCount++;
                    nextStart = alignment.getEnd() + MIN_ALIGNMENT_SPACING;
                }

                //Reached the end of this range, move to the next
                if (bucket == null || nextStart > curRange.getEnd()) {
                    //Remove empty buckets. This has no affect on the dense implementation,
                    //they are removed on the fly, but is needed for the sparse implementation
                    bucketCollection.removeBuckets(emptyBuckets);
                    emptyBuckets.clear();
                    break;
                }
            }

            // We've reached the end of the interval, start a new row
            if (currentRow.alignments.size() > 0) {
                alignmentRows.add(currentRow);
            }
            currentRow = new Row();
        }
        if (log.isDebugEnabled()) {
            long dt = System.currentTimeMillis() - t0;
            log.debug("Packed alignments in " + dt);
        }

        // Add the last row
        if (currentRow != null && currentRow.alignments.size() > 0) {
            alignmentRows.add(currentRow);
        }
    }

    /**
     * An alignment can be paired only if it is the primary record, flagged as
     * paired, and its mate is mapped to the same chromosome.
     */
    private boolean isPairable(Alignment al) {
        return al.isPrimary()
                && al.isPaired()
                && al.getMate().isMapped()
                && al.getMate().getChr().equals(al.getChr());
    }

    /**
     * Groups primary alignments sharing the same tag value (or read name when
     * {@code tag == "READNAME"}) into {@link LinkedAlignment}s. Alignments with
     * no tag value, and secondary alignments, pass through unlinked; linked
     * groups of size 1 are de-linked again.
     */
    private List<Alignment> linkByTag(List<Alignment> alList, String tag) {

        List<Alignment> bcList = new ArrayList<>(alList.size() / 10);
        // Fix: the map was previously sized from bcList (empty at this point,
        // so capacity 0); size it from the input list instead.
        Map<Object, LinkedAlignment> map = new HashMap<>(alList.size() * 2);

        for (Alignment a : alList) {
            if (a.isPrimary()) {
                Object bc;
                if ("READNAME".equals(tag)) {
                    bc = a.getReadName();
                    if (a.isPaired()) {
                        bc += a.isFirstOfPair() ? "/1" : "/2";
                    }
                } else {
                    bc = a.getAttribute(tag);
                }

                if (bc == null) {
                    bcList.add(a);
                } else {
                    LinkedAlignment linkedAlignment = map.get(bc);
                    if (linkedAlignment == null) {
                        linkedAlignment = new LinkedAlignment(tag, bc.toString());
                        map.put(bc, linkedAlignment);
                        bcList.add(linkedAlignment);
                    }
                    linkedAlignment.addAlignment(a);
                }
            } else {
                // Don't link secondary reads
                bcList.add(a);
            }
        }

        // Now copy list, de-linking orhpaned alignments (alignments with no linked mates)
        List<Alignment> delinkedList = new ArrayList<>(alList.size());
        for (Alignment a : bcList) {
            if (a instanceof LinkedAlignment) {
                final List<Alignment> alignments = ((LinkedAlignment) a).alignments;
                if (alignments.size() == 1) {
                    delinkedList.add(alignments.get(0));
                } else {
                    a.finish();
                    delinkedList.add(a);
                }
            } else {
                delinkedList.add(a);
            }
        }
        return delinkedList;
    }

    /**
     * Gets the range over which alignmentsList spans. Asssumes all on same chr, and sorted
     *
     * @param alignmentsList
     * @return
     */
    private Range getAlignmentListRange(List<Alignment> alignmentsList) {
        if (alignmentsList == null || alignmentsList.size() == 0) return null;
        Alignment firstAlignment = alignmentsList.get(0);

        // Start comes from the first alignment (sorted input); the end must be
        // scanned, since a long early alignment can extend past later ones.
        int minStart = firstAlignment.getStart();
        int maxEnd = firstAlignment.getEnd();
        for (Alignment alignment : alignmentsList) {
            maxEnd = Math.max(maxEnd, alignment.getEnd());
        }
        return new Range(firstAlignment.getChr(), minStart, maxEnd);
    }

    /**
     * Returns the comparator used to order group keys; nulls and the empty
     * (null-value) group sort last.
     */
    private Comparator<String> getGroupComparator(AlignmentTrack.GroupOption groupByOption) {
        switch (groupByOption) {
            case PAIR_ORIENTATION:
                return new PairOrientationComparator();
            default:
                //Sort null values towards the end
                return new Comparator<String>() {
                    @Override
                    public int compare(String o1, String o2) {
                        if (o1 == null && o2 == null) {
                            return 0;
                        } else if (o1 == null) {
                            return 1;
                        } else if (o2 == null) {
                            return -1;
                        } else {
                            // no nulls
                            if (o1.equals(o2)) {
                                return 0;
                            } else if (NULL_GROUP_VALUE.equals(o1)) {
                                return 1;
                            }
                            if (NULL_GROUP_VALUE.equals(o2)) {
                                return -1;
                            } else {
                                return o1.compareToIgnoreCase(o2);
                            }
                        }
                    }
                };
        }
    }

    /**
     * Computes the group key for an alignment under the configured group-by
     * option; may return null (callers map null to {@link #NULL_GROUP_VALUE}).
     */
    private String getGroupValue(Alignment al, AlignmentTrack.RenderOptions renderOptions) {

        AlignmentTrack.GroupOption groupBy = renderOptions.groupByOption;
        String tag = renderOptions.getGroupByTag();
        Range pos = renderOptions.getGroupByPos();

        switch (groupBy) {
            case STRAND:
                return al.isNegativeStrand() ? "-" : "+";
            case SAMPLE:
                return al.getSample();
            case LIBRARY:
                return al.getLibrary();
            case READ_GROUP:
                return al.getReadGroup();
            case TAG:
                Object tagValue = al.getAttribute(tag);
                return tagValue == null ? null : tagValue.toString();
            case FIRST_OF_PAIR_STRAND:
                Strand strand = al.getFirstOfPairStrand();
                String strandString = strand == Strand.NONE ? null : strand.toString();
                return strandString;
            case PAIR_ORIENTATION:
                PEStats peStats = AlignmentRenderer.getPEStats(al, renderOptions);
                AlignmentTrack.OrientationType type = AlignmentRenderer.getOrientationType(al, peStats);
                if (type == null) {
                    return AlignmentTrack.OrientationType.UNKNOWN.name();
                }
                return type.name();
            case MATE_CHROMOSOME:
                ReadMate mate = al.getMate();
                if (mate == null) {
                    return null;
                }
                if (mate.isMapped() == false) {
                    return "UNMAPPED";
                } else {
                    return mate.getChr();
                }
            case SUPPLEMENTARY:
                return al.isSupplementary() ? "SUPPLEMENTARY" : "";
            case BASE_AT_POS:
                // Use a string prefix to enforce grouping rules:
                //    1: alignments with a base at the position
                //    2: alignments with a gap at the position
                //    3: alignment that do not overlap the position (or are on a different chromosome)
                if (al.getChr().equals(pos.getChr())
                        && al.getAlignmentStart() <= pos.getStart()
                        && al.getAlignmentEnd() > pos.getStart()) {
                    byte[] baseAtPos = new byte[]{al.getBase(pos.getStart())};
                    if (baseAtPos[0] == 0) { // gap at position
                        return "2:";
                    } else { // base at position
                        return "1:" + new String(baseAtPos);
                    }
                } else { // does not overlap position
                    return "3:";
                }
        }
        return null;
    }

    /**
     * Collection of per-position alignment buckets used during packing; keys
     * are offsets from the start of the packing interval.
     */
    interface BucketCollection {

        Range getRange();

        void set(int idx, PriorityQueue<Alignment> bucket);

        PriorityQueue<Alignment> get(int idx);

        PriorityQueue<Alignment> getNextBucket(int bucketNumber, Collection<Integer> emptyBuckets);

        void removeBuckets(Collection<Integer> emptyBuckets);

        void finishedAdding();

        int getBucketCount();
    }

    /**
     * Dense array implementation of BucketCollection. Assumption is all or nearly all the genome region is covered
     * with reads.
     */
    static class DenseBucketCollection implements BucketCollection {

        Range range;

        int lastBucketNumber = -1;

        // Raw element type: generic arrays cannot be instantiated.
        final PriorityQueue[] bucketArray;

        DenseBucketCollection(int bucketCount, Range range) {
            this.bucketArray = new PriorityQueue[bucketCount];
            this.range = range;
        }

        public void set(int idx, PriorityQueue<Alignment> bucket) {
            bucketArray[idx] = bucket;
        }

        public PriorityQueue<Alignment> get(int idx) {
            return bucketArray[idx];
        }

        public int getBucketCount() {
            return this.bucketArray.length;
        }

        public Range getRange() {
            return range;
        }

        /**
         * Return the next occupied bucket after bucketNumber
         *
         * @param bucketNumber
         * @param emptyBuckets ignored
         * @return
         */
        public PriorityQueue<Alignment> getNextBucket(int bucketNumber, Collection<Integer> emptyBuckets) {

            if (bucketNumber == lastBucketNumber) {
                // TODO -- detect inf loop here
            }

            PriorityQueue<Alignment> bucket = null;
            while (bucketNumber < bucketArray.length) {
                if (bucketNumber < 0) {
                    log.info("Negative bucket number: " + bucketNumber);
                }
                bucket = bucketArray[bucketNumber];
                if (bucket != null) {
                    if (bucket.isEmpty()) {
                        // Drained buckets are cleared on the fly.
                        bucketArray[bucketNumber] = null;
                    } else {
                        return bucket;
                    }
                }
                bucketNumber++;
            }
            return null;
        }

        public void removeBuckets(Collection<Integer> emptyBuckets) {
            // Nothing to do, empty buckets are removed "on the fly"
        }

        public void finishedAdding() {
            // nothing to do
        }
    }

    /**
     * "Sparse" implementation of an alignment BucketCollection. Assumption is there are small clusters of alignments
     * along the genome, with mostly "white space".
     */
    static class SparseBucketCollection implements BucketCollection {

        Range range;
        boolean finished = false;

        // Sorted bucket keys; rebuilt whenever buckets are removed.
        List<Integer> keys;
        final HashMap<Integer, PriorityQueue<Alignment>> buckets;

        SparseBucketCollection(Range range) {
            this.range = range;
            this.buckets = new HashMap<>(1000);
        }

        public void set(int idx, PriorityQueue<Alignment> bucket) {
            if (finished) {
                log.error("Error: bucket added after finishAdding() called");
            }
            buckets.put(idx, bucket);
        }

        public PriorityQueue<Alignment> get(int idx) {
            return buckets.get(idx);
        }

        public Range getRange() {
            return range;
        }

        /**
         * Return the next occupied bucket at or after after bucketNumber.
         *
         * @param bucketNumber -- the hash bucket index for the alignments, essential the position relative to the start
         *                     of this packing interval
         * @return the next occupied bucket at or after bucketNumber, or null if there are none.
         */
        public PriorityQueue<Alignment> getNextBucket(int bucketNumber, Collection<Integer> emptyBuckets) {

            PriorityQueue<Alignment> bucket = null;
            int min = 0;
            int max = keys.size() - 1;

            // Get close to the right index, rather than scan from the beginning
            while ((max - min) > 5) {
                int mid = (max + min) / 2;
                Integer key = keys.get(mid);
                if (key > bucketNumber) {
                    max = mid;
                } else {
                    min = mid;
                }
            }

            // Now march from min to max until we cross bucketNumber
            for (int i = min; i < keys.size(); i++) {
                Integer key = keys.get(i);
                if (key >= bucketNumber) {
                    bucket = buckets.get(key);
                    if (bucket.isEmpty()) {
                        emptyBuckets.add(key);
                        bucket = null;
                    } else {
                        return bucket;
                    }
                }
            }
            return null;    // No bucket found
        }

        public void removeBuckets(Collection<Integer> emptyBuckets) {
            if (emptyBuckets.isEmpty()) {
                return;
            }
            for (Integer i : emptyBuckets) {
                buckets.remove(i);
            }
            keys = new ArrayList<Integer>(buckets.keySet());
            Collections.sort(keys);
        }

        public void finishedAdding() {
            finished = true;
            keys = new ArrayList<Integer>(buckets.keySet());
            Collections.sort(keys);
        }

        public int getBucketCount() {
            // Sparse collection imposes no upper bound on bucket indices.
            return Integer.MAX_VALUE;
        }
    }

    /**
     * Orders PAIR_ORIENTATION group keys by the declaration order of
     * {@link AlignmentTrack.OrientationType}; nulls sort last.
     */
    private class PairOrientationComparator implements Comparator<String> {
        private final List<AlignmentTrack.OrientationType> orientationTypes;

        public PairOrientationComparator() {
            orientationTypes = Arrays.asList(AlignmentTrack.OrientationType.values());
        }

        @Override
        public int compare(String s0, String s1) {
            if (s0 != null && s1 != null) {
                AlignmentTrack.OrientationType t0 = AlignmentTrack.OrientationType.valueOf(s0);
                AlignmentTrack.OrientationType t1 = AlignmentTrack.OrientationType.valueOf(s1);
                return orientationTypes.indexOf(t0) - orientationTypes.indexOf(t1);
            } else if (s0 == null ^ s1 == null) {
                //exactly one is null
                return s0 == null ? 1 : -1;
            } else {
                //both null
                return 0;
            }
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ambari.server.security.ldap; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.naming.NamingException; import javax.naming.directory.Attributes; import javax.naming.directory.SearchControls; import org.apache.ambari.server.AmbariException; import org.apache.ambari.server.configuration.Configuration; import org.apache.ambari.server.security.authorization.AmbariLdapUtils; import org.apache.ambari.server.security.authorization.Group; import org.apache.ambari.server.security.authorization.LdapServerProperties; import org.apache.ambari.server.security.authorization.User; import org.apache.ambari.server.security.authorization.Users; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.ldap.control.PagedResultsDirContextProcessor; import org.springframework.ldap.core.AttributesMapper; import org.springframework.ldap.core.ContextMapper; import 
org.springframework.ldap.core.DirContextAdapter; import org.springframework.ldap.core.LdapTemplate; import org.springframework.ldap.core.support.LdapContextSource; import org.springframework.ldap.filter.AndFilter; import org.springframework.ldap.filter.EqualsFilter; import org.springframework.ldap.filter.Filter; import org.springframework.ldap.filter.HardcodedFilter; import org.springframework.ldap.filter.LikeFilter; import org.springframework.ldap.filter.OrFilter; import org.springframework.ldap.support.LdapUtils; import org.springframework.security.core.userdetails.UsernameNotFoundException; import com.google.common.collect.Sets; import com.google.inject.Inject; /** * Provides users, groups and membership population from LDAP catalog. */ public class AmbariLdapDataPopulator { /** * Log. */ private static final Logger LOG = LoggerFactory.getLogger(AmbariLdapDataPopulator.class); /** * Ambari configuration. */ private Configuration configuration; /** * Highlevel facade for management of users and groups. */ private Users users; /** * LDAP specific properties. */ protected LdapServerProperties ldapServerProperties; /** * LDAP template for making search queries. */ private LdapTemplate ldapTemplate; // Constants private static final String UID_ATTRIBUTE = "uid"; private static final String OBJECT_CLASS_ATTRIBUTE = "objectClass"; private static final int USERS_PAGE_SIZE = 500; // REGEXP to check member attribute starts with "cn=" or "uid=" - case insensitive private static final String IS_MEMBER_DN_REGEXP = "^(?i)(uid|cn|%s|%s)=.*$"; private static final String MEMBER_ATTRIBUTE_REPLACE_STRING = "${member}"; private static final String MEMBER_ATTRIBUTE_VALUE_PLACEHOLDER = "{member}"; /** * Construct an AmbariLdapDataPopulator. 
* * @param configuration the Ambari configuration * @param users utility that provides access to Users */ @Inject public AmbariLdapDataPopulator(Configuration configuration, Users users) { this.configuration = configuration; this.users = users; this.ldapServerProperties = configuration.getLdapServerProperties(); } /** * Check if LDAP is enabled in server properties. * * @return true if enabled */ public boolean isLdapEnabled() { if (!configuration.isLdapConfigured()) { return false; } try { final LdapTemplate ldapTemplate = loadLdapTemplate(); ldapTemplate.search(ldapServerProperties.getBaseDN(), "uid=dummy_search", new AttributesMapper() { @Override public Object mapFromAttributes(Attributes arg0) throws NamingException { return null; } }); return true; } catch (Exception ex) { LOG.error("Could not connect to LDAP server - " + ex.getMessage()); return false; } } /** * Retrieves information about external groups and users and their synced/unsynced state. * * @return dto with information */ public LdapSyncDto getLdapSyncInfo() { final LdapSyncDto syncInfo = new LdapSyncDto(); final Map<String, Group> internalGroupsMap = getInternalGroups(); final Set<LdapGroupDto> externalGroups = getExternalLdapGroupInfo(); for (LdapGroupDto externalGroup : externalGroups) { if (internalGroupsMap.containsKey(externalGroup.getGroupName()) && internalGroupsMap.get(externalGroup.getGroupName()).isLdapGroup()) { externalGroup.setSynced(true); } else { externalGroup.setSynced(false); } } final Map<String, User> internalUsersMap = getInternalUsers(); final Set<LdapUserDto> externalUsers = getExternalLdapUserInfo(); for (LdapUserDto externalUser : externalUsers) { String userName = externalUser.getUserName(); if (internalUsersMap.containsKey(userName) && internalUsersMap.get(userName).isLdapUser()) { externalUser.setSynced(true); } else { externalUser.setSynced(false); } } syncInfo.setGroups(externalGroups); syncInfo.setUsers(externalUsers); return syncInfo; } /** * Performs 
synchronization of all groups. * * @throws AmbariException if synchronization failed for any reason */ public LdapBatchDto synchronizeAllLdapGroups(LdapBatchDto batchInfo) throws AmbariException { LOG.trace("Synchronize All LDAP groups..."); Set<LdapGroupDto> externalLdapGroupInfo = getExternalLdapGroupInfo(); final Map<String, Group> internalGroupsMap = getInternalGroups(); final Map<String, User> internalUsersMap = getInternalUsers(); for (LdapGroupDto groupDto : externalLdapGroupInfo) { String groupName = groupDto.getGroupName(); addLdapGroup(batchInfo, internalGroupsMap, groupName); refreshGroupMembers(batchInfo, groupDto, internalUsersMap, internalGroupsMap, null, false); } for (Entry<String, Group> internalGroup : internalGroupsMap.entrySet()) { if (internalGroup.getValue().isLdapGroup()) { batchInfo.getGroupsToBeRemoved().add(internalGroup.getValue().getGroupName()); } } return batchInfo; } /** * Performs synchronization of given sets of all users. * * @throws AmbariException if synchronization failed for any reason */ public LdapBatchDto synchronizeAllLdapUsers(LdapBatchDto batchInfo) throws AmbariException { LOG.trace("Synchronize All LDAP users..."); Set<LdapUserDto> externalLdapUserInfo = getExternalLdapUserInfo(); Map<String, User> internalUsersMap = getInternalUsers(); for (LdapUserDto userDto : externalLdapUserInfo) { String userName = userDto.getUserName(); if (internalUsersMap.containsKey(userName)) { final User user = internalUsersMap.get(userName); if (user != null && !user.isLdapUser()) { if (Configuration.LdapUsernameCollisionHandlingBehavior.SKIP == configuration.getLdapSyncCollisionHandlingBehavior()) { LOG.info("User '{}' skipped because it is local user", userName); batchInfo.getUsersSkipped().add(userName); } else { batchInfo.getUsersToBecomeLdap().add(userName); LOG.trace("Convert user '{}' to LDAP user.", userName); } } internalUsersMap.remove(userName); } else { batchInfo.getUsersToBeCreated().add(userName); } } for (Entry<String, User> 
internalUser : internalUsersMap.entrySet()) { if (internalUser.getValue().isLdapUser()) { batchInfo.getUsersToBeRemoved().add(internalUser.getValue().getUserName()); } } return batchInfo; } /** * Performs synchronization of given set of groupnames. * * @param groups set of groups to synchronize * @throws AmbariException if synchronization failed for any reason */ public LdapBatchDto synchronizeLdapGroups(Set<String> groups, LdapBatchDto batchInfo) throws AmbariException { LOG.trace("Synchronize LDAP groups..."); final Set<LdapGroupDto> specifiedGroups = new HashSet<>(); for (String group : groups) { Set<LdapGroupDto> groupDtos = getLdapGroups(group); if (groupDtos.isEmpty()) { throw new AmbariException("Couldn't sync LDAP group " + group + ", it doesn't exist"); } specifiedGroups.addAll(groupDtos); } final Map<String, Group> internalGroupsMap = getInternalGroups(); final Map<String, User> internalUsersMap = getInternalUsers(); for (LdapGroupDto groupDto : specifiedGroups) { String groupName = groupDto.getGroupName(); addLdapGroup(batchInfo, internalGroupsMap, groupName); refreshGroupMembers(batchInfo, groupDto, internalUsersMap, internalGroupsMap, null, true); } return batchInfo; } /** * Performs synchronization of given set of user names. 
* * @param users set of users to synchronize * @throws AmbariException if synchronization failed for any reason */ public LdapBatchDto synchronizeLdapUsers(Set<String> users, LdapBatchDto batchInfo) throws AmbariException { LOG.trace("Synchronize LDAP users..."); final Set<LdapUserDto> specifiedUsers = new HashSet<>(); for (String user : users) { Set<LdapUserDto> userDtos = getLdapUsers(user); if (userDtos.isEmpty()) { throw new AmbariException("Couldn't sync LDAP user " + user + ", it doesn't exist"); } specifiedUsers.addAll(userDtos); } final Map<String, User> internalUsersMap = getInternalUsers(); for (LdapUserDto userDto : specifiedUsers) { String userName = userDto.getUserName(); if (internalUsersMap.containsKey(userName)) { final User user = internalUsersMap.get(userName); if (user != null && !user.isLdapUser()) { if (Configuration.LdapUsernameCollisionHandlingBehavior.SKIP == configuration.getLdapSyncCollisionHandlingBehavior()) { LOG.info("User '{}' skipped because it is local user", userName); batchInfo.getUsersSkipped().add(userName); } else { batchInfo.getUsersToBecomeLdap().add(userName); } } internalUsersMap.remove(userName); } else { batchInfo.getUsersToBeCreated().add(userName); } } return batchInfo; } /** * Performs synchronization of existent users and groups. 
* * @throws AmbariException if synchronization failed for any reason */ public LdapBatchDto synchronizeExistingLdapGroups(LdapBatchDto batchInfo) throws AmbariException { LOG.trace("Synchronize Existing LDAP groups..."); final Map<String, Group> internalGroupsMap = getInternalGroups(); final Map<String, User> internalUsersMap = getInternalUsers(); final Set<Group> internalGroupSet = Sets.newHashSet(internalGroupsMap.values()); for (Group group : internalGroupSet) { if (group.isLdapGroup()) { Set<LdapGroupDto> groupDtos = getLdapGroups(group.getGroupName()); if (groupDtos.isEmpty()) { batchInfo.getGroupsToBeRemoved().add(group.getGroupName()); } else { LdapGroupDto groupDto = groupDtos.iterator().next(); refreshGroupMembers(batchInfo, groupDto, internalUsersMap, internalGroupsMap, null, true); } } } return batchInfo; } /** * Performs synchronization of existent users and groups. * * @throws AmbariException if synchronization failed for any reason */ public LdapBatchDto synchronizeExistingLdapUsers(LdapBatchDto batchInfo) throws AmbariException { LOG.trace("Synchronize Existing LDAP users..."); final Map<String, User> internalUsersMap = getInternalUsers(); for (User user : internalUsersMap.values()) { if (user.isLdapUser()) { Set<LdapUserDto> userDtos = getLdapUsers(user.getUserName()); if (userDtos.isEmpty()) { batchInfo.getUsersToBeRemoved().add(user.getUserName()); } } } return batchInfo; } /** * Check group members of the synced group: add missing ones and remove the ones absent in external LDAP. 
 *
 * @param batchInfo             batch update object
 * @param group                 ldap group
 * @param internalUsers         map of internal users, keyed by user name
 * @param internalGroupsMap     map of internal groups, keyed by group name
 * @param groupMemberAttributes set of group member attributes that have already been refreshed
 * @param recursive             if disabled, it won't refresh members recursively (its not needed in case of all groups are processed)
 * @throws AmbariException if group refresh failed
 */
protected void refreshGroupMembers(LdapBatchDto batchInfo, LdapGroupDto group, Map<String, User> internalUsers, Map<String, Group> internalGroupsMap, Set<String> groupMemberAttributes, boolean recursive) throws AmbariException {
  Set<String> externalMembers = new HashSet<>();
  // Lazily created on the first (non-recursive) call; shared across recursive calls to break cycles.
  if (groupMemberAttributes == null) {
    groupMemberAttributes = new HashSet<>();
  }
  // Resolve each member attribute either to a user or, failing that, to a nested group.
  for (String memberAttributeValue : group.getMemberAttributes()) {
    LdapUserDto groupMember = getLdapUserByMemberAttr(memberAttributeValue);
    if (groupMember != null) {
      externalMembers.add(groupMember.getUserName());
    } else {
      // if we haven't already processed this group
      if (recursive && !groupMemberAttributes.contains(memberAttributeValue)) {
        // if the member is another group then add all of its members
        LdapGroupDto subGroup = getLdapGroupByMemberAttr(memberAttributeValue);
        if (subGroup != null) {
          // Mark before recursing so mutually-referencing groups terminate.
          groupMemberAttributes.add(memberAttributeValue);
          addLdapGroup(batchInfo, internalGroupsMap, subGroup.getGroupName());
          refreshGroupMembers(batchInfo, subGroup, internalUsers, internalGroupsMap, groupMemberAttributes, true);
        }
      }
    }
  }
  String groupName = group.getGroupName();
  final Map<String, User> internalMembers = getInternalMembers(groupName);
  for (String externalMember : externalMembers) {
    if (internalUsers.containsKey(externalMember)) {
      final User user = internalUsers.get(externalMember);
      if (user == null) {
        // user is fresh and is already added to batch info
        if (!internalMembers.containsKey(externalMember)) {
          batchInfo.getMembershipToAdd().add(new LdapUserGroupMemberDto(groupName, externalMember));
        }
        continue;
      }
      if (!user.isLdapUser()) {
        if (Configuration.LdapUsernameCollisionHandlingBehavior.SKIP == configuration.getLdapSyncCollisionHandlingBehavior()) {
          // existing user can not be converted to ldap user, so skip it
          LOG.info("User '{}' skipped because it is local user", externalMember);
          batchInfo.getUsersSkipped().add(externalMember);
          continue; // and remove from group
        } else {
          batchInfo.getUsersToBecomeLdap().add(externalMember);
        }
      }
      if (!internalMembers.containsKey(externalMember)) {
        batchInfo.getMembershipToAdd().add(new LdapUserGroupMemberDto(groupName, externalMember));
      }
      // Processed members are removed; what remains at the end is no longer in LDAP.
      internalMembers.remove(externalMember);
    } else {
      batchInfo.getUsersToBeCreated().add(externalMember);
      batchInfo.getMembershipToAdd().add(new LdapUserGroupMemberDto(groupName, externalMember));
    }
  }
  // Leftover internal members were not seen externally: schedule their removal from the group.
  for (Entry<String, User> userToBeUnsynced : internalMembers.entrySet()) {
    final User user = userToBeUnsynced.getValue();
    batchInfo.getMembershipToRemove().add(new LdapUserGroupMemberDto(groupName, user.getUserName()));
  }
}

/**
 * Get the set of LDAP groups for the given group name.
 *
 * @param groupName the group name
 * @return the set of LDAP groups for the given name
 */
protected Set<LdapGroupDto> getLdapGroups(String groupName) {
  Filter groupObjectFilter = new EqualsFilter(OBJECT_CLASS_ATTRIBUTE, ldapServerProperties.getGroupObjectClass());
  // LikeFilter so the caller may pass wildcards in the group name.
  Filter groupNameFilter = new LikeFilter(ldapServerProperties.getGroupNamingAttr(), groupName);
  return getFilteredLdapGroups(ldapServerProperties.getBaseDN(), groupObjectFilter, groupNameFilter);
}

/**
 * Get the set of LDAP users for the given user name.
 *
 * @param username the user name
 * @return the set of LDAP users for the given name
 */
protected Set<LdapUserDto> getLdapUsers(String username) {
  Filter userObjectFilter = new EqualsFilter(OBJECT_CLASS_ATTRIBUTE, ldapServerProperties.getUserObjectClass());
  // LikeFilter so the caller may pass wildcards in the user name.
  Filter userNameFilter = new LikeFilter(ldapServerProperties.getUsernameAttribute(), username);
  return getFilteredLdapUsers(ldapServerProperties.getBaseDN(), userObjectFilter, userNameFilter);
}

/**
 * Get the LDAP user member for the given member attribute.
 *
 * Resolution order: (1) the configured custom member filter, (2) the member value
 * used directly as a baseDn, (3) an equality search on the username attribute.
 *
 * @param memberAttributeValue the member attribute value
 * @return the user for the given member attribute; null if not found
 */
protected LdapUserDto getLdapUserByMemberAttr(String memberAttributeValue) {
  Set<LdapUserDto> filteredLdapUsers;
  // Optionally rewrite the raw member value (e.g. strip SID/GUID prefixes) before searching.
  memberAttributeValue = getUniqueIdByMemberPattern(memberAttributeValue, ldapServerProperties.getSyncUserMemberReplacePattern());
  Filter syncMemberFilter = createCustomMemberFilter(memberAttributeValue, ldapServerProperties.getSyncUserMemberFilter());
  if (memberAttributeValue != null && syncMemberFilter != null) {
    LOG.trace("Use custom filter '{}' for getting member user with default baseDN ('{}')", syncMemberFilter.encode(), ldapServerProperties.getBaseDN());
    filteredLdapUsers = getFilteredLdapUsers(ldapServerProperties.getBaseDN(), syncMemberFilter);
  } else if (memberAttributeValue != null && isMemberAttributeBaseDn(memberAttributeValue)) {
    LOG.trace("Member can be used as baseDn: {}", memberAttributeValue);
    Filter filter = new EqualsFilter(OBJECT_CLASS_ATTRIBUTE, ldapServerProperties.getUserObjectClass());
    filteredLdapUsers = getFilteredLdapUsers(memberAttributeValue, filter);
  } else {
    LOG.trace("Member cannot be used as baseDn: {}", memberAttributeValue);
    Filter filter = new AndFilter()
        .and(new EqualsFilter(OBJECT_CLASS_ATTRIBUTE, ldapServerProperties.getUserObjectClass()))
        .and(new EqualsFilter(ldapServerProperties.getUsernameAttribute(), memberAttributeValue));
    filteredLdapUsers = getFilteredLdapUsers(ldapServerProperties.getBaseDN(), filter);
  }
  // Multiple matches possible; an arbitrary first match is returned.
  return (filteredLdapUsers.isEmpty()) ? null : filteredLdapUsers.iterator().next();
}

/**
 * Get the LDAP group member for the given member attribute.
 *
 * Same three-stage resolution as {@link #getLdapUserByMemberAttr(String)} but for groups.
 *
 * @param memberAttributeValue the member attribute value
 * @return the group for the given member attribute; null if not found
 */
protected LdapGroupDto getLdapGroupByMemberAttr(String memberAttributeValue) {
  Set<LdapGroupDto> filteredLdapGroups;
  memberAttributeValue = getUniqueIdByMemberPattern(memberAttributeValue, ldapServerProperties.getSyncGroupMemberReplacePattern());
  Filter syncMemberFilter = createCustomMemberFilter(memberAttributeValue, ldapServerProperties.getSyncGroupMemberFilter());
  if (memberAttributeValue != null && syncMemberFilter != null) {
    LOG.trace("Use custom filter '{}' for getting member group with default baseDN ('{}')", syncMemberFilter.encode(), ldapServerProperties.getBaseDN());
    filteredLdapGroups = getFilteredLdapGroups(ldapServerProperties.getBaseDN(), syncMemberFilter);
  } else if (memberAttributeValue != null && isMemberAttributeBaseDn(memberAttributeValue)) {
    LOG.trace("Member can be used as baseDn: {}", memberAttributeValue);
    Filter filter = new EqualsFilter(OBJECT_CLASS_ATTRIBUTE, ldapServerProperties.getGroupObjectClass());
    filteredLdapGroups = getFilteredLdapGroups(memberAttributeValue, filter);
  } else {
    LOG.trace("Member cannot be used as baseDn: {}", memberAttributeValue);
    filteredLdapGroups = getFilteredLdapGroups(ldapServerProperties.getBaseDN(),
        new EqualsFilter(OBJECT_CLASS_ATTRIBUTE, ldapServerProperties.getGroupObjectClass()),
        getMemberFilter(memberAttributeValue));
  }
  return (filteredLdapGroups.isEmpty()) ? null : filteredLdapGroups.iterator().next();
}

/**
 * Use custom member filter. Replace {member} with the member attribute.
* E.g.: (&(objectclass=posixaccount)(dn={member})) -> (&(objectclass=posixaccount)(dn=cn=mycn,dc=apache,dc=org)) */ protected Filter createCustomMemberFilter(String memberAttributeValue, String syncMemberFilter) { Filter filter = null; if (StringUtils.isNotEmpty(syncMemberFilter)) { filter = new HardcodedFilter(syncMemberFilter.replace(MEMBER_ATTRIBUTE_VALUE_PLACEHOLDER, memberAttributeValue)); } return filter; } /** * Replace memberAttribute value by a custom pattern to get the DN or id (like memberUid) of a user/group. * E.g.: memberAttribute="<sid=...><guid=...>,cn=mycn,dc=org,dc=apache" * Apply on (?<sid>.*);(?<guid>.*);(?<member>.*) pattern, then the result will be: "${member}" */ protected String getUniqueIdByMemberPattern(String memberAttributeValue, String pattern) { if (StringUtils.isNotEmpty(memberAttributeValue) && StringUtils.isNotEmpty(pattern)) { try { Pattern p = Pattern.compile(pattern); Matcher m = p.matcher(memberAttributeValue); LOG.debug("Apply replace pattern '{}' on '{}' membership attribbute value.", memberAttributeValue, pattern); if (m.matches()) { memberAttributeValue = m.replaceAll(MEMBER_ATTRIBUTE_REPLACE_STRING); LOG.debug("Membership attribute value after replace pattern applied: '{}'", memberAttributeValue); } else { LOG.warn("Membership attribute value pattern is not matched ({}) on '{}'", pattern, memberAttributeValue); } } catch (Exception e) { LOG.error("Error during replace memberAttribute '{}' with pattern '{}'", memberAttributeValue, pattern); } } return memberAttributeValue; } /** * Removes synced users which are not present in any of group. 
* * @throws AmbariException */ protected void cleanUpLdapUsersWithoutGroup() throws AmbariException { final List<User> allUsers = users.getAllUsers(); for (User user : allUsers) { if (user.isLdapUser() && user.getGroups().isEmpty()) { users.removeUser(user); } } } // Utility methods protected void addLdapGroup(LdapBatchDto batchInfo, Map<String, Group> internalGroupsMap, String groupName) { if (internalGroupsMap.containsKey(groupName)) { final Group group = internalGroupsMap.get(groupName); if (!group.isLdapGroup()) { batchInfo.getGroupsToBecomeLdap().add(groupName); LOG.trace("Convert group '{}' to LDAP group.", groupName); } internalGroupsMap.remove(groupName); batchInfo.getGroupsProcessedInternal().add(groupName); } else { if (!batchInfo.getGroupsProcessedInternal().contains(groupName)) { batchInfo.getGroupsToBeCreated().add(groupName); } } } /** * Determines that the member attribute can be used as a 'dn' */ protected boolean isMemberAttributeBaseDn(String memberAttributeValue) { Pattern pattern = Pattern.compile(String.format(IS_MEMBER_DN_REGEXP, ldapServerProperties.getUsernameAttribute(), ldapServerProperties.getGroupNamingAttr())); return pattern.matcher(memberAttributeValue).find(); } /** * Retrieves groups from external LDAP server. * * @return set of info about LDAP groups */ protected Set<LdapGroupDto> getExternalLdapGroupInfo() { EqualsFilter groupObjectFilter = new EqualsFilter(OBJECT_CLASS_ATTRIBUTE, ldapServerProperties.getGroupObjectClass()); return getFilteredLdapGroups(ldapServerProperties.getBaseDN(), groupObjectFilter); } // get a filter based on the given member attribute private Filter getMemberFilter(String memberAttributeValue) { String dnAttribute = ldapServerProperties.getDnAttribute(); return new OrFilter().or(new EqualsFilter(dnAttribute, memberAttributeValue)). or(new EqualsFilter(UID_ATTRIBUTE, memberAttributeValue)); } private Set<LdapGroupDto> getFilteredLdapGroups(String baseDn, Filter... 
filters) { AndFilter andFilter = new AndFilter(); for (Filter filter : filters) { andFilter.and(filter); } return getFilteredLdapGroups(baseDn, andFilter); } private Set<LdapGroupDto> getFilteredLdapGroups(String baseDn, Filter filter) { final Set<LdapGroupDto> groups = new HashSet<>(); final LdapTemplate ldapTemplate = loadLdapTemplate(); LOG.trace("LDAP Group Query - Base DN: '{}' ; Filter: '{}'", baseDn, filter.encode()); ldapTemplate.search(baseDn, filter.encode(), new LdapGroupContextMapper(groups, ldapServerProperties)); return groups; } /** * Retrieves users from external LDAP server. * * @return set of info about LDAP users */ protected Set<LdapUserDto> getExternalLdapUserInfo() { EqualsFilter userObjectFilter = new EqualsFilter(OBJECT_CLASS_ATTRIBUTE, ldapServerProperties.getUserObjectClass()); return getFilteredLdapUsers(ldapServerProperties.getBaseDN(), userObjectFilter); } private Set<LdapUserDto> getFilteredLdapUsers(String baseDn, Filter... filters) { AndFilter andFilter = new AndFilter(); for (Filter filter : filters) { andFilter.and(filter); } return getFilteredLdapUsers(baseDn, andFilter); } private Set<LdapUserDto> getFilteredLdapUsers(String baseDn, Filter filter) { final Set<LdapUserDto> users = new HashSet<>(); final LdapTemplate ldapTemplate = loadLdapTemplate(); PagedResultsDirContextProcessor processor = createPagingProcessor(); SearchControls searchControls = new SearchControls(); searchControls.setReturningObjFlag(true); searchControls.setSearchScope(SearchControls.SUBTREE_SCOPE); LdapUserContextMapper ldapUserContextMapper = new LdapUserContextMapper(ldapServerProperties); String encodedFilter = filter.encode(); do { LOG.trace("LDAP User Query - Base DN: '{}' ; Filter: '{}'", baseDn, encodedFilter); List dtos = configuration.getLdapServerProperties().isPaginationEnabled() ? 
ldapTemplate.search(LdapUtils.newLdapName(baseDn), encodedFilter, searchControls, ldapUserContextMapper, processor) : ldapTemplate.search(LdapUtils.newLdapName(baseDn), encodedFilter, searchControls, ldapUserContextMapper); for (Object dto : dtos) { if (dto != null) { users.add((LdapUserDto) dto); } } } while (configuration.getLdapServerProperties().isPaginationEnabled() && processor.getCookie().getCookie() != null); return users; } /** * Creates a map of internal groups. * * @return map of GroupName-Group pairs */ protected Map<String, Group> getInternalGroups() { final List<Group> internalGroups = users.getAllGroups(); final Map<String, Group> internalGroupsMap = new HashMap<>(); for (Group group : internalGroups) { internalGroupsMap.put(group.getGroupName(), group); } return internalGroupsMap; } /** * Creates a map of internal users. * * @return map of UserName-User pairs */ protected Map<String, User> getInternalUsers() { final List<User> internalUsers = users.getAllUsers(); final Map<String, User> internalUsersMap = new HashMap<>(); LOG.trace("Get all users from Ambari Server."); for (User user : internalUsers) { internalUsersMap.put(user.getUserName(), user); } return internalUsersMap; } /** * Creates a map of internal users present in specified group. * * @param groupName group name * @return map of UserName-User pairs */ protected Map<String, User> getInternalMembers(String groupName) { final Collection<User> internalMembers = users.getGroupMembers(groupName); if (internalMembers == null) { return Collections.emptyMap(); } final Map<String, User> internalMembersMap = new HashMap<>(); for (User user : internalMembers) { internalMembersMap.put(user.getUserName(), user); } return internalMembersMap; } /** * Checks LDAP configuration for changes and reloads LDAP template if they occurred. 
 *
 * @return LdapTemplate instance
 */
protected LdapTemplate loadLdapTemplate() {
  final LdapServerProperties properties = configuration.getLdapServerProperties();
  // Rebuild the template only when there is none yet or the configuration changed.
  if (ldapTemplate == null || !properties.equals(ldapServerProperties)) {
    LOG.info("Reloading properties");
    ldapServerProperties = properties;
    final LdapContextSource ldapContextSource = createLdapContextSource();
    // The LdapTemplate by design will close the connection after each call to the LDAP Server
    // In order to have the interaction work with large/paged results, said connection must be pooled and reused
    ldapContextSource.setPooled(true);
    final List<String> ldapUrls = ldapServerProperties.getLdapUrls();
    ldapContextSource.setUrls(ldapUrls.toArray(new String[ldapUrls.size()]));
    if (!ldapServerProperties.isAnonymousBind()) {
      ldapContextSource.setUserDn(ldapServerProperties.getManagerDn());
      ldapContextSource.setPassword(ldapServerProperties.getManagerPassword());
    }
    try {
      // Must run after URL/credentials are set; initializes the context source.
      ldapContextSource.afterPropertiesSet();
    } catch (Exception e) {
      LOG.error("LDAP Context Source not loaded ", e);
      throw new UsernameNotFoundException("LDAP Context Source not loaded", e);
    }
    ldapContextSource.setReferral(ldapServerProperties.getReferralMethod());
    ldapTemplate = createLdapTemplate(ldapContextSource);
    ldapTemplate.setIgnorePartialResultException(true);
  }
  return ldapTemplate;
}

/**
 * LdapContextSource factory method.
 *
 * @return new context source
 */
protected LdapContextSource createLdapContextSource() {
  return new LdapContextSource();
}

/**
 * PagedResultsDirContextProcessor factory method.
 *
 * @return new processor;
 */
protected PagedResultsDirContextProcessor createPagingProcessor() {
  return new PagedResultsDirContextProcessor(USERS_PAGE_SIZE, null);
}

/**
 * LdapTemplate factory method.
 *
 * @param ldapContextSource the LDAP context source
 * @return new LDAP template
 */
protected LdapTemplate createLdapTemplate(LdapContextSource ldapContextSource) {
  return new LdapTemplate(ldapContextSource);
}

//
// ContextMapper implementations
//

/**
 * Maps LDAP search results into {@link LdapGroupDto} instances, accumulating them
 * into the set supplied at construction time. {@code mapFromContext} always returns
 * null; the collected set is the real output.
 */
protected static class LdapGroupContextMapper implements ContextMapper {

  private final Set<LdapGroupDto> groups;
  private final LdapServerProperties ldapServerProperties;

  public LdapGroupContextMapper(Set<LdapGroupDto> groups, LdapServerProperties ldapServerProperties) {
    this.groups = groups;
    this.ldapServerProperties = ldapServerProperties;
  }

  @Override
  public Object mapFromContext(Object ctx) {
    final DirContextAdapter adapter = (DirContextAdapter) ctx;
    final String groupNameAttribute = adapter.getStringAttribute(ldapServerProperties.getGroupNamingAttr());
    // Referrals may yield entries outside the configured base DN; skip those.
    boolean outOfScope = AmbariLdapUtils.isLdapObjectOutOfScopeFromBaseDn(adapter, ldapServerProperties.getBaseDN());
    if (outOfScope) {
      LOG.warn("Group '{}' is out of scope of the base DN. It will be skipped.", groupNameAttribute);
      return null;
    }
    if (groupNameAttribute != null) {
      final LdapGroupDto group = new LdapGroupDto();
      // Group names and member attributes are normalized to lower case for matching.
      group.setGroupName(groupNameAttribute.toLowerCase());
      final String[] uniqueMembers = adapter.getStringAttributes(ldapServerProperties.getGroupMembershipAttr());
      if (uniqueMembers != null) {
        for (String uniqueMember : uniqueMembers) {
          group.getMemberAttributes().add(uniqueMember.toLowerCase());
        }
      }
      groups.add(group);
    }
    return null;
  }
}

/**
 * Maps LDAP search results into {@link LdapUserDto} instances. Unlike the group mapper,
 * the mapped DTO is returned from {@code mapFromContext} (or null when the entry is
 * out of scope or lacks both username and uid attributes).
 */
protected static class LdapUserContextMapper implements ContextMapper {

  private final LdapServerProperties ldapServerProperties;

  public LdapUserContextMapper(LdapServerProperties ldapServerProperties) {
    this.ldapServerProperties = ldapServerProperties;
  }

  @Override
  public Object mapFromContext(Object ctx) {
    final DirContextAdapter adapter = (DirContextAdapter) ctx;
    final String usernameAttribute = adapter.getStringAttribute(ldapServerProperties.getUsernameAttribute());
    final String uidAttribute = adapter.getStringAttribute(UID_ATTRIBUTE);
    // Referrals may yield entries outside the configured base DN; skip those.
    boolean outOfScope = AmbariLdapUtils.isLdapObjectOutOfScopeFromBaseDn(adapter, ldapServerProperties.getBaseDN());
    if (outOfScope) {
      LOG.warn("User '{}' is out of scope of the base DN. It will be skipped.", usernameAttribute);
      return null;
    }
    if (usernameAttribute != null || uidAttribute != null) {
      final LdapUserDto user = new LdapUserDto();
      // Identifiers are normalized to lower case for matching against internal users.
      user.setUserName(usernameAttribute != null ? usernameAttribute.toLowerCase() : null);
      user.setUid(uidAttribute != null ? uidAttribute.toLowerCase() : null);
      user.setDn(adapter.getNameInNamespace().toLowerCase());
      return user;
    } else {
      LOG.warn("Ignoring LDAP user " + adapter.getNameInNamespace() + " as it doesn't have required"
          + " attributes uid and " + ldapServerProperties.getUsernameAttribute());
    }
    return null;
  }
}
}
/* * Copyright 2015-2018 Igor Maznitsa. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.igormaznitsa.mindmap.swing.panel.utils; import com.igormaznitsa.meta.annotation.ImplementationNote; import com.igormaznitsa.meta.annotation.MayContainNull; import com.igormaznitsa.meta.annotation.MustNotContainNull; import com.igormaznitsa.meta.annotation.ReturnsOriginal; import com.igormaznitsa.mindmap.model.Topic; import com.igormaznitsa.mindmap.model.logger.Logger; import com.igormaznitsa.mindmap.model.logger.LoggerFactory; import com.igormaznitsa.mindmap.plugins.MindMapPluginRegistry; import com.igormaznitsa.mindmap.plugins.PopUpSection; import com.igormaznitsa.mindmap.plugins.api.PluginContext; import com.igormaznitsa.mindmap.plugins.api.PopUpMenuItemPlugin; import com.igormaznitsa.mindmap.swing.panel.DialogProvider; import com.igormaznitsa.mindmap.swing.panel.MindMapPanelConfig; import com.igormaznitsa.mindmap.swing.panel.ui.AbstractCollapsableElement; import com.igormaznitsa.mindmap.swing.panel.ui.AbstractElement; import com.igormaznitsa.mindmap.swing.panel.ui.gfx.MMGraphics; import com.igormaznitsa.mindmap.swing.panel.ui.gfx.MMGraphics2DWrapper; import com.igormaznitsa.mindmap.swing.services.*; import net.iharder.Base64; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.SystemUtils; import org.jsoup.Jsoup; import org.jsoup.helper.W3CDom; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; 
import org.xml.sax.SAXException;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.imageio.ImageIO;
import javax.swing.*;
import javax.swing.tree.TreeModel;
import javax.swing.tree.TreePath;
import javax.xml.XMLConstants;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import java.awt.*;
import java.awt.datatransfer.Clipboard;
import java.awt.datatransfer.DataFlavor;
import java.awt.event.*;
import java.awt.geom.AffineTransform;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.awt.image.BufferedImage;
import java.awt.image.RenderedImage;
import java.io.*;
import java.lang.reflect.InvocationTargetException;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.*;
import java.util.function.Consumer;
import java.util.function.Predicate;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

/**
 * Static helper utilities for the mind map Swing panel: zip/XML/HTML loading,
 * image scaling and Base64 encoding, color conversion, string handling and
 * Swing-thread helpers. Not instantiable.
 */
public final class Utils {

  public static final ResourceBundle BUNDLE = java.util.ResourceBundle.getBundle("com/igormaznitsa/mindmap/swing/panel/Bundle");
  public static final UIComponentFactory UI_COMPO_FACTORY = UIComponentFactoryProvider.findInstance();
  public static final ImageIconService ICON_SERVICE = ImageIconServiceProvider.findInstance();
  // System property allowing users to override the max embedded image side size.
  public static final String PROPERTY_MAX_EMBEDDED_IMAGE_SIDE_SIZE = "mmap.max.image.side.size"; //NOI18N
  // True when the default user language is written left-to-right.
  public static final boolean LTR_LANGUAGE = ComponentOrientation.getOrientation(new Locale(System.getProperty("user.language"))).isLeftToRight();
  private static final Logger LOGGER = LoggerFactory.getLogger(Utils.class);
  // Loose URI validator: scheme, optional authority, path, query, fragment.
  private static final Pattern URI_PATTERN = Pattern.compile("^(?:([^:\\s]+):)(?://(?:[^?/@\\s]*@)?([^/?\\s]*)/?)?([^?\\s]+)?(?:\\?([^#\\s]*))?(?:#\\S*)?$");
  private static final int MAX_IMAGE_SIDE_SIZE_IN_PIXELS = 350;
  // Captures leading whitespace, trimmed payload and trailing whitespace.
  private static final Pattern STRIP_PATTERN = Pattern.compile("^(\\s*)(.*[^\\s])(\\s*)$");

  // Utility class: no instances.
  private Utils() {
  }

  /**
   * Get input stream for resource in zip file.
   *
   * @param zipFile zip file
   * @param resourcePath path to resource
   * @return input stream for resource or null if not found or directory
   * @throws IOException if there is any transport error
   */
  @Nullable
  public static InputStream findInputStreamForResource(@Nonnull final ZipFile zipFile, @Nonnull final String resourcePath) throws IOException {
    final ZipEntry entry = zipFile.getEntry(resourcePath);
    InputStream result = null;
    if (entry != null && !entry.isDirectory()) {
      result = zipFile.getInputStream(entry);
    }
    return result;
  }

  /**
   * Read whole zip item into byte array.
   *
   * @param zipFile zip file
   * @param path path to resource
   * @return byte array or null if not found
   * @throws IOException thrown if there is any transport error
   */
  @Nullable
  public static byte[] toByteArray(@Nonnull final ZipFile zipFile, @Nonnull final String path) throws IOException {
    final InputStream in = findInputStreamForResource(zipFile, path);
    byte[] result = null;
    if (in != null) {
      try {
        result = IOUtils.toByteArray(in);
      } finally {
        IOUtils.closeQuietly(in);
      }
    }
    return result;
  }

  /**
   * Parse an HTML stream with jsoup and convert it into a W3C DOM document.
   *
   * @param inStream stream to read the HTML from
   * @param charset charset name for decoding, can be null for the platform default
   * @param autoClose true if the stream must be closed afterwards, false otherwise
   * @return parsed document, must not be null
   * @throws IOException if there is any transport error
   * @throws ParserConfigurationException if DOM conversion fails
   */
  @Nonnull
  public static Document loadHtmlDocument(@Nonnull final InputStream inStream, @Nullable final String charset, final boolean autoClose) throws ParserConfigurationException, IOException {
    try {
      final org.jsoup.nodes.Document result = Jsoup.parse(IOUtils.toString(inStream, charset));
      return new W3CDom().fromJsoup(result);
    } finally {
      if (autoClose) {
        IOUtils.closeQuietly(inStream);
      }
    }
  }

  /**
   * Load and parse XML document from input stream.
 *
 * @param inStream stream to read document
 * @param charset charset to be used for loading, can be null
 * @param autoClose true if stream must be closed, false otherwise
 * @return parsed document
 * @throws IOException will be thrown if transport error
 * @throws ParserConfigurationException will be thrown if parsing error
 * @throws SAXException will be thrown if SAX error
 * @since 1.4.0
 */
@Nonnull
public static Document loadXmlDocument(@Nonnull final InputStream inStream, @Nullable final String charset, final boolean autoClose) throws SAXException, IOException, ParserConfigurationException {
  final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
  try {
    factory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
  } catch (final ParserConfigurationException ex) {
    LOGGER.error("Can't set feature for XML parser : " + ex.getMessage(), ex);
    throw new SAXException("Can't set flag to use security processing of XML file");
  }
  try {
    // External entities and DTD loading are disabled (XXE hardening); schema validation is off.
    // NOTE(review): doctype declarations remain ALLOWED (disallow-doctype-decl=false) —
    // presumably to accept documents carrying a DOCTYPE; confirm this is intentional.
    factory.setFeature("http://apache.org/xml/features/validation/schema", false);
    factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", false);
    factory.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false);
    factory.setFeature("http://xml.org/sax/features/external-general-entities", false);
    factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
  } catch (final ParserConfigurationException ex) {
    LOGGER.warn("Can't set some features for XML parser : " + ex.getMessage());
  }
  factory.setIgnoringComments(true);
  factory.setValidating(false);
  final DocumentBuilder builder = factory.newDocumentBuilder();
  final Document document;
  try {
    final InputStream stream;
    if (charset == null) {
      stream = inStream;
    } else {
      // Re-encode the payload as UTF-8 bytes so the parser sees a consistent encoding.
      stream = new ByteArrayInputStream(IOUtils.toString(inStream, charset).getBytes(StandardCharsets.UTF_8));
    }
    document = builder.parse(stream);
  } finally {
    if (autoClose) {
      IOUtils.closeQuietly(inStream);
    }
  }
  return document;
}

/**
 * Get first direct child for name.
 *
 * @param node element to find children
 * @param elementName name of child element
 * @return found first child or null if not found
 * @since 1.4.0
 */
@Nullable
public static Element findFirstElement(@Nonnull final Element node, @Nonnull final String elementName) {
  Element result = null;
  for (final Element l : Utils.findDirectChildrenForName(node, elementName)) {
    result = l;
    break;
  }
  return result;
}

/**
 * Find all direct children with defined name.
 *
 * @param element parent element
 * @param childElementname child element name
 * @return list of found elements
 * @since 1.4.0
 */
@Nonnull
@MustNotContainNull
public static List<Element> findDirectChildrenForName(@Nonnull final Element element, @Nonnull final String childElementname) {
  final List<Element> resultList = new ArrayList<>();
  final NodeList list = element.getChildNodes();
  for (int i = 0; i < list.getLength(); i++) {
    final Node node = list.item(i);
    // Direct children only: the parent check excludes deeper descendants.
    if (element.equals(node.getParentNode()) && node instanceof Element && childElementname.equals(node.getNodeName())) {
      resultList.add((Element) node);
    }
  }
  return resultList;
}

/**
 * Allows to check that some string has URI in appropriate format.
 *
 * @param uri string to be checked, must not be null
 * @return true if the string contains correct uri, false otherwise
 */
public static boolean isUriCorrect(@Nonnull final String uri) {
  return URI_PATTERN.matcher(uri).matches();
}

/**
 * Get max image size.
* * @return max image size * @see #MAX_IMAGE_SIDE_SIZE_IN_PIXELS * @see #PROPERTY_MAX_EMBEDDED_IMAGE_SIDE_SIZE */ public static int getMaxImageSize() { int result = MAX_IMAGE_SIDE_SIZE_IN_PIXELS; try { final String defined = System.getProperty(PROPERTY_MAX_EMBEDDED_IMAGE_SIDE_SIZE); if (defined != null) { LOGGER.info("Detected redefined max size for embedded image side : " + defined); //NOI18N result = Math.max(8, Integer.parseInt(defined.trim())); } } catch (NumberFormatException ex) { LOGGER.error("Error during image size decoding : ", ex); //NOI18N } return result; } /** * Load and encode image into Base64. * * @param in stream to read image * @param maxSize max size of image, if less or zero then don't rescale * @return null if it was impossible to load image for its format, loaded * prepared image * @throws IOException if any error during conversion or loading * @since 1.4.0 */ @Nullable public static String rescaleImageAndEncodeAsBase64(@Nonnull final InputStream in, final int maxSize) throws IOException { final Image image = ImageIO.read(in); String result = null; if (image != null) { result = rescaleImageAndEncodeAsBase64(image, maxSize); } return result; } /** * Load and encode image into Base64 from file. * * @param file image file * @param maxSize max size of image, if less or zero then don't rescale * @return image * @throws IOException if any error during conversion or loading * @since 1.4.0 */ @Nonnull public static String rescaleImageAndEncodeAsBase64(@Nonnull final File file, final int maxSize) throws IOException { final Image image = ImageIO.read(file); if (image == null) { throw new IllegalArgumentException("Can't load image file : " + file); //NOI18N } return rescaleImageAndEncodeAsBase64(image, maxSize); } /** * Get default render quality for host OS. 
 *
 * @return the render quality for host OS, must not be null
 * @since 1.4.5
 */
@Nonnull
public static RenderQuality getDefaultRenderQialityForOs() {
  RenderQuality result = RenderQuality.DEFAULT;
  // Mac and Windows get the high-quality renderer by default.
  if (SystemUtils.IS_OS_MAC || SystemUtils.IS_OS_WINDOWS) {
    result = RenderQuality.QUALITY;
  }
  return result;
}

/**
 * Rescale image and encode into Base64.
 *
 * @param image image to rescale and encode
 * @param maxSize max size of image, if less or zero then don't rescale
 * @return scaled and encoded image
 * @throws IOException if it was impossible to encode image
 * @since 1.4.0
 */
@Nonnull
public static String rescaleImageAndEncodeAsBase64(@Nonnull Image image, final int maxSize) throws IOException {
  final int width = image.getWidth(null);
  final int height = image.getHeight(null);
  final int maxImageSideSize = maxSize > 0 ? maxSize : Math.max(width, height);
  // Scale factor < 1 only when a side exceeds the limit; otherwise keep the image as-is.
  final float imageScale = width > maxImageSideSize || height > maxImageSideSize ? (float) maxImageSideSize / (float) Math.max(width, height) : 1.0f;
  // Redraw when scaling is needed, or when the image is not a RenderedImage (ImageIO.write needs one).
  if (!(image instanceof RenderedImage) || Float.compare(imageScale, 1.0f) != 0) {
    final int swidth;
    final int sheight;
    if (Float.compare(imageScale, 1.0f) == 0) {
      swidth = width;
      sheight = height;
    } else {
      swidth = Math.round(imageScale * width);
      sheight = Math.round(imageScale * height);
    }
    final BufferedImage buffer = new BufferedImage(swidth, sheight, BufferedImage.TYPE_INT_ARGB);
    final Graphics2D gfx = buffer.createGraphics();
    // Highest-quality hints for the downscale.
    gfx.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
    gfx.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
    gfx.setRenderingHint(RenderingHints.KEY_ALPHA_INTERPOLATION, RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY);
    gfx.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BICUBIC);
    gfx.setRenderingHint(RenderingHints.KEY_DITHERING, RenderingHints.VALUE_DITHER_DISABLE);
    gfx.setRenderingHint(RenderingHints.KEY_COLOR_RENDERING, RenderingHints.VALUE_COLOR_RENDER_QUALITY);
    gfx.drawImage(image, AffineTransform.getScaleInstance(imageScale, imageScale), null);
    gfx.dispose();
    image = buffer;
  }
  final ByteArrayOutputStream bos = new ByteArrayOutputStream();
  try {
    if (!ImageIO.write((RenderedImage) image, "png", bos)) {
      throw new IOException("Can't encode image as PNG");
    }
  } finally {
    IOUtils.closeQuietly(bos);
  }
  return Utils.base64encode(bos.toByteArray());
}

/**
 * Calculate perceived brightness (0..255) of a color using weighted RGB components.
 *
 * @param color color to measure, must not be null
 * @return perceived brightness value
 */
public static int calculateColorBrightness(@Nonnull final Color color) {
  return (int) Math.sqrt(color.getRed() * color.getRed() * .241d + color.getGreen() * color.getGreen() * .691d + color.getBlue() * color.getBlue() * .068d);
}

/**
 * Detect whether the current Swing look-and-feel uses a dark panel background.
 *
 * @return true when the panel background brightness is below the threshold
 */
public static boolean isDarkTheme() {
  final Color panelBack = UIManager.getColor("Panel.background");
  if (panelBack == null) {
    return false;
  } else {
    return calculateColorBrightness(panelBack) < 150;
  }
}

/**
 * Convert a camelCased identifier into a human readable phrase,
 * e.g. "someValue" becomes "Some value" (when capitalization requested).
 *
 * @param camelCasedString source string, must not be null
 * @param capitalizeFirstChar true to upper-case the first character
 * @return the converted string
 */
@Nonnull
public static String convertCamelCasedToHumanForm(@Nonnull final String camelCasedString, final boolean capitalizeFirstChar) {
  final StringBuilder result = new StringBuilder();
  boolean notFirst = false;
  for (final char c : camelCasedString.toCharArray()) {
    if (notFirst) {
      if (Character.isUpperCase(c)) {
        result.append(' ');
        result.append(Character.toLowerCase(c));
      } else {
        result.append(c);
      }
    } else {
      notFirst = true;
      if (capitalizeFirstChar) {
        result.append(Character.toUpperCase(c));
      } else {
        result.append(c);
      }
    }
  }
  return result.toString();
}

/**
 * Return children of a topic ordered left-side topics first; for non-root
 * topics the children are returned in their natural order.
 *
 * @param topic topic whose children are listed, must not be null
 * @return ordered array of children
 */
@Nonnull
@MustNotContainNull
public static Topic[] getLeftToRightOrderedChildrens(@Nonnull final Topic topic) {
  final List<Topic> result = new ArrayList<>();
  // Only the root (level 0) distinguishes left- and right-sided children.
  if (topic.getTopicLevel() == 0) {
    for (final Topic t : topic.getChildren()) {
      if (AbstractCollapsableElement.isLeftSidedTopic(t)) {
        result.add(t);
      }
    }
    for (final Topic t : topic.getChildren()) {
      if (!AbstractCollapsableElement.isLeftSidedTopic(t)) {
        result.add(t);
      }
    }
  } else {
    result.addAll(topic.getChildren());
  }
  return result.toArray(new Topic[0]);
}

/**
 * Null-safe equality check for two objects.
 *
 * @param obj1 first object, can be null
 * @param obj2 second object, can be null
 * @return true when both are the same reference, or both non-null and equal
 */
public static boolean safeObjectEquals(@Nullable final Object obj1, @Nullable final Object obj2) {
  if (obj1 == obj2) {
    return true;
  }
  if (obj1 == null || obj2 == null) {
    return false;
  }
  return obj1.equals(obj2);
}

/**
 * Set the same attribute value on every topic of the array.
 *
 * @param name attribute name, must not be null
 * @param value attribute value, can be null
 * @param topics topics to be updated, must not be null
 */
public static void setAttribute(@Nonnull final String name, @Nullable final String value, @Nonnull @MustNotContainNull final Topic[] topics) {
  for (final Topic t : topics) {
    t.setAttribute(name, value);
  }
}

/**
 * Parse an HTML color string like "#RRGGBB" (or a 3-digit form) into a Color.
 *
 * @param str color string starting with '#', can be null
 * @param hasAlpha true if the resulting color should carry an alpha channel
 * @return parsed color or null when the string can't be decoded
 */
@Nullable
public static Color html2color(@Nullable final String str, final boolean hasAlpha) {
  Color result = null;
  if (str != null && !str.isEmpty() && str.charAt(0) == '#') {
    try {
      String color = str.substring(1);
      // Longer values are truncated to their last six hex digits (drops any alpha prefix).
      if (color.length() > 6) {
        color = color.substring(color.length() - 6);
      }
      if (color.length() == 6) {
        result = new Color(Integer.parseInt(color, 16), hasAlpha);
      } else if (color.length() == 3) {
        // NOTE(review): each digit is expanded as d -> d0 (e.g. #abc -> #a0b0c0);
        // CSS shorthand conventionally doubles the digit (#abc -> #aabbcc) — confirm intent.
        final int r = Integer.parseInt(color.charAt(0) + "0", 16);
        final int g = Integer.parseInt(color.charAt(1) + "0", 16);
        final int b = Integer.parseInt(color.charAt(2) + "0", 16);
        result = new Color(r, g, b);
      }
    } catch (NumberFormatException ex) {
      LOGGER.warn(String.format("Can't convert %s to color", str));
    }
  }
  return result;
}

/**
 * Convert a Color into its HTML string form "#RRGGBB" (or "#AARRGGBB" with alpha).
 *
 * @param color color to convert, can be null
 * @param hasAlpha true to include the alpha component as the leading byte
 * @return color string or null when color is null
 */
@Nullable
public static String color2html(@Nullable final Color color, final boolean hasAlpha) {
  String result = null;
  if (color != null) {
    final StringBuilder buffer = new StringBuilder();
    buffer.append('#');
    final int[] components;
    if (hasAlpha) {
      components = new int[]{color.getAlpha(), color.getRed(), color.getGreen(), color.getBlue()};
    } else {
      components = new int[]{color.getRed(), color.getGreen(), color.getBlue()};
    }
    for (final int c : components) {
      final String str = Integer.toHexString(c & 0xFF).toUpperCase(Locale.ENGLISH);
      // Left-pad single hex digits with zero.
      if (str.length() < 2) {
        buffer.append('0');
      }
      buffer.append(str);
    }
    result = buffer.toString();
  }
  return result;
}

/**
 * Extract the first line of a text, ignoring carriage returns.
 *
 * @param text source text, must not be null
 * @return first line of the text
 */
@Nonnull
public static String getFirstLine(@Nonnull final String text) {
  return text.replace("\r", "").split("\\n")[0]; //NOI18N
}

/**
 * Truncate a text to the given max length, appending an ellipsis when cut.
 *
 * @param text source text, must not be null
 * @param maxLength maximum allowed length
 * @return possibly shortened text
 */
@Nonnull
public static String makeShortTextVersion(@Nonnull String text, final int maxLength) {
  if (text.length() >
maxLength) { text = text.substring(0, maxLength) + "..."; //NOI18N } return text; } public static void safeSwingCall(@Nonnull final Runnable runnable) { if (SwingUtilities.isEventDispatchThread()) { runnable.run(); } else { SwingUtilities.invokeLater(runnable); } } public static void safeSwingBlockingCall(@Nonnull final Runnable runnable) { if (SwingUtilities.isEventDispatchThread()) { runnable.run(); } else { try { SwingUtilities.invokeAndWait(runnable); } catch (InterruptedException ex) { Thread.currentThread().interrupt(); } catch (InvocationTargetException ex) { throw new RuntimeException("Detected exception during SwingUtilities.invokeAndWait", ex); } } } @Nonnull @MustNotContainNull public static String[] breakToLines(@Nonnull final String text) { final int lineNum = numberOfLines(text); final String[] result = new String[lineNum]; final StringBuilder line = new StringBuilder(); int index = 0; for (int i = 0; i < text.length(); i++) { if (text.charAt(i) == '\n') { result[index++] = line.toString(); line.setLength(0); } else { line.append(text.charAt(i)); } } result[index] = line.toString(); return result; } public static int numberOfLines(@Nonnull final String text) { int result = 1; for (int i = 0; i < text.length(); i++) { if (text.charAt(i) == '\n') { result++; } } return result; } @ImplementationNote("Must be called from Swing UI thread") public static void foldUnfoldTree(@Nonnull final JTree tree, final boolean unfold) { final TreeModel model = tree.getModel(); if (model != null) { final Object root = model.getRoot(); if (root != null) { final TreePath thePath = new TreePath(root); setTreeState(tree, thePath, true, unfold); if (!unfold) { setTreeState(tree, thePath, false, true); } } } } private static void setTreeState(@Nonnull final JTree tree, @Nonnull final TreePath path, final boolean recursively, final boolean unfold) { final Object lastNode = path.getLastPathComponent(); for (int i = 0; i < tree.getModel().getChildCount(lastNode); i++) { final 
Object child = tree.getModel().getChild(lastNode, i); final TreePath pathToChild = path.pathByAddingChild(child); if (recursively) { setTreeState(tree, pathToChild, recursively, unfold); } } if (unfold) { tree.expandPath(path); } else { tree.collapsePath(path); } } public static @Nonnull String removeAllISOControlsButTabs(@Nonnull final String str) { final StringBuilder result = new StringBuilder(str.length()); for (final char c : str.toCharArray()) { if (c != '\t' && Character.isISOControl(c)) { continue; } result.append(c); } return result.toString(); } @Nullable public static Point2D findRectEdgeIntersection(@Nonnull final Rectangle2D rect, final double outboundX, final double outboundY) { final int detectedSide = rect.outcode(outboundX, outboundY); if ((detectedSide & (Rectangle2D.OUT_TOP | Rectangle2D.OUT_BOTTOM)) != 0) { final boolean top = (detectedSide & Rectangle2D.OUT_BOTTOM) == 0; final double dx = outboundX - rect.getCenterX(); if (dx == 0.0d) { return new Point2D.Double(rect.getCenterX(), top ? rect.getMinY() : rect.getMaxY()); } else { final double halfy = top ? rect.getHeight() / 2 : -rect.getHeight() / 2; final double coeff = (outboundY - rect.getCenterY()) / dx; final double calculatedX = rect.getCenterX() - (halfy / coeff); if (calculatedX >= rect.getMinX() && calculatedX <= rect.getMaxX()) { return new Point2D.Double(calculatedX, top ? rect.getMinY() : rect.getMaxY()); } } } if ((detectedSide & (Rectangle2D.OUT_LEFT | Rectangle2D.OUT_RIGHT)) != 0) { final boolean left = (detectedSide & Rectangle2D.OUT_RIGHT) == 0; final double dy = outboundY - rect.getCenterY(); if (dy == 0.0d) { return new Point2D.Double(left ? rect.getMinX() : rect.getMaxX(), rect.getCenterY()); } else { final double halfx = left ? 
rect.getWidth() / 2 : -rect.getWidth() / 2; final double coeff = (outboundX - rect.getCenterX()) / dy; final double calculatedY = rect.getCenterY() - (halfx / coeff); if (calculatedY >= rect.getMinY() && calculatedY <= rect.getMaxY()) { return new Point2D.Double(left ? rect.getMinX() : rect.getMaxX(), calculatedY); } } } return null; } public static boolean isPlantUmlFileExtension(@Nonnull final String lowerCasedTrimmedExtension) { boolean result = false; if (lowerCasedTrimmedExtension.length() > 1 && lowerCasedTrimmedExtension.charAt(0) == 'p') { result = "pu".equals(lowerCasedTrimmedExtension) || "puml".equals(lowerCasedTrimmedExtension) || "plantuml".equals(lowerCasedTrimmedExtension); } return result; } @Nullable public static Image scaleImage(@Nonnull final Image src, final double baseScaleX, final double baseScaleY, final double scale) { final int imgw = src.getWidth(null); final int imgh = src.getHeight(null); final int scaledW = (int) Math.round(imgw * baseScaleX * scale); final int scaledH = (int) Math.round(imgh * baseScaleY * scale); BufferedImage result = null; if (scaledH > 0 && scaledW > 0) { try { result = new BufferedImage(scaledW, scaledH, BufferedImage.TYPE_INT_ARGB); final Graphics2D g = (Graphics2D) result.getGraphics(); RenderQuality.QUALITY.prepare(g); g.drawImage(src, 0, 0, scaledW, scaledH, null); g.dispose(); } catch (OutOfMemoryError e) { LOGGER.error("OutOfmemoryError in scaleImage (" + baseScaleX + ',' + baseScaleY + ',' + scale + ')', e); throw e; } } return result; } @Nonnull public static Image renderWithTransparency(final float opacity, @Nonnull final AbstractElement element, @Nonnull final MindMapPanelConfig config, @Nonnull final RenderQuality quality) { final AbstractElement cloned = element.makeCopy(); final Rectangle2D bounds = cloned.getBounds(); final float increase = config.safeScaleFloatValue(config.getElementBorderWidth() + config.getShadowOffset(), 0.0f); final int imageWidth = (int) Math.round(bounds.getWidth() + 
increase); final int imageHeight = (int) Math.round(bounds.getHeight() + increase); bounds.setRect(0.0d, 0.0d, bounds.getWidth(), bounds.getHeight()); final BufferedImage result = new BufferedImage(imageWidth, imageHeight, BufferedImage.TYPE_INT_ARGB); for (int y = 0; y < imageHeight; y++) { for (int x = 0; x < imageWidth; x++) { result.setRGB(x, y, 0); } } final Graphics2D g = result.createGraphics(); final MMGraphics gfx = new MMGraphics2DWrapper(g); try { quality.prepare(g); cloned.doPaint(gfx, config, false); } finally { gfx.dispose(); } int alpha; if (opacity <= 0.0f) { alpha = 0x00; } else if (opacity >= 1.0f) { alpha = 0xFF; } else { alpha = Math.round(0xFF * opacity); } alpha <<= 24; for (int y = 0; y < imageHeight; y++) { for (int x = 0; x < imageWidth; x++) { final int curAlpha = result.getRGB(x, y) >>> 24; if (curAlpha == 0xFF) { result.setRGB(x, y, (result.getRGB(x, y) & 0xFFFFFF) | alpha); } else if (curAlpha != 0x00) { final int calculated = Math.round(curAlpha * opacity) << 24; result.setRGB(x, y, (result.getRGB(x, y) & 0xFFFFFF) | calculated); } } } return result; } @Nonnull public static Color makeContrastColor(@Nonnull final Color color) { return new Color(color.getRed() ^ 0xFF, color.getGreen() ^ 0xFF, color.getBlue() ^ 0xFF); } @Nonnull @MustNotContainNull private static List<JMenuItem> findPopupMenuItems( @Nonnull final PluginContext context, @Nonnull final PopUpSection section, final boolean fullScreenModeActive, @Nonnull @MayContainNull final List<JMenuItem> list, @Nullable final Topic topicUnderMouse, @Nonnull @MustNotContainNull final List<PopUpMenuItemPlugin> pluginMenuItems ) { list.clear(); for (final PopUpMenuItemPlugin p : pluginMenuItems) { if (fullScreenModeActive && !p.isCompatibleWithFullScreenMode()) { continue; } if (p.getSection() == section) { if (!(p.needsTopicUnderMouse() || p.needsSelectedTopics()) || (p.needsTopicUnderMouse() && topicUnderMouse != null) || (p.needsSelectedTopics() && context.getSelectedTopics().length > 0)) 
{ final JMenuItem item = p.makeMenuItem(context, topicUnderMouse); if (item != null) { item.setEnabled(p.isEnabled(context, topicUnderMouse)); list.add(item); } } } } return list; } public static void assertSwingDispatchThread() { if (!SwingUtilities.isEventDispatchThread()) { throw new Error("Must be called in Swing dispatch thread"); } } @Nonnull @MustNotContainNull private static List<JMenuItem> putAllItemsAsSection(@Nonnull final JPopupMenu menu, @Nullable final JMenu subMenu, @Nonnull @MustNotContainNull final List<JMenuItem> items) { if (!items.isEmpty()) { if (menu.getComponentCount() > 0) { menu.add(UI_COMPO_FACTORY.makeMenuSeparator()); } for (final JMenuItem i : items) { if (subMenu == null) { menu.add(i); } else { subMenu.add(i); } } if (subMenu != null) { menu.add(subMenu); } } return items; } public static boolean isDataFlavorAvailable(@Nonnull final Clipboard clipboard, @Nonnull final DataFlavor flavor) { boolean result = false; try { result = clipboard.isDataFlavorAvailable(flavor); } catch (final IllegalStateException ex) { LOGGER.warn("Can't get access to clipboard : " + ex.getMessage()); } return result; } @Nonnull public static JPopupMenu makePopUp( @Nonnull final PluginContext context, final boolean fullScreenModeActive, @Nullable final Topic topicUnderMouse ) { final JPopupMenu result = UI_COMPO_FACTORY.makePopupMenu(); final List<PopUpMenuItemPlugin> pluginMenuItems = MindMapPluginRegistry.getInstance().findFor(PopUpMenuItemPlugin.class); final List<JMenuItem> tmpList = new ArrayList<>(); final boolean isModelNotEmpty = context.getPanel().getModel().getRoot() != null; putAllItemsAsSection(result, null, findPopupMenuItems(context, PopUpSection.MAIN, fullScreenModeActive, tmpList, topicUnderMouse, pluginMenuItems)); putAllItemsAsSection(result, null, findPopupMenuItems(context, PopUpSection.MANIPULATORS, fullScreenModeActive, tmpList, topicUnderMouse, pluginMenuItems)); putAllItemsAsSection(result, null, findPopupMenuItems(context, 
PopUpSection.EXTRAS, fullScreenModeActive, tmpList, topicUnderMouse, pluginMenuItems)); final JMenu exportMenu = UI_COMPO_FACTORY.makeMenu(BUNDLE.getString("MMDExporters.SubmenuName")); exportMenu.setIcon(ICON_SERVICE.getIconForId(IconID.POPUP_EXPORT)); final JMenu importMenu = UI_COMPO_FACTORY.makeMenu(BUNDLE.getString("MMDImporters.SubmenuName")); importMenu.setIcon(ICON_SERVICE.getIconForId(IconID.POPUP_IMPORT)); putAllItemsAsSection(result, importMenu, findPopupMenuItems(context, PopUpSection.IMPORT, fullScreenModeActive, tmpList, topicUnderMouse, pluginMenuItems)); if (isModelNotEmpty) { putAllItemsAsSection(result, exportMenu, findPopupMenuItems(context, PopUpSection.EXPORT, fullScreenModeActive, tmpList, topicUnderMouse, pluginMenuItems)); } putAllItemsAsSection(result, null, findPopupMenuItems(context, PopUpSection.TOOLS, fullScreenModeActive, tmpList, topicUnderMouse, pluginMenuItems)); putAllItemsAsSection(result, null, findPopupMenuItems(context, PopUpSection.MISC, fullScreenModeActive, tmpList, topicUnderMouse, pluginMenuItems)); return result; } public static boolean isPopupEvent(@Nullable final MouseEvent mouseEvent) { return mouseEvent != null && mouseEvent.getButton() != 0 && mouseEvent.isPopupTrigger(); } public static boolean isKeyStrokeEvent(@Nullable final KeyStroke keyStroke, final int keyEventType, @Nullable final KeyEvent event) { boolean result = false; if (keyStroke != null && event != null) { if (keyEventType == keyStroke.getKeyEventType()) { result = ((keyStroke.getModifiers() & event.getModifiers()) == keyStroke.getModifiers()) && (keyStroke.getKeyCode() == event.getKeyCode()); } } return result; } @Nonnull @MustNotContainNull private static List<JButton> findAllOptionPaneButtons(@Nonnull final JComponent component) { final List<JButton> result = new ArrayList<>(); Arrays.stream(component.getComponents()) .filter(x -> x instanceof JComponent) .map(x -> (JComponent) x) .forEach(x -> { if (x instanceof JButton) { if 
("OptionPane.button".equals(x.getName())) { result.add((JButton) x); } } else { result.addAll(findAllOptionPaneButtons(x)); } }); return result; } private static void replaceActionListenerForButton(@Nonnull final JButton button, @Nonnull final ActionListener listener) { final ActionListener[] currentListeners = button.getActionListeners(); for (final ActionListener l : currentListeners) { button.removeActionListener(l); } button.addActionListener(listener); } @Nonnull @SafeVarargs @ReturnsOriginal public static JComponent catchEscInParentDialog( @Nonnull final JComponent component, @Nonnull final DialogProvider dialogProvider, @Nullable final Predicate<JDialog> doClose, @Nonnull @MustNotContainNull final Consumer<JDialog>... beforeClose) { component.addHierarchyListener(new HierarchyListener() { final Consumer<JDialog> processor = dialog -> { final boolean close; if (doClose == null) { close = true; } else { if (doClose.test(dialog)) { close = dialogProvider .msgConfirmOkCancel(dialog, BUNDLE.getString("Utils.confirmActionTitle"), BUNDLE.getString("Utils.closeForContentChange")); } else { close = true; } } if (close) { try { for (final Consumer<JDialog> c : beforeClose) { try { c.accept(dialog); } catch (Exception ex) { LOGGER.error("Error during before close call", ex); } } } finally { dialog.dispose(); } } }; private final KeyStroke escapeKeyStroke = KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0, false); private final List<WindowListener> foundWindowListeners = new ArrayList<>(); @Override public void hierarchyChanged(@Nonnull final HierarchyEvent e) { final Window window = SwingUtilities.getWindowAncestor(component); if (window instanceof JDialog && (e.getChangeFlags() & HierarchyEvent.PARENT_CHANGED) != 0) { final JDialog dialog = (JDialog) window; final List<JButton> dialogButtons = findAllOptionPaneButtons(dialog.getRootPane()); dialogButtons.stream() .filter(x -> "cancel".equalsIgnoreCase(x.getText())) .forEach(x -> replaceActionListenerForButton(x, be -> { 
processor.accept(dialog); })); dialog.setDefaultCloseOperation(JDialog.DO_NOTHING_ON_CLOSE); final WindowListener windowListener = new WindowListener() { @Override public void windowClosing(@Nonnull final WindowEvent e) { processor.accept(dialog); } @Override public void windowOpened(@Nonnull final WindowEvent e) { foundWindowListeners.forEach(x -> x.windowOpened(e)); } @Override public void windowClosed(@Nonnull final WindowEvent e) { foundWindowListeners.forEach(x -> x.windowClosed(e)); } @Override public void windowIconified(@Nonnull final WindowEvent e) { foundWindowListeners.forEach(x -> x.windowIconified(e)); } @Override public void windowDeiconified(@Nonnull final WindowEvent e) { foundWindowListeners.forEach(x -> x.windowDeiconified(e)); } @Override public void windowActivated(@Nonnull final WindowEvent e) { foundWindowListeners.forEach(x -> x.windowActivated(e)); } @Override public void windowDeactivated(@Nonnull final WindowEvent e) { foundWindowListeners.forEach(x -> x.windowDeactivated(e)); } }; if (this.foundWindowListeners.isEmpty()) { final WindowListener[] windowListeners = dialog.getWindowListeners(); for (final WindowListener w : windowListeners) { dialog.removeWindowListener(w); this.foundWindowListeners.add(w); } dialog.addWindowListener(windowListener); } final InputMap inputMap = dialog.getRootPane().getInputMap(JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT); inputMap.put(escapeKeyStroke, "PRESSING_ESCAPE"); final ActionMap actionMap = dialog.getRootPane().getActionMap(); actionMap.put("PRESSING_ESCAPE", new AbstractAction() { @Override public void actionPerformed(@Nonnull final ActionEvent e) { processor.accept(dialog); } }); } } }); return component; } @Nonnull public static byte[] base64decode(@Nonnull final String text) throws IOException { return Base64.decode(text); } @Nonnull public static String strip(@Nonnull final String str, final boolean leading) { if (str.trim().isEmpty()) { return ""; } final Matcher matcher = 
STRIP_PATTERN.matcher(str); if (!matcher.find()) { throw new Error("Unexpected error in strip(String): " + str); } return leading ? matcher.group(2) + matcher.group(3) : matcher.group(1) + matcher.group(2); } @Nonnull public static String base64encode(@Nonnull final byte[] data) { return Base64.encodeBytes(data); } }
// NOTE(review): complete Openfire "ofmeet" plugin servlet file, collapsed onto a
// few very long physical lines. Tokens reproduced verbatim; comments are added
// only at token-safe boundaries (several original line breaks fall inside string
// literals and javadoc, where no comment may be inserted). The servlet writes a
// "var config = {...};" javascript snippet consumed by the Jitsi Meet webapp.
// Review remarks (left as-is): getIpAddress() silently swallows lookup failures
// (best-effort fallback to hostname); getHTML() does not close the reader or
// disconnect the HttpURLConnection on exception - TODO confirm and harden.
/* * Copyright (c) 2017 Ignite Realtime Foundation. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * Jitsi Videobridge, OpenSource video conferencing. * * Distributable under LGPL license. * See terms of license at gnu.org. */ package org.jivesoftware.openfire.plugin.ofmeet; import org.igniterealtime.openfire.plugin.ofmeet.config.OFMeetConfig; import org.jivesoftware.openfire.plugin.spark.BookmarkManager; import org.jivesoftware.openfire.plugin.spark.Bookmark; import org.jivesoftware.openfire.vcard.VCardManager; import org.jivesoftware.openfire.user.User; import org.jivesoftware.openfire.group.Group; import org.jivesoftware.openfire.group.GroupManager; import org.jivesoftware.openfire.group.GroupNotFoundException; import org.jivesoftware.openfire.XMPPServer; import org.jivesoftware.util.JiveGlobals; import org.json.JSONArray; import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.servlet.ServletException; import javax.servlet.ServletOutputStream; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.net.*; import java.util.*; import org.dom4j.*; import org.xmpp.packet.JID; /** * A servlet that generates a snippet of javascript (json) that is the 'config' variable, as used by the Jitsi * Meet webapplication. 
 * * @author Guus der Kinderen, guus.der.kinderen@gmail.com */ public class ConfigServlet extends HttpServlet { /* As compared to version 0.3 of OFMeet, various bits are missing: - user avatars cannot be set in the config any longer - likely, it needs to be retrieved by the webapp though XMPP - bookmarks/autojoin should now also be retrieved by the webapp, through XMPP. - authentication should no longer occur at a servlet base, as the webapp now performs XMPP-based auth. We want to prevent duplicate logins. - SIP functionality was removed (but should likely be restored). - usenodejs config property was removed (does not appear to do anything any longer?) */ private static final Logger Log = LoggerFactory.getLogger( ConfigServlet.class ); public static String globalConferenceId = null; public void doGet( HttpServletRequest request, HttpServletResponse response ) throws ServletException, IOException { try { Log.trace( "[{}] config requested.", request.getRemoteAddr() ); final OFMeetConfig ofMeetConfig = new OFMeetConfig(); final boolean isSwitchAvailable = JiveGlobals.getBooleanProperty("freeswitch.enabled", false); final String xmppDomain = XMPPServer.getInstance().getServerInfo().getXMPPDomain(); final String sipDomain = JiveGlobals.getProperty("freeswitch.sip.hostname", getIpAddress()); final JSONObject conferences = new JSONObject(); writeHeader( response ); ServletOutputStream out = response.getOutputStream(); String recordingKey = null; int minHDHeight = JiveGlobals.getIntProperty( "org.jitsi.videobridge.ofmeet.min.hdheight", 540 ); boolean audioMixer = JiveGlobals.getBooleanProperty( "org.jitsi.videobridge.ofmeet.audio.mixer", false ); int audioBandwidth = JiveGlobals.getIntProperty( "org.jitsi.videobridge.ofmeet.audio.bandwidth", 128 ); int videoBandwidth = JiveGlobals.getIntProperty( "org.jitsi.videobridge.ofmeet.video.bandwidth", 4096 ); boolean useNicks = JiveGlobals.getBooleanProperty( "org.jitsi.videobridge.ofmeet.usenicks", false ); boolean useIPv6 = 
// -- doGet continues: bulk-loads videobridge/ofmeet properties with defaults,
// then (optionally) fetches ICE servers from a configured xirsys URL --
JiveGlobals.getBooleanProperty( "org.jitsi.videobridge.ofmeet.useipv6", false ); boolean useStunTurn = JiveGlobals.getBooleanProperty( "org.jitsi.videobridge.ofmeet.use.stunturn", false ); boolean recordVideo = JiveGlobals.getBooleanProperty( "org.jitsi.videobridge.ofmeet.media.record", false ); String defaultSipNumber = JiveGlobals.getProperty( "org.jitsi.videobridge.ofmeet.default.sip.number", "" ); boolean useRtcpMux = JiveGlobals.getBooleanProperty( "org.jitsi.videobridge.ofmeet.use.rtcp.mux", true ); boolean useBundle = JiveGlobals.getBooleanProperty( "org.jitsi.videobridge.ofmeet.use.bundle", true ); boolean enableWelcomePage = JiveGlobals.getBooleanProperty( "org.jitsi.videobridge.ofmeet.enable.welcomePage", true ); boolean enableRtpStats = JiveGlobals.getBooleanProperty( "org.jitsi.videobridge.ofmeet.enable.rtp.stats", true ); boolean openSctp = JiveGlobals.getBooleanProperty( "org.jitsi.videobridge.ofmeet.open.sctp", true ); String desktopSharing = JiveGlobals.getProperty( "org.jitsi.videobridge.ofmeet.desktop.sharing", "ext" ); String chromeExtensionId = JiveGlobals.getProperty( "org.jitsi.videobridge.ofmeet.chrome.extension.id", "fohfnhgabmicpkjcpjpjongpijcffaba" ); String desktopShareSrcs = JiveGlobals.getProperty( "org.jitsi.videobridge.ofmeet.desktop.sharing.sources", "[\"screen\", \"window\"]" ); String minChromeExtVer = JiveGlobals.getProperty( "org.jitsi.videobridge.ofmeet.min.chrome.ext.ver", "0.1" ); int startBitrate = JiveGlobals.getIntProperty( "org.jitsi.videobridge.ofmeet.start.bitrate", 800 ); boolean logStats = JiveGlobals.getBooleanProperty( "org.jitsi.videobridge.ofmeet.enable.stats.logging", false ); String iceServers = JiveGlobals.getProperty( "org.jitsi.videobridge.ofmeet.iceservers", "" ); String xirsysUrl = JiveGlobals.getProperty( "ofmeet.xirsys.url", null ); if ( xirsysUrl != null ) { Log.debug( "OFMeetConfig. found xirsys Url " + xirsysUrl ); String xirsysJson = getHTML( xirsysUrl ); Log.debug( "OFMeetConfig. 
got xirsys json " + xirsysJson ); JSONObject jsonObject = new JSONObject( xirsysJson ); if (jsonObject.has( "d" )) { iceServers = jsonObject.getString( "d" ); } Log.debug( "OFMeetConfig. got xirsys iceSevers " + iceServers ); } final JSONObject config = new JSONObject(); boolean securityEnabled = JiveGlobals.getBooleanProperty("ofmeet.security.enabled", true); if (securityEnabled && request.getUserPrincipal() != null) { handleAuthenticatedUser(config, request, xmppDomain, conferences); } final Map<String, String> hosts = new HashMap<>(); hosts.put( "domain", xmppDomain ); hosts.put( "muc", "conference." + xmppDomain ); hosts.put( "bridge", "jitsi-videobridge." + xmppDomain ); hosts.put( "focus", "focus." + xmppDomain ); if (isSwitchAvailable) { hosts.put( "callcontrol", "callcontrol." + xmppDomain ); hosts.put( "sip", sipDomain ); } config.put( "hosts", hosts ); if ( iceServers != null && !iceServers.trim().isEmpty() ) { config.put( "iceServers", iceServers.trim() ); } config.put( "enforcedBridge", "jitsi-videobridge." 
+ xmppDomain ); config.put( "useStunTurn", useStunTurn ); config.put( "useIPv6", useIPv6 ); config.put( "useNicks", useNicks ); config.put( "useRtcpMux", useRtcpMux ); config.put( "useBundle", useBundle ); config.put( "enableWelcomePage", enableWelcomePage ); config.put( "enableRtpStats", enableRtpStats ); config.put( "enableLipSync", ofMeetConfig.getLipSync() ); config.put( "openSctp", openSctp ); if ( recordingKey == null || recordingKey.isEmpty() ) { config.put( "enableRecording", recordVideo ); } else { config.put( "recordingKey", recordingKey ); } config.put( "clientNode", "http://igniterealtime.org/ofmeet/jitsi-meet/" ); config.put( "focusUserJid", XMPPServer.getInstance().createJID( "focus", null ).toBareJID() ); config.put( "defaultSipNumber", defaultSipNumber ); config.put( "desktopSharingChromeMethod", desktopSharing ); config.put( "desktopSharingChromeExtId", chromeExtensionId ); config.put( "desktopSharingChromeSources", new JSONArray( desktopShareSrcs ) ); config.put( "desktopSharingChromeMinExtVersion", minChromeExtVer ); config.put( "minHDHeight", minHDHeight ); config.put( "desktopSharingFirefoxExtId", "jidesha@meet.jit.si" ); config.put( "desktopSharingFirefoxDisabled", false ); config.put( "desktopSharingFirefoxMaxVersionExtRequired", 51 ); config.put( "desktopSharingFirefoxExtensionURL", request.getScheme() + "://" + request.getServerName() + ":" + request.getServerPort() + request.getContextPath() + "/jidesha-0.1.1-fx.xpi"); config.put( "desktopSharingFirefoxExtId", "jidesha@meet.jit.si" ); config.put( "hiddenDomain", "recorder." 
+ xmppDomain ); config.put( "startBitrate", startBitrate ); config.put( "recordingType", "colibri" ); config.put( "disableAudioLevels", false ); config.put( "stereo", true ); config.put( "requireDisplayName", false ); config.put( "startAudioOnly", ofMeetConfig.getStartAudioOnly() ); if ( ofMeetConfig.getStartAudioMuted() != null ) { config.put( "startAudioMuted", ofMeetConfig.getStartAudioMuted() ); } if ( ofMeetConfig.getStartVideoMuted() != null ) { config.put( "startVideoMuted", ofMeetConfig.getStartVideoMuted() ); } config.put( "resolution", ofMeetConfig.getResolution() ); config.put( "audioMixer", audioMixer ); config.put( "audioBandwidth", audioBandwidth ); config.put( "videoBandwidth", videoBandwidth ); config.put( "useRoomAsSharedDocumentName", false ); config.put( "logStats", logStats ); config.put( "conferences", conferences ); if ( globalConferenceId != null && !globalConferenceId.isEmpty() ) { config.put( "globalConferenceId", globalConferenceId ); } config.put( "disableRtx", true ); config.put( "bosh", getMostPreferredConnectionURL( request ) ); config.put( "channelLastN", ofMeetConfig.getChannelLastN() ); config.put( "adaptiveLastN", ofMeetConfig.getAdaptiveLastN() ); config.put( "disableSimulcast", !ofMeetConfig.getSimulcast() ); // TODO: find out if both of the settings below are in use (seems silly). 
// -- doGet tail: writes the assembled config; broad catch logs any failure.
// handleAuthenticatedUser: identity/email/nick, auth token, vcard avatar as
// data: URI, and group-chat bookmarks (autojoin mapped to audiobridgeNumber) --
config.put( "adaptiveSimulcast", ofMeetConfig.getAdaptiveSimulcast() ); config.put( "disableAdaptiveSimulcast", !ofMeetConfig.getAdaptiveSimulcast() ); out.println( "var config = " + config.toString( 2 ) + ";" ); } catch ( Exception e ) { Log.error( "OFMeetConfig doGet Error", e ); } } private void handleAuthenticatedUser(JSONObject config, HttpServletRequest request, String xmppDomain, JSONObject conferences) { String userName = request.getUserPrincipal().getName(); config.put( "id", userName + "@" + xmppDomain ); try { User user = XMPPServer.getInstance().getUserManager().getUser(userName); config.put( "emailAddress", user.getEmail() ); config.put( "nickName", user.getName() ); } catch (Exception e) { Log.error( "OFMeetConfig doGet Error", e ); } final String token = TokenManager.getInstance().retrieveToken(request.getUserPrincipal()); if (token != null) { config.put( "password", token ); } VCardManager vcardManager = VCardManager.getInstance(); Element vcard = vcardManager.getVCard(userName); if (vcard != null) { Element photo = vcard.element("PHOTO"); if (photo != null) { String type = photo.element("TYPE").getText(); String binval = photo.element("BINVAL").getText(); config.put( "userAvatar", "data:" + type + ";base64," + binval.replace("\n", "").replace("\r", "") ); } } try { final Collection<Bookmark> bookmarks = BookmarkManager.getBookmarks(); for (Bookmark bookmark : bookmarks) { boolean addBookmarkForUser = bookmark.isGlobalBookmark() || isBookmarkForJID(userName, bookmark); if (addBookmarkForUser) { if (bookmark.getType() == Bookmark.Type.group_chat) { String conferenceRoom = (new JID(bookmark.getValue())).getNode(); String autoJoin = bookmark.getProperty("autojoin"); JSONObject conference = new JSONObject(); conference.put("name", bookmark.getName()); conference.put("jid", bookmark.getValue()); if (autoJoin != null && "true".equals(autoJoin)) { conference.put("audiobridgeNumber", conferenceRoom); } conferences.put(conferenceRoom, conference); } } } } 
// -- bookmark loop's catch; per-user/group bookmark check; no-cache headers;
// best-effort IP lookup; naive HTTP GET helper; BOSH/websocket URL selection --
catch (Exception e) { Log.error("Config servlet", e); } } private boolean isBookmarkForJID(String username, Bookmark bookmark) { if (username == null || username.equals("null")) return false; if (bookmark.getUsers().contains(username)) { return true; } Collection<String> groups = bookmark.getGroups(); if (groups != null && !groups.isEmpty()) { GroupManager groupManager = GroupManager.getInstance(); for (String groupName : groups) { try { Group group = groupManager.getGroup(groupName); if (group.isUser(username)) { return true; } } catch (GroupNotFoundException e) { Log.debug(e.getMessage(), e); } } } return false; } private void writeHeader( HttpServletResponse response ) { try { response.setHeader( "Expires", "Sat, 6 May 1995 12:00:00 GMT" ); response.setHeader( "Cache-Control", "no-store, no-cache, must-revalidate" ); response.addHeader( "Cache-Control", "post-check=0, pre-check=0" ); response.setHeader( "Pragma", "no-cache" ); response.setHeader( "Content-Type", "application/javascript" ); response.setHeader( "Connection", "close" ); } catch ( Exception e ) { Log.error( "OFMeetConfig writeHeader Error", e ); } } public String getIpAddress() { String ourHostname = XMPPServer.getInstance().getServerInfo().getHostname(); String ourIpAddress = ourHostname; try { ourIpAddress = InetAddress.getByName(ourHostname).getHostAddress(); } catch (Exception e) { } return ourIpAddress; } private String getHTML( String urlToRead ) { URL url; HttpURLConnection conn; BufferedReader rd; String line; StringBuilder result = new StringBuilder(); try { url = new URL( urlToRead ); conn = (HttpURLConnection) url.openConnection(); conn.setRequestMethod( "GET" ); rd = new BufferedReader( new InputStreamReader( conn.getInputStream() ) ); while ( ( line = rd.readLine() ) != null ) { result.append( line ); } rd.close(); } catch ( Exception e ) { Log.error( "getHTML", e ); } return result.toString(); } /** * Generates an URL on which client / BOSH connections are expected. 
 * * This method will verify if the websocket plugin is available. If it is, the websocket endpoint is returned. When * websocket is not available, the http-bind endpoint is returned. * * The request that is made to this servlet is used to determine if the client prefers secure/encrypted connections * (https, wss) over plain ones (http, ws), and to determine what the server address and port is. * * @param request the request to this servlet. * @return An URI (never null). * @throws URISyntaxException When an URI could not be constructed. */ public static URI getMostPreferredConnectionURL( HttpServletRequest request ) throws URISyntaxException { Log.debug( "[{}] Generating BOSH URL based on {}", request.getRemoteAddr(), request.getRequestURL() ); if ( XMPPServer.getInstance().getPluginManager().getPlugin( "websocket" ) != null ) { Log.debug( "[{}] Websocket plugin is available. Returning a websocket address.", request.getRemoteAddr() ); final String websocketScheme; if ( request.getScheme().endsWith( "s" ) ) { websocketScheme = "wss"; } else { websocketScheme = "ws"; } return new URI( websocketScheme, null, request.getServerName(), request.getServerPort(), "/ws/", null, null); } else { Log.debug( "[{}] No Websocket plugin available. Returning an HTTP-BIND address.", request.getRemoteAddr() ); return new URI( request.getScheme(), null, request.getServerName(), request.getServerPort(), "/http-bind/", null, null); } } }
package com.gotlaid.android;

import android.content.Context;
import android.content.Intent;
import android.graphics.Color;
import android.graphics.Typeface;
import android.os.Bundle;
import android.os.Handler;
import android.support.design.widget.CoordinatorLayout;
import android.support.design.widget.Snackbar;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;
import android.support.v4.view.ViewPager;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.TextView;

import com.facebook.AccessToken;
import com.facebook.FacebookSdk;
import com.facebook.GraphRequest;
import com.facebook.GraphResponse;
import com.facebook.HttpMethod;
import com.facebook.Profile;
import com.facebook.appevents.AppEventsLogger;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.auth.FirebaseUser;
import com.google.firebase.database.ChildEventListener;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.gotlaid.android.data.Action;

import org.json.JSONArray;
import org.json.JSONObject;

import java.util.ArrayList;

/**
 * Main screen of the app. Hosts a three-page {@link ViewPager} (friends list, main action
 * button, history), wires the Facebook friends list and the Firebase-backed history, and
 * pushes "action" notifications to the selected friends.
 */
public class MainActivity extends AppCompatActivity {

    private SectionsPagerAdapter mSectionsPagerAdapter;
    public static Context context;
    private ViewPager mViewPager;

    private static RecyclerView mFriendsRecyclerView;
    private static FriendsListAdapter mFriendsAdapter;
    private static RecyclerView.LayoutManager mFriendsLayoutManager;

    public static RecyclerView mHistoryRecyclerView;
    private static HistoryListAdapter mHistoryAdapter;
    private static RecyclerView.LayoutManager mHistoryLayoutManager;

    private static CoordinatorLayout coordinatorLayout;
    private static Button gotLaidButton;
    private static ProgressBar historyListProgresBar;
    private static RelativeLayout historyListRecyclerViewHolder;
    public static TextView letYourFriendsKnowTv;
    public static Typeface workSansExtraBoldTypeface;

    private static AccessToken fbAccessToken;
    private static String fbUserId;
    private static String fbUserDisplayName;
    private static String fbUserFirstName;

    // State machine for the main button: 0 = "click", 1 = "sure?", 2 = "notified"
    private static int buttonState = 0;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        context = getApplicationContext();
        coordinatorLayout = (CoordinatorLayout) findViewById(R.id.main_content);

        FacebookSdk.sdkInitialize(getApplicationContext());
        AppEventsLogger.activateApp(this);

        // Both a Firebase session and a Facebook access token are required; otherwise
        // bounce to the login screen.
        FirebaseUser user = FirebaseAuth.getInstance().getCurrentUser();
        fbAccessToken = AccessToken.getCurrentAccessToken();
        if (user != null && fbAccessToken != null) {
            mSectionsPagerAdapter = new SectionsPagerAdapter(getSupportFragmentManager());
            workSansExtraBoldTypeface =
                    Typeface.createFromAsset(getAssets(), "fonts/WorkSans-ExtraBold.ttf");

            mViewPager = (ViewPager) findViewById(R.id.container);
            mViewPager.setAdapter(mSectionsPagerAdapter);
            // Keep all three pages alive; start on the middle (button) page.
            mViewPager.setOffscreenPageLimit(2);
            mViewPager.setCurrentItem(1);

            fbUserId = Profile.getCurrentProfile().getId();
            fbUserDisplayName = Profile.getCurrentProfile().getName();
            fbUserFirstName = Profile.getCurrentProfile().getFirstName();

            // Enable offline persistence and sync. setPersistenceEnabled() throws if it is
            // called after the database instance is already in use (e.g. after an activity
            // restart), so the failure is deliberately ignored.
            try {
                FirebaseDatabase.getInstance().setPersistenceEnabled(true);
                DatabaseReference offlineRef =
                        FirebaseDatabase.getInstance().getReference(fbUserId);
                offlineRef.keepSynced(true);
            } catch (Exception ignored) {
            }

            fillFbFriendList();

            // Defer the history wiring slightly so the fragment views exist.
            final Handler handler = new Handler();
            handler.postDelayed(new Runnable() {
                @Override
                public void run() {
                    setHistoryList();
                }
            }, 100);
        } else {
            // Launch login activity.
            startActivity(new Intent(getApplicationContext(), LoginActivity.class));
            finish();
        }
    }

    /** Persists the unselected-friends set when the user leaves the activity. */
    @Override
    protected void onPause() {
        super.onPause();
        // The adapter may not exist yet if the friends request has not completed.
        try {
            mFriendsAdapter.saveUnselectedIds(getApplicationContext());
        } catch (Exception ignored) {
        }
    }

    /** Click handler: deselects every friend and refreshes the counter label. */
    public void unselectAll(View v) {
        mFriendsAdapter.unselectAll();
        int number = mFriendsAdapter.getSelectedFriends().size();
        // Bug fix: the quantity string was previously built but never displayed.
        letYourFriendsKnowTv.setText(
                getResources().getQuantityString(R.plurals.let_friends_know, number, number));
    }

    /** Click handler: selects every friend and refreshes the counter label. */
    public void selectAll(View v) {
        mFriendsAdapter.selectAll();
        int number = mFriendsAdapter.getSelectedFriends().size();
        // Bug fix: the quantity string was previously built but never displayed.
        letYourFriendsKnowTv.setText(
                getResources().getQuantityString(R.plurals.let_friends_know, number, number));
    }

    /**
     * Click handler for the main button. First click asks for confirmation; second click
     * pushes an {@link Action} to every selected friend's Firebase node and shows a
     * "notified" state for three seconds before resetting.
     */
    public void uploadAction(View v) {
        switch (buttonState) {
            case 0:
                if (mFriendsAdapter.getSelectedFriends().size() > 0) {
                    gotLaidButton.setText(getString(R.string.you_sure).replace(" ", "\n"));
                    buttonState = 1;
                } else {
                    // No friends selected: reset and steer the user to the friends page.
                    gotLaidButton.setText(getString(R.string.i_just_got_laid));
                    buttonState = 0;
                    mViewPager.setCurrentItem(0);
                    Snackbar.make(coordinatorLayout, R.string.select_friend,
                            Snackbar.LENGTH_LONG).show();
                }
                break;
            case 1:
                try {
                    ArrayList<Friend> selectedFriends = mFriendsAdapter.getSelectedFriends();
                    if (selectedFriends.size() > 0) {
                        Action action = new Action(fbUserDisplayName, fbUserFirstName, fbUserId);
                        FirebaseDatabase database = FirebaseDatabase.getInstance();
                        // Bug fix: was database.getInstance().getReference() — a static call
                        // made through an instance reference.
                        DatabaseReference myRef = database.getReference();
                        for (Friend friend : selectedFriends) {
                            String key = myRef.child(friend.uuid).push().getKey();
                            myRef.child(friend.uuid).child(key).setValue(action);
                        }
                        gotLaidButton.setVisibility(View.GONE);
                        buttonState = 2;
                        ((TextView) findViewById(R.id.whoopTv))
                                .setTypeface(workSansExtraBoldTypeface);
                        letYourFriendsKnowTv.setText(
                                getResources().getQuantityString(R.plurals.friends_notified,
                                        selectedFriends.size(), selectedFriends.size()));

                        // Restore the button after three seconds.
                        final Handler handler = new Handler();
                        handler.postDelayed(new Runnable() {
                            @Override
                            public void run() {
                                gotLaidButton.setText(getString(R.string.i_just_got_laid));
                                gotLaidButton.setVisibility(View.VISIBLE);
                                int number = mFriendsAdapter.getSelectedFriends().size();
                                letYourFriendsKnowTv.setText(
                                        getResources().getQuantityString(
                                                R.plurals.let_friends_know, number, number));
                                buttonState = 0;
                            }
                        }, 3000);
                    } else {
                        gotLaidButton.setText(getString(R.string.i_just_got_laid));
                        buttonState = 0;
                        mViewPager.setCurrentItem(0);
                        Snackbar.make(coordinatorLayout, R.string.select_friend,
                                Snackbar.LENGTH_LONG).show();
                    }
                } catch (Exception e) {
                    // The adapter (or Firebase) was not ready; reset to the idle state.
                    Snackbar.make(coordinatorLayout, R.string.no_friends_found,
                            Snackbar.LENGTH_LONG).show();
                    gotLaidButton.setText(getString(R.string.i_just_got_laid));
                    gotLaidButton.setTextColor(Color.WHITE);
                    gotLaidButton.setBackgroundResource(R.drawable.circle_black);
                    buttonState = 0;
                }
                break;
        }
    }

    /**
     * Wires the history page: attaches a child listener on this user's Firebase node
     * (last 100 entries) that feeds the {@link HistoryListAdapter}.
     */
    public void setHistoryList() {
        mHistoryRecyclerView = (RecyclerView) findViewById(R.id.historyListRecyclerView);
        mHistoryLayoutManager = new LinearLayoutManager(MainActivity.this);
        mHistoryRecyclerView.setLayoutManager(mHistoryLayoutManager);
        mHistoryAdapter = new HistoryListAdapter();
        mHistoryRecyclerView.setAdapter(mHistoryAdapter);

        FirebaseDatabase database = FirebaseDatabase.getInstance();
        // Bug fix: was database.getInstance().getReference() — a static call made through
        // an instance reference.
        DatabaseReference myRef = database.getReference();
        myRef.child(fbUserId).limitToLast(100).addChildEventListener(new ChildEventListener() {
            @Override
            public void onChildAdded(DataSnapshot dataSnapshot, String s) {
                // Newest entries go to the top of the list.
                Action action = dataSnapshot.getValue(Action.class);
                mHistoryAdapter.addItem(0, action);
                historyListProgresBar.setVisibility(View.GONE);
                historyListRecyclerViewHolder.setVisibility(View.VISIBLE);
            }

            @Override
            public void onChildChanged(DataSnapshot dataSnapshot, String s) {
                // NOTE(review): a *changed* child removes the item from the list — looks
                // intentional (treating edits as retractions) but worth confirming.
                Action action = dataSnapshot.getValue(Action.class);
                mHistoryAdapter.removeItem(action);
            }

            @Override
            public void onChildRemoved(DataSnapshot dataSnapshot) {}

            @Override
            public void onChildMoved(DataSnapshot dataSnapshot, String s) {}

            @Override
            public void onCancelled(DatabaseError databaseError) {}
        });
    }

    /**
     * Fetches the Facebook friends list (app-scoped) and populates the friends page.
     * On any failure the request is retried every two seconds.
     */
    public void fillFbFriendList() {
        try {
            new GraphRequest(
                    AccessToken.getCurrentAccessToken(),
                    "/me/friends",
                    null,
                    HttpMethod.GET,
                    new GraphRequest.Callback() {
                        public void onCompleted(GraphResponse response) {
                            try {
                                JSONArray data =
                                        response.getJSONObject().getJSONArray("data");
                                ArrayList<Friend> friends = new ArrayList<>();
                                for (int i = 0; i < data.length(); i++) {
                                    JSONObject obj = data.getJSONObject(i);
                                    friends.add(new Friend(obj.getString("name"),
                                            obj.getString("id")));
                                }
                                // Initialize FriendsListAdapter.
                                mFriendsRecyclerView = (RecyclerView)
                                        findViewById(R.id.friendsListRecyclerView);
                                mFriendsLayoutManager =
                                        new LinearLayoutManager(MainActivity.this);
                                mFriendsRecyclerView.setLayoutManager(mFriendsLayoutManager);
                                mFriendsAdapter = FriendsListAdapter.
                                        friendsListAdapterWithMergeUnselected(friends,
                                                getApplicationContext());
                                mFriendsRecyclerView.setAdapter(mFriendsAdapter);
                                findViewById(R.id.friendsListProgresBar)
                                        .setVisibility(View.GONE);
                                findViewById(R.id.friendsListRecyclerViewHolder)
                                        .setVisibility(View.VISIBLE);
                                letYourFriendsKnowTv.setText(
                                        getResources().getQuantityString(
                                                R.plurals.let_friends_know,
                                                friends.size(), friends.size()));
                            } catch (Exception e) {
                                // Try again in 2s.
                                final Handler handler = new Handler();
                                handler.postDelayed(new Runnable() {
                                    @Override
                                    public void run() {
                                        fillFbFriendList();
                                    }
                                }, 2000);
                            }
                        }
                    }
            ).executeAsync();
        } catch (Exception e) {
            // Try again in 2s.
            final Handler handler = new Handler();
            handler.postDelayed(new Runnable() {
                @Override
                public void run() {
                    fillFbFriendList();
                }
            }, 2000);
        }
    }

    /**
     * A single page of the pager. Section 0 is the friends list, section 1 the main
     * button page, anything else the history page.
     */
    public static class PlaceholderFragment extends Fragment {
        private static final String ARG_SECTION_NUMBER = "section_number";

        public PlaceholderFragment() {
        }

        /** Factory that stores the page index in the fragment's arguments. */
        public static PlaceholderFragment newInstance(int sectionNumber) {
            PlaceholderFragment fragment = new PlaceholderFragment();
            Bundle args = new Bundle();
            args.putInt(ARG_SECTION_NUMBER, sectionNumber);
            fragment.setArguments(args);
            return fragment;
        }

        @Override
        public View onCreateView(LayoutInflater inflater, ViewGroup container,
                                 Bundle savedInstanceState) {
            int sectionNumber = getArguments().getInt(ARG_SECTION_NUMBER);
            switch (sectionNumber) {
                case 0:
                    View rootFriendsView =
                            inflater.inflate(R.layout.fragment_friends_list, container, false);
                    ((TextView) rootFriendsView.findViewById(R.id.unselectFriendsAllButton)).
                            setTypeface(workSansExtraBoldTypeface);
                    ((TextView) rootFriendsView.findViewById(R.id.selectFriendsAllButton)).
                            setTypeface(workSansExtraBoldTypeface);
                    return rootFriendsView;
                case 1:
                    View rootView =
                            inflater.inflate(R.layout.fragment_main, container, false);
                    gotLaidButton = (Button) rootView.findViewById(R.id.gotLaidButton);
                    gotLaidButton.setTypeface(workSansExtraBoldTypeface);
                    letYourFriendsKnowTv =
                            (TextView) rootView.findViewById(R.id.letYourFriendsKnowTv);
                    letYourFriendsKnowTv.setTypeface(workSansExtraBoldTypeface);
                    return rootView;
                default:
                    final View historyRootView =
                            inflater.inflate(R.layout.fragment_history, container, false);
                    historyListRecyclerViewHolder = (RelativeLayout)
                            historyRootView.findViewById(R.id.historyListRecyclerViewHolder);
                    historyListProgresBar = (ProgressBar)
                            historyRootView.findViewById(R.id.historyListProgresBar);
                    return historyRootView;
            }
        }
    }

    /** Pager adapter exposing the three placeholder pages. */
    public class SectionsPagerAdapter extends FragmentPagerAdapter {

        public SectionsPagerAdapter(FragmentManager fm) {
            super(fm);
        }

        @Override
        public Fragment getItem(int position) {
            return PlaceholderFragment.newInstance(position);
        }

        @Override
        public int getCount() {
            return 3;
        }
    }
}
/*******************************************************************************
 *
 * Pentaho Big Data
 *
 * Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/

package org.pentaho.di.core.database;

import java.sql.Driver;

import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.plugins.DatabaseMetaPlugin;
import org.pentaho.di.core.row.ValueMetaInterface;

/**
 * Database metadata plugin describing how Kettle talks to Hive 2 (HiveServer2): JDBC URL
 * shape, type mapping for DDL generation, and the Hive QL dialect quirks (no quoting, no
 * truncate, limited ALTER support).
 */
@DatabaseMetaPlugin( type = "HIVE2", typeDescription = "Hadoop Hive 2" )
public class Hive2DatabaseMeta extends BaseDatabaseMeta implements DatabaseInterface {

  protected static final String JAR_FILE = "hive-jdbc-0.10.0-pentaho.jar";

  protected static final String DRIVER_CLASS_NAME = "org.apache.hive.jdbc.HiveDriver";

  // Lazily resolved from the loaded driver; null until initDriverInfo() runs.
  protected Integer driverMajorVersion;
  protected Integer driverMinorVersion;

  public Hive2DatabaseMeta() throws Throwable {
  }

  /**
   * Package protected constructor for unit testing.
   *
   * @param majorVersion
   *          The majorVersion to set for the driver
   * @param minorVersion
   *          The minorVersion to set for the driver
   * @throws Throwable
   */
  Hive2DatabaseMeta( int majorVersion, int minorVersion ) throws Throwable {
    driverMajorVersion = majorVersion;
    driverMinorVersion = minorVersion;
  }

  @Override
  public int[] getAccessTypeList() {
    return new int[] { DatabaseMeta.TYPE_ACCESS_NATIVE };
  }

  @Override
  public String getAddColumnStatement( String tablename, ValueMetaInterface v, String tk, boolean useAutoinc,
      String pk, boolean semicolon ) {
    return "ALTER TABLE " + tablename + " ADD " + getFieldDefinition( v, tk, pk, useAutoinc, true, false );
  }

  @Override
  public String getDriverClass() {
    // !!! We will probably have to change this if we are providing our own driver,
    // i.e., before our code is committed to the Hadoop Hive project.
    return DRIVER_CLASS_NAME;
  }

  /**
   * Maps a Kettle value meta to a Hive column type for generated DDL.
   *
   * This method assumes that Hive has no concept of primary and technical keys and auto increment columns. We are
   * ignoring the tk, pk and useAutoinc parameters.
   */
  @Override
  public String getFieldDefinition( ValueMetaInterface v, String tk, String pk, boolean useAutoinc,
      boolean addFieldname, boolean addCr ) {
    String retval = "";

    String fieldname = v.getName();
    int length = v.getLength();
    int precision = v.getPrecision();

    if ( addFieldname ) {
      retval += fieldname + " ";
    }

    int type = v.getType();
    switch ( type ) {
      case ValueMetaInterface.TYPE_BOOLEAN:
        retval += "BOOLEAN";
        break;

      // Hive does not support DATE
      case ValueMetaInterface.TYPE_DATE:
        retval += "STRING";
        break;

      case ValueMetaInterface.TYPE_STRING:
        retval += "STRING";
        break;

      case ValueMetaInterface.TYPE_NUMBER:
      case ValueMetaInterface.TYPE_INTEGER:
      case ValueMetaInterface.TYPE_BIGNUMBER:
        // Integer values...
        if ( precision == 0 ) {
          if ( length > 9 ) {
            if ( length < 19 ) {
              // can hold signed values between -9223372036854775808 and 9223372036854775807
              // 18 significant digits
              retval += "BIGINT";
            } else {
              // 19+ digits exceed BIGINT's range; fall back to floating point.
              retval += "FLOAT";
            }
          } else {
            // Up to 9 digits fits a 32-bit INT.
            retval += "INT";
          }
        } else {
          // Floating point values...
          if ( length > 15 ) {
            retval += "FLOAT";
          } else {
            // A double-precision floating-point number is accurate to approximately 15 decimal places.
            // http://mysql.mirrors-r-us.net/doc/refman/5.1/en/numeric-type-overview.html
            retval += "DOUBLE";
          }
        }
        break;
    }

    return retval;
  }

  @Override
  public String getModifyColumnStatement( String tablename, ValueMetaInterface v, String tk, boolean useAutoinc,
      String pk, boolean semicolon ) {
    // NOTE(review): Hive QL does not accept "ALTER TABLE ... MODIFY"; its syntax is
    // "ALTER TABLE ... CHANGE <old> <new> <type>". Left as-is to preserve behavior —
    // confirm against the targeted Hive version before relying on this statement.
    return "ALTER TABLE " + tablename + " MODIFY " + getFieldDefinition( v, tk, pk, useAutoinc, true, false );
  }

  @Override
  public String getURL( String hostname, String port, String databaseName ) throws KettleDatabaseException {
    return "jdbc:hive2://" + hostname + ":" + port + "/" + databaseName;
  }

  @Override
  public String[] getUsedLibraries() {
    return new String[] { JAR_FILE };
  }

  /**
   * Build the SQL to count the number of rows in the passed table.
   *
   * @param tableName
   * @return
   */
  @Override
  public String getSelectCountStatement( String tableName ) {
    return "select count(1) from " + tableName;
  }

  @Override
  public String generateColumnAlias( int columnIndex, String suggestedName ) {
    if ( isDriverVersion( 0, 6 ) ) {
      return suggestedName;
    } else {
      // For version 0.5 and prior:
      // Column aliases are currently not supported in Hive. The default column alias
      // generated is in the format '_col##' where ## = column index. Use this format
      // so the result can be mapped back correctly.
      return "_col" + String.valueOf( columnIndex ); //$NON-NLS-1$
    }
  }

  /**
   * Loads the Hive JDBC driver and caches its major/minor version numbers, falling back
   * to 0.0 when the driver cannot be instantiated.
   */
  protected synchronized void initDriverInfo() {
    Integer majorVersion = 0;
    Integer minorVersion = 0;

    try {
      // Load the driver version number. Class.forName() throws on failure rather than
      // returning null, so no null check is needed on the result.
      Driver driver = (Driver) Class.forName( DRIVER_CLASS_NAME ).getConstructor().newInstance();
      majorVersion = driver.getMajorVersion();
      minorVersion = driver.getMinorVersion();
    } catch ( Exception e ) {
      // Failed to load the driver version, leave at the defaults
    }

    driverMajorVersion = majorVersion;
    driverMinorVersion = minorVersion;
  }

  /**
   * Check that the version of the driver being used is at least the driver you want. If you do not care about the minor
   * version, pass in a 0 (The assumption being that the minor version will ALWAYS be 0 or greater)
   *
   * @return true: the version being used is equal to or newer than the one you requested false: the version being used
   *         is older than the one you requested
   */
  protected boolean isDriverVersion( int majorVersion, int minorVersion ) {
    if ( driverMajorVersion == null ) {
      initDriverInfo();
    }

    if ( majorVersion < driverMajorVersion ) {
      // Driver major version is newer than the requested version
      return true;
    } else if ( majorVersion == driverMajorVersion ) {
      // Driver major version is the same as requested, check the minor version
      if ( minorVersion <= driverMinorVersion ) {
        // Driver minor version is the same, or newer than requested
        return true;
      }
    }

    return false;
  }

  /**
   * Quotes around table names are not valid Hive QL
   *
   * return an empty string for the start quote
   */
  public String getStartQuote() {
    return "";
  }

  /**
   * Quotes around table names are not valid Hive QL
   *
   * return an empty string for the end quote
   */
  public String getEndQuote() {
    return "";
  }

  /**
   * @return a list of table types to retrieve tables for the database
   */
  @Override
  public String[] getTableTypes() {
    return null;
  }

  /**
   * @return a list of table types to retrieve views for the database
   */
  @Override
  public String[] getViewTypes() {
    return new String[] { "VIEW", "VIRTUAL_VIEW" };
  }

  /**
   * @param tableName
   *          The table to be truncated.
   * @return The SQL statement to truncate a table: remove all rows from it without a transaction
   */
  @Override
  public String getTruncateTableStatement( String tableName ) {
    // Hive has no TRUNCATE in this dialect; null signals "unsupported" to the caller.
    return null;
  }

  @Override
  public boolean supportsSetCharacterStream() {
    return false;
  }

  @Override
  public boolean supportsBatchUpdates() {
    return false;
  }
}
/*
 * Copyright 2013-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.cli;

import com.facebook.buck.android.AndroidDirectoryResolver;
import com.facebook.buck.android.AndroidPlatformTarget;
import com.facebook.buck.io.MoreFiles;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import com.google.common.collect.Lists;
import com.google.common.io.Files;

import org.kohsuke.args4j.Argument;
import org.kohsuke.args4j.Option;

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;

import javax.annotation.Nullable;

/**
 * This class creates a terminal command for Buck that creates a sample Buck project in the
 * directory the user specifies. It copies from {@link #PATH_TO_QUICKSTART_DIR} to the
 * directory specified. It then asks the user for the location of the Android SDK so Buck can
 * successfully build the quickstart project. It will fail if it cannot find the template directory,
 * or if it is unable to write to the destination directory.
 */
public class QuickstartCommand extends AbstractCommand {

  private static final Path PATH_TO_QUICKSTART_DIR = Paths.get(
      System.getProperty(
          "buck.quickstart_origin_dir",
          new File("src/com/facebook/buck/cli/quickstart/android").getAbsolutePath()));

  @Option(name = "--dest-dir", usage = "Destination project directory")
  private String destDir = "";

  @Nullable
  @Option(name = "--android-sdk", usage = "Android SDK directory")
  private String androidSdkDir;

  @Argument
  private List<String> arguments = Lists.newArrayList();

  public List<String> getArguments() {
    return arguments;
  }

  @VisibleForTesting
  void setArguments(List<String> arguments) {
    this.arguments = arguments;
  }

  public String getDestDir() {
    return destDir;
  }

  /**
   * Returns the Android SDK directory given on the command line, or — when absent — resolves
   * it via the directory resolver and caches the result (empty string when not found).
   */
  public String getAndroidSdkDir(AndroidDirectoryResolver androidDirectoryResolver) {
    if (androidSdkDir == null) {
      Optional<Path> androidSdkDir = androidDirectoryResolver.findAndroidSdkDirSafe();
      this.androidSdkDir = androidSdkDir.isPresent()
          ? androidSdkDir.get().toAbsolutePath().toString()
          : "";
    }

    return androidSdkDir;
  }

  /**
   * Runs the command "buck quickstart", which copies a template project into a new directory to
   * give the user a functional buck project. It copies from
   * src/com/facebook/buck/cli/quickstart/android to the directory specified. It then asks the user
   * for the location of the Android SDK so Buck can successfully build the quickstart project. It
   * will fail if it cannot find the template directory or if it is unable to write to the
   * destination directory.
   *
   * @return status code - zero means no problem
   * @throws IOException if the command fails to read from the template project or write to the
   *     new project
   */
  @Override
  public int runWithoutHelp(CommandRunnerParams params) throws IOException {
    String projectDir = getDestDir().trim();
    if (projectDir.isEmpty()) {
      projectDir = promptForPath(
          params,
          "Enter the directory where you would like to create the project: ");
    }

    // Keep prompting until the directory exists/can be created, or the user gives up
    // (an empty answer breaks the loop and aborts below).
    File dir = new File(projectDir);
    while (!dir.isDirectory() && !dir.mkdirs() && !projectDir.isEmpty()) {
      projectDir = promptForPath(
          params,
          "Cannot create project directory. Enter another directory: ");
      dir = new File(projectDir);
    }
    if (projectDir.isEmpty()) {
      params
          .getConsole()
          .getStdErr()
          .println("No project directory specified. Aborting quickstart.");
      return 1;
    }

    String sdkLocation = getAndroidSdkDir(params.getRepository().getAndroidDirectoryResolver());
    if (sdkLocation.isEmpty()) {
      sdkLocation = promptForPath(params, "Enter your Android SDK's location: ");
    }

    File sdkLocationFile = new File(sdkLocation);
    if (!sdkLocationFile.isDirectory()) {
      // Warn but continue: the generated project is still usable once the SDK is installed.
      params
          .getConsole()
          .getStdErr()
          .println("WARNING: That Android SDK directory does not exist.");
    }

    sdkLocation = sdkLocationFile.getAbsoluteFile().toString();

    Path origin = PATH_TO_QUICKSTART_DIR;
    Path destination = Paths.get(projectDir);
    MoreFiles.copyRecursively(origin, destination);

    // Specify the default Android target so everyone on the project builds against the same SDK.
    // Use File(parent, child) instead of string concatenation for platform-safe paths.
    File buckConfig = new File(dir, ".buckconfig");
    Files.append(
        "[android]\n    target = " + AndroidPlatformTarget.DEFAULT_ANDROID_PLATFORM_TARGET + "\n",
        buckConfig,
        StandardCharsets.UTF_8);

    File localProperties = new File(dir, "local.properties");
    Files.write(
        "sdk.dir=" + sdkLocation + "\n",
        localProperties,
        StandardCharsets.UTF_8);

    params.getConsole().getStdOut().print(
        Files.toString(origin.resolve("README.md").toFile(), StandardCharsets.UTF_8));
    params.getConsole().getStdOut().flush();

    return 0;
  }

  @Override
  public boolean isReadOnly() {
    return false;
  }

  /**
   * Prints the prompt and reads one line from stdin.
   *
   * @return the user's trimmed, tilde-expanded answer; empty string on end-of-input.
   */
  private String promptForPath(CommandRunnerParams params, String prompt) throws IOException {
    params.getConsole().getStdOut().print(prompt);
    params.getConsole().getStdOut().flush();
    // Deliberately NOT closed: closing this reader would close System.in for the
    // remainder of the process, breaking subsequent prompts.
    BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
    String path = br.readLine();
    if (path != null) {
      return expandTilde(path.trim());
    }
    return "";
  }

  /**
   * A simple function to convert "~" to the home directory (i.e. /home/user) in paths. It does not
   * support the shortcut for other users' home directories, "~user". If the path does not start
   * with "~/", then this function returns the same string it was given.
   *
   * @param path an absolute path string that begins with "~/"
   * @return an absolute path with an expanded home directory
   */
  protected static String expandTilde(String path) {
    return expandTildeInternal(System.getProperty("user.home"), path);
  }

  @VisibleForTesting
  static String expandTildeInternal(String homeDir, String path) {
    if (path.startsWith("~/")) {
      return homeDir + path.substring(1);
    } else if (path.equals("~")) {
      return homeDir;
    } else {
      return path;
    }
  }

  @Override
  public String getShortDescription() {
    return "generates a default project directory";
  }
}
/* * Copyright (c) 2010-2019 Evolveum and contributors * * This work is dual-licensed under the Apache License 2.0 * and European Union Public License. See LICENSE file for details. */ package com.evolveum.midpoint.model.common.mapping; import static org.testng.AssertJUnit.assertEquals; import java.io.File; import java.io.IOException; import java.util.Collection; import javax.xml.bind.JAXBException; import javax.xml.namespace.QName; import com.evolveum.midpoint.prism.crypto.Protector; import com.evolveum.midpoint.prism.delta.*; import com.evolveum.midpoint.prism.path.ItemName; import com.evolveum.midpoint.prism.path.ItemPath; import com.evolveum.prism.xml.ns._public.types_3.ProtectedStringType; import org.xml.sax.SAXException; import com.evolveum.midpoint.repo.common.DirectoryFileObjectResolver; import com.evolveum.midpoint.repo.common.ObjectResolver; import com.evolveum.midpoint.repo.common.expression.ExpressionFactory; import com.evolveum.midpoint.repo.common.expression.Source; import com.evolveum.midpoint.repo.common.expression.ValuePolicyResolver; import com.evolveum.midpoint.common.Clock; import com.evolveum.midpoint.model.common.expression.ExpressionTestUtil; import com.evolveum.midpoint.prism.ItemDefinition; import com.evolveum.midpoint.prism.OriginType; import com.evolveum.midpoint.prism.PrismContext; import com.evolveum.midpoint.prism.PrismObject; import com.evolveum.midpoint.prism.PrismObjectDefinition; import com.evolveum.midpoint.prism.PrismPropertyDefinition; import com.evolveum.midpoint.prism.PrismPropertyValue; import com.evolveum.midpoint.prism.crypto.EncryptionException; import com.evolveum.midpoint.prism.util.ObjectDeltaObject; import com.evolveum.midpoint.prism.util.PrismTestUtil; import com.evolveum.midpoint.schema.MidPointPrismContextFactory; import com.evolveum.midpoint.schema.constants.ExpressionConstants; import com.evolveum.midpoint.schema.constants.MidPointConstants; import com.evolveum.midpoint.schema.constants.SchemaConstants; import 
com.evolveum.midpoint.schema.result.OperationResult; import com.evolveum.midpoint.test.util.MidPointTestConstants; import com.evolveum.midpoint.test.util.TestUtil; import com.evolveum.midpoint.util.PrettyPrinter; import com.evolveum.midpoint.util.exception.CommunicationException; import com.evolveum.midpoint.util.exception.ConfigurationException; import com.evolveum.midpoint.util.exception.ExpressionEvaluationException; import com.evolveum.midpoint.util.exception.ObjectNotFoundException; import com.evolveum.midpoint.util.exception.SchemaException; import com.evolveum.midpoint.util.exception.SecurityViolationException; import com.evolveum.midpoint.xml.ns._public.common.common_3.MappingType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ResourceType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowType; import com.evolveum.midpoint.xml.ns._public.common.common_3.UserType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ValuePolicyType; /** * The class that takes care of all the ornaments of value construction execution. It is used to make the * tests easy to write. 
* * @author Radovan Semancik * */ public class MappingTestEvaluator { public static File TEST_DIR = new File(MidPointTestConstants.TEST_RESOURCES_DIR, "mapping"); public static final File USER_OLD_FILE = new File(TEST_DIR, "user-jack.xml"); public static final File ACCOUNT_FILE = new File(TEST_DIR, "account-jack.xml"); public static final String USER_OLD_OID = "2f9b9299-6f45-498f-bc8e-8d17c6b93b20"; private static final File PASSWORD_POLICY_FILE = new File(TEST_DIR, "password-policy.xml"); private PrismContext prismContext; private MappingFactory mappingFactory; private Protector protector; public PrismContext getPrismContext() { return prismContext; } public void init() throws SchemaException, SAXException, IOException { PrettyPrinter.setDefaultNamespacePrefix(MidPointConstants.NS_MIDPOINT_PUBLIC_PREFIX); PrismTestUtil.resetPrismContext(MidPointPrismContextFactory.FACTORY); prismContext = PrismTestUtil.createInitializedPrismContext(); ObjectResolver resolver = new DirectoryFileObjectResolver(MidPointTestConstants.OBJECTS_DIR); protector = ExpressionTestUtil.createInitializedProtector(prismContext); Clock clock = new Clock(); ExpressionFactory expressionFactory = ExpressionTestUtil.createInitializedExpressionFactory(resolver, protector, prismContext, clock, null, null); mappingFactory = new MappingFactory(); mappingFactory.setExpressionFactory(expressionFactory); mappingFactory.setObjectResolver(resolver); mappingFactory.setPrismContext(prismContext); mappingFactory.setProfiling(true); mappingFactory.setProtector(protector); } public Protector getProtector() { return protector; } public <T> MappingImpl<PrismPropertyValue<T>, PrismPropertyDefinition<T>> createMapping(String filename, String testName, final ValuePolicyType policy, String defaultTargetPropertyName, ObjectDelta<UserType> userDelta) throws SchemaException, IOException, JAXBException, EncryptionException { return this.<T>createMappingBuilder(filename, testName, policy, toPath(defaultTargetPropertyName), 
userDelta).build(); } public <T> MappingImpl<PrismPropertyValue<T>, PrismPropertyDefinition<T>> createMapping(String filename, String testName, String defaultTargetPropertyName, ObjectDelta<UserType> userDelta) throws SchemaException, IOException, JAXBException, EncryptionException { return this.<T>createMappingBuilder(filename, testName, null, toPath(defaultTargetPropertyName), userDelta).build(); } public <T> MappingImpl.Builder<PrismPropertyValue<T>, PrismPropertyDefinition<T>> createMappingBuilder(String filename, String testName, String defaultTargetPropertyName, ObjectDelta<UserType> userDelta) throws SchemaException, IOException, JAXBException, EncryptionException { return createMappingBuilder(filename, testName, null, toPath(defaultTargetPropertyName), userDelta); } public <T> MappingImpl<PrismPropertyValue<T>, PrismPropertyDefinition<T>> createMapping(String filename, String testName, ItemName defaultTargetPropertyName, ObjectDelta<UserType> userDelta) throws SchemaException, IOException, JAXBException, EncryptionException { return this.<T>createMappingBuilder(filename, testName, null, defaultTargetPropertyName, userDelta).build(); } public <T> MappingImpl<PrismPropertyValue<T>, PrismPropertyDefinition<T>> createMapping(String filename, String testName, String defaultTargetPropertyName, ObjectDelta<UserType> userDelta, PrismObject<UserType> userOld) throws SchemaException, IOException, JAXBException { return this.<T>createMappingBuilder(filename, testName, null, toPath(defaultTargetPropertyName), userDelta, userOld).build(); } public <T> MappingImpl.Builder<PrismPropertyValue<T>, PrismPropertyDefinition<T>> createMappingBuilder(String filename, String testName, String defaultTargetPropertyName, ObjectDelta<UserType> userDelta, PrismObject<UserType> userOld) throws SchemaException, IOException, JAXBException { return this.createMappingBuilder(filename, testName, null, toPath(defaultTargetPropertyName), userDelta, userOld); } public <T> 
MappingImpl<PrismPropertyValue<T>, PrismPropertyDefinition<T>> createMapping(String filename, String testName, ItemPath defaultTargetPropertyName, ObjectDelta<UserType> userDelta) throws SchemaException, IOException, JAXBException, EncryptionException { return this.<T>createMappingBuilder(filename, testName, null, defaultTargetPropertyName, userDelta).build(); } public <T> MappingImpl.Builder<PrismPropertyValue<T>, PrismPropertyDefinition<T>> createMappingBuilder(String filename, String testName, final ValuePolicyType policy, ItemPath defaultTargetPropertyPath, ObjectDelta<UserType> userDelta) throws SchemaException, IOException, JAXBException, EncryptionException { PrismObject<UserType> userOld = null; if (userDelta == null || !userDelta.isAdd()) { userOld = getUserOld(); } return createMappingBuilder(filename, testName, policy, defaultTargetPropertyPath, userDelta, userOld); } public <T> MappingImpl.Builder<PrismPropertyValue<T>, PrismPropertyDefinition<T>> createMappingBuilder(String filename, String testName, final ValuePolicyType policy, ItemPath defaultTargetPropertyPath, ObjectDelta<UserType> userDelta, PrismObject<UserType> userOld) throws SchemaException, IOException, JAXBException { MappingType mappingType = PrismTestUtil.parseAtomicValue( new File(TEST_DIR, filename), MappingType.COMPLEX_TYPE); MappingImpl.Builder<PrismPropertyValue<T>,PrismPropertyDefinition<T>> mappingBuilder = mappingFactory.createMappingBuilder(mappingType, testName); mappingBuilder.prismContext(prismContext); // Source context: user ObjectDeltaObject<UserType> userOdo = new ObjectDeltaObject<>(userOld, userDelta, null, prismContext.getSchemaRegistry().findObjectDefinitionByCompileTimeClass(UserType.class)); userOdo.recompute(); mappingBuilder.sourceContext(userOdo); // Variable $user mappingBuilder.addVariableDefinition(ExpressionConstants.VAR_USER, userOdo); // Variable $account PrismObject<ShadowType> account = getAccount(); ObjectDeltaObject<ShadowType> accountOdo = new 
ObjectDeltaObject<>(account, null, null, account.getDefinition()); accountOdo.recompute(); mappingBuilder.addVariableDefinition(ExpressionConstants.VAR_ACCOUNT, accountOdo); // Target context: user PrismObjectDefinition<UserType> userDefinition = getUserDefinition(); mappingBuilder.targetContext(userDefinition); ValuePolicyResolver stringPolicyResolver = new ValuePolicyResolver() { ItemPath outputPath; ItemDefinition outputDefinition; @Override public void setOutputPath(ItemPath outputPath) { this.outputPath = outputPath; } @Override public void setOutputDefinition(ItemDefinition outputDefinition) { this.outputDefinition = outputDefinition; } @Override public ValuePolicyType resolve() { return policy; } }; mappingBuilder.setStringPolicyResolver(stringPolicyResolver); // Default target if (defaultTargetPropertyPath != null) { PrismPropertyDefinition<T> targetDefDefinition = userDefinition.findItemDefinition(defaultTargetPropertyPath); if (targetDefDefinition == null) { throw new IllegalArgumentException("The item path '"+defaultTargetPropertyPath+"' does not have a definition in "+userDefinition); } mappingBuilder.setDefaultTargetDefinition(targetDefDefinition); } return mappingBuilder; } public <T> MappingImpl<PrismPropertyValue<T>, PrismPropertyDefinition<T>> createInboudMapping(String filename, String testName, ItemDelta delta, UserType user, ShadowType account, ResourceType resource, final ValuePolicyType policy) throws SchemaException, IOException, JAXBException{ MappingType mappingType = PrismTestUtil.parseAtomicValue( new File(TEST_DIR, filename), MappingType.COMPLEX_TYPE); MappingImpl.Builder<PrismPropertyValue<T>,PrismPropertyDefinition<T>> builder = mappingFactory.createMappingBuilder(mappingType, testName); Source<PrismPropertyValue<T>,PrismPropertyDefinition<T>> defaultSource = new Source<>(null, delta, null, ExpressionConstants.VAR_INPUT_QNAME, delta.getDefinition()); defaultSource.recompute(); builder.setDefaultSource(defaultSource); 
builder.setTargetContext(getUserDefinition()); builder.addVariableDefinition(ExpressionConstants.VAR_USER, user, UserType.class); builder.addVariableDefinition(ExpressionConstants.VAR_FOCUS, user, UserType.class); builder.addVariableDefinition(ExpressionConstants.VAR_ACCOUNT, account.asPrismObject(), ShadowType.class); builder.addVariableDefinition(ExpressionConstants.VAR_SHADOW, account.asPrismObject(), ShadowType.class); builder.addVariableDefinition(ExpressionConstants.VAR_PROJECTION, account.asPrismObject(), ShadowType.class); ValuePolicyResolver stringPolicyResolver = new ValuePolicyResolver() { ItemPath outputPath; ItemDefinition outputDefinition; @Override public void setOutputPath(ItemPath outputPath) { this.outputPath = outputPath; } @Override public void setOutputDefinition(ItemDefinition outputDefinition) { this.outputDefinition = outputDefinition; } @Override public ValuePolicyType resolve() { return policy; } }; builder.setStringPolicyResolver(stringPolicyResolver); builder.setOriginType(OriginType.INBOUND); builder.setOriginObject(resource); return builder.build(); } protected PrismObject<UserType> getUserOld() throws SchemaException, EncryptionException, IOException { PrismObject<UserType> user = PrismTestUtil.parseObject(USER_OLD_FILE); ProtectedStringType passwordPs = user.asObjectable().getCredentials().getPassword().getValue(); protector.encrypt(passwordPs); return user; } protected PrismObject<ShadowType> getAccount() throws SchemaException, IOException { return PrismTestUtil.parseObject(ACCOUNT_FILE); } public PrismObjectDefinition<UserType> getUserDefinition() { return prismContext.getSchemaRegistry().findObjectDefinitionByCompileTimeClass(UserType.class); } public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMapping(String filename, String testName, ItemPath defaultTargetPropertyPath) throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException, 
SecurityViolationException, ConfigurationException, CommunicationException { MappingImpl<PrismPropertyValue<T>,PrismPropertyDefinition<T>> mapping = createMapping(filename, testName, defaultTargetPropertyPath, null); OperationResult opResult = new OperationResult(testName); mapping.evaluate(null, opResult); assertResult(opResult); PrismValueDeltaSetTriple<PrismPropertyValue<T>> outputTriple = mapping.getOutputTriple(); if (outputTriple != null) { outputTriple.checkConsistence(); } return outputTriple; } public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMapping(String filename, String testName, ItemName defaultTargetPropertyName) throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException, SecurityViolationException, ConfigurationException, CommunicationException { MappingImpl<PrismPropertyValue<T>,PrismPropertyDefinition<T>> mapping = createMapping(filename, testName, defaultTargetPropertyName, null); OperationResult opResult = new OperationResult(testName); mapping.evaluate(null, opResult); assertResult(opResult); PrismValueDeltaSetTriple<PrismPropertyValue<T>> outputTriple = mapping.getOutputTriple(); if (outputTriple != null) { outputTriple.checkConsistence(); } return outputTriple; } public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMapping(String filename, String testName, String defaultTargetPropertyName) throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException, SecurityViolationException, ConfigurationException, CommunicationException { MappingImpl<PrismPropertyValue<T>,PrismPropertyDefinition<T>> mapping = createMapping(filename, testName, defaultTargetPropertyName, null); OperationResult opResult = new OperationResult(testName); mapping.evaluate(null, opResult); assertResult(opResult); PrismValueDeltaSetTriple<PrismPropertyValue<T>> outputTriple = mapping.getOutputTriple(); if 
(outputTriple != null) { outputTriple.checkConsistence(); } return outputTriple; } public void assertResult(OperationResult opResult) { if (opResult.isEmpty()) { // this is OK. Nothing added to result. return; } opResult.computeStatus(); TestUtil.assertSuccess(opResult); } public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMappingDynamicAdd(String filename, String testName, String defaultTargetPropertyName, String changedPropertyName, I... valuesToAdd) throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException, SecurityViolationException, ConfigurationException, CommunicationException { return evaluateMappingDynamicAdd(filename, testName, toPath(defaultTargetPropertyName), changedPropertyName, valuesToAdd); } public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMappingDynamicAdd(String filename, String testName, ItemPath defaultTargetPropertyPath, String changedPropertyName, I... valuesToAdd) throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException, SecurityViolationException, ConfigurationException, CommunicationException { ObjectDelta<UserType> userDelta = prismContext.deltaFactory().object() .createModificationAddProperty(UserType.class, USER_OLD_OID, toPath(changedPropertyName), valuesToAdd); MappingImpl<PrismPropertyValue<T>,PrismPropertyDefinition<T>> mapping = createMapping(filename, testName, defaultTargetPropertyPath, userDelta); OperationResult opResult = new OperationResult(testName); mapping.evaluate(null, opResult); assertResult(opResult); PrismValueDeltaSetTriple<PrismPropertyValue<T>> outputTriple = mapping.getOutputTriple(); if (outputTriple != null) { outputTriple.checkConsistence(); } return outputTriple; } public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMappingDynamicDelete(String filename, String testName, String defaultTargetPropertyName, String 
changedPropertyName, I... valuesToAdd) throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException, SecurityViolationException, ConfigurationException, CommunicationException { ObjectDelta<UserType> userDelta = prismContext.deltaFactory().object() .createModificationDeleteProperty(UserType.class, USER_OLD_OID, toPath(changedPropertyName), valuesToAdd); MappingImpl<PrismPropertyValue<T>,PrismPropertyDefinition<T>> mapping = createMapping(filename, testName, defaultTargetPropertyName, userDelta); OperationResult opResult = new OperationResult(testName); mapping.evaluate(null, opResult); assertResult(opResult); PrismValueDeltaSetTriple<PrismPropertyValue<T>> outputTriple = mapping.getOutputTriple(); if (outputTriple != null) { outputTriple.checkConsistence(); } return outputTriple; } public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMappingDynamicReplace(String filename, String testName, String defaultTargetPropertyName, String changedPropertyName, I... 
valuesToReplace) throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException, SecurityViolationException, ConfigurationException, CommunicationException { ObjectDelta<UserType> userDelta = prismContext.deltaFactory().object() .createModificationReplaceProperty(UserType.class, USER_OLD_OID, toPath(changedPropertyName), valuesToReplace); MappingImpl<PrismPropertyValue<T>,PrismPropertyDefinition<T>> mapping = createMapping(filename, testName, defaultTargetPropertyName, userDelta); OperationResult opResult = new OperationResult(testName); mapping.evaluate(null, opResult); assertResult(opResult); PrismValueDeltaSetTriple<PrismPropertyValue<T>> outputTriple = mapping.getOutputTriple(); if (outputTriple != null) { outputTriple.checkConsistence(); } return outputTriple; } public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMappingDynamicReplace(String filename, String testName, String defaultTargetPropertyName, ItemPath changedPropertyName, I... 
valuesToReplace) throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException, SecurityViolationException, ConfigurationException, CommunicationException { ObjectDelta<UserType> userDelta = prismContext.deltaFactory().object() .createModificationReplaceProperty(UserType.class, USER_OLD_OID, changedPropertyName, valuesToReplace); MappingImpl<PrismPropertyValue<T>,PrismPropertyDefinition<T>> mapping = createMapping(filename, testName, defaultTargetPropertyName, userDelta); OperationResult opResult = new OperationResult(testName); mapping.evaluate(null, opResult); assertResult(opResult); PrismValueDeltaSetTriple<PrismPropertyValue<T>> outputTriple = mapping.getOutputTriple(); if (outputTriple != null) { outputTriple.checkConsistence(); } return outputTriple; } public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMappingDynamicReplace(String filename, String testName, ItemPath defaultTargetPropertyName, String changedPropertyName, I... 
valuesToReplace) throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException, SecurityViolationException, ConfigurationException, CommunicationException { ObjectDelta<UserType> userDelta = prismContext.deltaFactory().object() .createModificationReplaceProperty(UserType.class, USER_OLD_OID, toPath(changedPropertyName), valuesToReplace); MappingImpl<PrismPropertyValue<T>,PrismPropertyDefinition<T>> mapping = createMapping(filename, testName, defaultTargetPropertyName, userDelta); OperationResult opResult = new OperationResult(testName); mapping.evaluate(null, opResult); assertResult(opResult); PrismValueDeltaSetTriple<PrismPropertyValue<T>> outputTriple = mapping.getOutputTriple(); if (outputTriple != null) { outputTriple.checkConsistence(); } return outputTriple; } public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMappingDynamicReplace(String filename, String testName, ItemPath defaultTargetPropertyName, ItemPath changedPropertyName, I... 
valuesToReplace) throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException, SecurityViolationException, ConfigurationException, CommunicationException { ObjectDelta<UserType> userDelta = prismContext.deltaFactory().object() .createModificationReplaceProperty(UserType.class, USER_OLD_OID, changedPropertyName, valuesToReplace); MappingImpl<PrismPropertyValue<T>,PrismPropertyDefinition<T>> mapping = createMapping(filename, testName, defaultTargetPropertyName, userDelta); OperationResult opResult = new OperationResult(testName); mapping.evaluate(null, opResult); assertResult(opResult); PrismValueDeltaSetTriple<PrismPropertyValue<T>> outputTriple = mapping.getOutputTriple(); if (outputTriple != null) { outputTriple.checkConsistence(); } return outputTriple; } public ItemPath toPath(String propertyName) { return ItemPath.create(new QName(SchemaConstants.NS_C, propertyName)); } public static <T> T getSingleValue(String setName, Collection<PrismPropertyValue<T>> set) { assertEquals("Expected single value in "+setName+" but found "+set.size()+" values: "+set, 1, set.size()); PrismPropertyValue<T> propertyValue = set.iterator().next(); return propertyValue.getValue(); } public ValuePolicyType getValuePolicy() throws SchemaException, IOException { PrismObject<ValuePolicyType> passwordPolicy = PrismTestUtil.parseObject(PASSWORD_POLICY_FILE); return passwordPolicy.asObjectable(); } public Object createProtectedString(String string) throws EncryptionException { return protector.encryptString(string); } public void assertProtectedString(String desc, Collection<PrismPropertyValue<ProtectedStringType>> set, String expected) throws EncryptionException { assertEquals("Unexpected size of "+desc+": "+set, 1, set.size()); PrismPropertyValue<ProtectedStringType> pval = set.iterator().next(); ProtectedStringType ps = pval.getValue(); String zeroString = protector.decryptString(ps); assertEquals("Unexpected value in 
"+desc+": "+set, expected, zeroString); } }
/**
 * generated by Xtext 2.17.1
 */
package org.xtext.specmate.specDSL.impl;

import java.util.Collection;

import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;

import org.eclipse.emf.common.util.EList;

import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;

import org.eclipse.emf.ecore.impl.ENotificationImpl;

import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.InternalEList;

import org.xtext.specmate.specDSL.ExplicitNode;
import org.xtext.specmate.specDSL.OptionNode;
import org.xtext.specmate.specDSL.SpecDSLPackage;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Option Node</b></em>'.
 *
 * NOTE(review): This class is EMF/Xtext-generated. Do not hand-edit method
 * bodies — changes will be discarded on the next regeneration unless the
 * corresponding {@code @generated} tag is changed to {@code @generated NOT}.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 *   <li>{@link org.xtext.specmate.specDSL.impl.OptionNodeImpl#getLeftNode <em>Left Node</em>}</li>
 *   <li>{@link org.xtext.specmate.specDSL.impl.OptionNodeImpl#getRightNodes <em>Right Nodes</em>}</li>
 * </ul>
 *
 * @generated
 */
public class OptionNodeImpl extends NodeImpl implements OptionNode
{
  /**
   * The cached value of the '{@link #getLeftNode() <em>Left Node</em>}' containment reference.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getLeftNode()
   * @generated
   * @ordered
   */
  protected ExplicitNode leftNode;

  /**
   * The cached value of the '{@link #getRightNodes() <em>Right Nodes</em>}' containment reference list.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getRightNodes()
   * @generated
   * @ordered
   */
  protected EList<ExplicitNode> rightNodes;

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected OptionNodeImpl()
  {
    super();
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  protected EClass eStaticClass()
  {
    return SpecDSLPackage.Literals.OPTION_NODE;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public ExplicitNode getLeftNode()
  {
    return leftNode;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public NotificationChain basicSetLeftNode(ExplicitNode newLeftNode, NotificationChain msgs)
  {
    ExplicitNode oldLeftNode = leftNode;
    leftNode = newLeftNode;
    if (eNotificationRequired())
    {
      ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, SpecDSLPackage.OPTION_NODE__LEFT_NODE, oldLeftNode, newLeftNode);
      if (msgs == null) msgs = notification; else msgs.add(notification);
    }
    return msgs;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setLeftNode(ExplicitNode newLeftNode)
  {
    if (newLeftNode != leftNode)
    {
      NotificationChain msgs = null;
      if (leftNode != null)
        msgs = ((InternalEObject)leftNode).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - SpecDSLPackage.OPTION_NODE__LEFT_NODE, null, msgs);
      if (newLeftNode != null)
        msgs = ((InternalEObject)newLeftNode).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - SpecDSLPackage.OPTION_NODE__LEFT_NODE, null, msgs);
      msgs = basicSetLeftNode(newLeftNode, msgs);
      if (msgs != null) msgs.dispatch();
    }
    else if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, SpecDSLPackage.OPTION_NODE__LEFT_NODE, newLeftNode, newLeftNode));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public EList<ExplicitNode> getRightNodes()
  {
    if (rightNodes == null)
    {
      rightNodes = new EObjectContainmentEList<ExplicitNode>(ExplicitNode.class, this, SpecDSLPackage.OPTION_NODE__RIGHT_NODES);
    }
    return rightNodes;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs)
  {
    switch (featureID)
    {
      case SpecDSLPackage.OPTION_NODE__LEFT_NODE:
        return basicSetLeftNode(null, msgs);
      case SpecDSLPackage.OPTION_NODE__RIGHT_NODES:
        return ((InternalEList<?>)getRightNodes()).basicRemove(otherEnd, msgs);
    }
    return super.eInverseRemove(otherEnd, featureID, msgs);
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public Object eGet(int featureID, boolean resolve, boolean coreType)
  {
    switch (featureID)
    {
      case SpecDSLPackage.OPTION_NODE__LEFT_NODE:
        return getLeftNode();
      case SpecDSLPackage.OPTION_NODE__RIGHT_NODES:
        return getRightNodes();
    }
    return super.eGet(featureID, resolve, coreType);
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @SuppressWarnings("unchecked")
  @Override
  public void eSet(int featureID, Object newValue)
  {
    switch (featureID)
    {
      case SpecDSLPackage.OPTION_NODE__LEFT_NODE:
        setLeftNode((ExplicitNode)newValue);
        return;
      case SpecDSLPackage.OPTION_NODE__RIGHT_NODES:
        getRightNodes().clear();
        getRightNodes().addAll((Collection<? extends ExplicitNode>)newValue);
        return;
    }
    super.eSet(featureID, newValue);
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void eUnset(int featureID)
  {
    switch (featureID)
    {
      case SpecDSLPackage.OPTION_NODE__LEFT_NODE:
        setLeftNode((ExplicitNode)null);
        return;
      case SpecDSLPackage.OPTION_NODE__RIGHT_NODES:
        getRightNodes().clear();
        return;
    }
    super.eUnset(featureID);
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public boolean eIsSet(int featureID)
  {
    switch (featureID)
    {
      case SpecDSLPackage.OPTION_NODE__LEFT_NODE:
        return leftNode != null;
      case SpecDSLPackage.OPTION_NODE__RIGHT_NODES:
        return rightNodes != null && !rightNodes.isEmpty();
    }
    return super.eIsSet(featureID);
  }

} //OptionNodeImpl
/* * Copyright 2016 LINE Corporation * * LINE Corporation licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.linecorp.armeria.internal.client.thrift; import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; import javax.annotation.Nullable; import org.apache.thrift.TApplicationException; import org.apache.thrift.TBase; import org.apache.thrift.TException; import org.apache.thrift.TFieldIdEnum; import org.apache.thrift.protocol.TMessage; import org.apache.thrift.protocol.TMessageType; import org.apache.thrift.protocol.TProtocol; import org.apache.thrift.protocol.TProtocolFactory; import org.apache.thrift.transport.TTransport; import org.apache.thrift.transport.TTransportException; import com.google.common.base.Strings; import com.linecorp.armeria.client.ClientRequestContext; import com.linecorp.armeria.client.DecoratingClient; import com.linecorp.armeria.client.Endpoint; import com.linecorp.armeria.client.HttpClient; import com.linecorp.armeria.client.InvalidResponseHeadersException; import com.linecorp.armeria.client.RpcClient; import com.linecorp.armeria.common.CompletableRpcResponse; import com.linecorp.armeria.common.HttpData; import com.linecorp.armeria.common.HttpMethod; import com.linecorp.armeria.common.HttpRequest; import com.linecorp.armeria.common.HttpResponse; import com.linecorp.armeria.common.HttpStatus; import 
com.linecorp.armeria.common.MediaType;
import com.linecorp.armeria.common.RequestHeaders;
import com.linecorp.armeria.common.RpcRequest;
import com.linecorp.armeria.common.RpcResponse;
import com.linecorp.armeria.common.SerializationFormat;
import com.linecorp.armeria.common.logging.RequestLogProperty;
import com.linecorp.armeria.common.thrift.ThriftCall;
import com.linecorp.armeria.common.thrift.ThriftProtocolFactories;
import com.linecorp.armeria.common.thrift.ThriftReply;
import com.linecorp.armeria.common.util.CompletionActions;
import com.linecorp.armeria.common.util.Exceptions;
import com.linecorp.armeria.internal.common.thrift.TApplicationExceptions;
import com.linecorp.armeria.internal.common.thrift.TByteBufTransport;
import com.linecorp.armeria.internal.common.thrift.ThriftFieldAccess;
import com.linecorp.armeria.internal.common.thrift.ThriftFunction;
import com.linecorp.armeria.internal.common.thrift.ThriftServiceMetadata;

import io.netty.buffer.ByteBuf;

/**
 * Adapts an {@link HttpClient} into an {@link RpcClient} that speaks Thrift over HTTP:
 * serializes an {@link RpcRequest} into a Thrift TMessage body, POSTs it, then decodes
 * the aggregated HTTP response back into an {@link RpcResponse}.
 */
final class THttpClientDelegate extends DecoratingClient<HttpRequest, HttpResponse, RpcRequest, RpcResponse>
        implements RpcClient {

    // Per-client sequence-ID generator; each call gets a fresh seqId that the
    // response message must echo back (checked in readApplicationException).
    private final AtomicInteger nextSeqId = new AtomicInteger();

    private final SerializationFormat serializationFormat;
    private final TProtocolFactory protocolFactory;
    private final MediaType mediaType;
    // Cache of per-service-interface Thrift metadata (thread-safe).
    private final Map<Class<?>, ThriftServiceMetadata> metadataMap = new ConcurrentHashMap<>();

    THttpClientDelegate(HttpClient httpClient, SerializationFormat serializationFormat) {
        super(httpClient);
        this.serializationFormat = serializationFormat;
        protocolFactory = ThriftProtocolFactories.get(serializationFormat);
        mediaType = serializationFormat.mediaType();
    }

    /**
     * Serializes {@code call} into a Thrift message, sends it as an HTTP POST and
     * completes the returned reply asynchronously once the response is decoded.
     * Never throws — all failures complete the reply exceptionally.
     */
    @Override
    public RpcResponse execute(ClientRequestContext ctx, RpcRequest call) {
        final int seqId = nextSeqId.incrementAndGet();
        final String method = call.method();
        final List<Object> args = call.params();
        final CompletableRpcResponse reply = new CompletableRpcResponse();

        ctx.logBuilder().serializationFormat(serializationFormat);

        final ThriftFunction func;
        try {
            func = metadata(call.serviceType()).function(method);
            if (func == null) {
                throw new IllegalArgumentException("Thrift method not found: " + method);
            }
        } catch (Throwable cause) {
            reply.completeExceptionally(cause);
            return reply;
        }

        try {
            final TMessage header = new TMessage(fullMethod(ctx, method), func.messageType(), seqId);

            final ByteBuf buf = ctx.alloc().buffer(128);

            try {
                final TByteBufTransport outTransport = new TByteBufTransport(buf);
                final TProtocol tProtocol = protocolFactory.getProtocol(outTransport);
                tProtocol.writeMessageBegin(header);
                @SuppressWarnings("rawtypes")
                final TBase tArgs = func.newArgs(args);
                tArgs.write(tProtocol);
                tProtocol.writeMessageEnd();

                ctx.logBuilder().requestContent(call, new ThriftCall(header, tArgs));
            } catch (Throwable t) {
                // Serialization failed before the buffer was handed off to the
                // request — release it here to avoid a leak, then rethrow.
                buf.release();
                Exceptions.throwUnsafely(t);
            }

            final Endpoint endpoint = ctx.endpoint();
            final HttpRequest httpReq = HttpRequest.of(
                    RequestHeaders.builder(HttpMethod.POST, ctx.path())
                                  .scheme(ctx.sessionProtocol())
                                  .authority(endpoint != null ? endpoint.authority() : "UNKNOWN")
                                  .contentType(mediaType)
                                  .build(),
                    HttpData.wrap(buf).withEndOfStream());

            ctx.updateRequest(httpReq);
            // Response content is logged later, when decoding completes.
            ctx.logBuilder().defer(RequestLogProperty.RESPONSE_CONTENT);

            final HttpResponse httpResponse;
            try {
                httpResponse = unwrap().execute(ctx, httpReq);
            } catch (Throwable t) {
                httpReq.abort();
                throw t;
            }

            httpResponse.aggregateWithPooledObjects(ctx.eventLoop(), ctx.alloc()).handle((res, cause) -> {
                if (cause != null) {
                    handlePreDecodeException(ctx, reply, func, Exceptions.peel(cause));
                    return null;
                }

                // try-with-resources releases the pooled response content buffer.
                try (HttpData content = res.content()) {
                    final HttpStatus status = res.status();
                    if (status.code() != HttpStatus.OK.code()) {
                        handlePreDecodeException(
                                ctx, reply, func,
                                new InvalidResponseHeadersException(res.headers()));
                        return null;
                    }

                    try {
                        handle(ctx, seqId, reply, func, content);
                    } catch (Throwable t) {
                        handlePreDecodeException(ctx, reply, func, t);
                    }
                }

                return null;
            }).exceptionally(CompletionActions::log);
        } catch (Throwable cause) {
            handlePreDecodeException(ctx, reply, func, cause);
        }

        return reply;
    }

    // Prefixes the method name with the service name from the URI fragment, if any
    // (Thrift multiplexed-service convention: "service:method").
    private static String fullMethod(ClientRequestContext ctx, String method) {
        final String service = ctx.fragment();
        if (Strings.isNullOrEmpty(service)) {
            return method;
        } else {
            return service + ':' + method;
        }
    }

    // Fast-path lookup before computeIfAbsent, so the common (cached) case
    // avoids the lambda allocation/locking of computeIfAbsent.
    private ThriftServiceMetadata metadata(Class<?> serviceType) {
        final ThriftServiceMetadata metadata = metadataMap.get(serviceType);
        if (metadata != null) {
            return metadata;
        }

        return metadataMap.computeIfAbsent(serviceType, ThriftServiceMetadata::new);
    }

    /**
     * Decodes a successful (HTTP 200) response body into the reply: handles one-way
     * methods, TApplicationExceptions, declared Thrift exceptions, void methods and
     * regular return values, in that order.
     *
     * @throws TException if the body is empty or cannot be decoded
     */
    private void handle(ClientRequestContext ctx, int seqId, CompletableRpcResponse reply,
                        ThriftFunction func, HttpData content) throws TException {
        if (func.isOneWay()) {
            // One-way calls have no response payload at all.
            handleSuccess(ctx, reply, null, null);
            return;
        }

        if (content.isEmpty()) {
            throw new TApplicationException(TApplicationException.MISSING_RESULT);
        }

        final TTransport inputTransport = new TByteBufTransport(content.byteBuf());
        final TProtocol inputProtocol = protocolFactory.getProtocol(inputTransport);

        final TMessage header = inputProtocol.readMessageBegin();
        final TApplicationException appEx = readApplicationException(seqId, func, inputProtocol, header);
        if (appEx != null) {
            handleException(ctx, reply, new ThriftReply(header, appEx), appEx);
            return;
        }

        final TBase<?, ?> result = func.newResult();
        result.read(inputProtocol);
        inputProtocol.readMessageEnd();

        final ThriftReply rawResponseContent = new ThriftReply(header, result);

        // A set exception field in the result struct means the server threw a
        // declared exception for this method.
        for (TFieldIdEnum fieldIdEnum : func.exceptionFields()) {
            if (ThriftFieldAccess.isSet(result, fieldIdEnum)) {
                final TException cause = (TException) ThriftFieldAccess.get(result, fieldIdEnum);
                handleException(ctx, reply, rawResponseContent, cause);
                return;
            }
        }

        final TFieldIdEnum successField = func.successField();
        if (successField == null) { // void method
            handleSuccess(ctx, reply, null, rawResponseContent);
            return;
        }

        if (ThriftFieldAccess.isSet(result, successField)) {
            final Object returnValue = ThriftFieldAccess.get(result, successField);
            handleSuccess(ctx, reply, returnValue, rawResponseContent);
            return;
        }

        // Non-void method with neither an exception nor a success value set.
        handleException(
                ctx, reply, rawResponseContent,
                new TApplicationException(TApplicationException.MISSING_RESULT,
                                          result.getClass().getName() + '.' + successField.getFieldName()));
    }

    /**
     * Validates the response message header against the request (sequence ID and
     * method name) and extracts a TApplicationException if the server sent one.
     * Returns null when the message is a normal reply.
     */
    @Nullable
    private static TApplicationException readApplicationException(int seqId, ThriftFunction func,
                                                                  TProtocol inputProtocol,
                                                                  TMessage msg) throws TException {
        if (msg.seqid != seqId) {
            throw new TApplicationException(TApplicationException.BAD_SEQUENCE_ID);
        }

        if (!func.name().equals(msg.name)) {
            return new TApplicationException(TApplicationException.WRONG_METHOD_NAME, msg.name);
        }

        if (msg.type == TMessageType.EXCEPTION) {
            final TApplicationException appEx = TApplicationExceptions.read(inputProtocol);
            inputProtocol.readMessageEnd();
            return appEx;
        }

        return null;
    }

    private static void handleSuccess(ClientRequestContext ctx, CompletableRpcResponse reply,
                                      @Nullable Object returnValue, @Nullable ThriftReply rawResponseContent) {
        reply.complete(returnValue);
        ctx.logBuilder().responseContent(reply, rawResponseContent);
    }

    private static void handleException(ClientRequestContext ctx, CompletableRpcResponse reply,
                                        @Nullable ThriftReply rawResponseContent, Exception cause) {
        reply.completeExceptionally(cause);
        ctx.logBuilder().responseContent(reply, rawResponseContent);
    }

    // Failure before (or instead of) successful decoding: no raw response content to log.
    private static void handlePreDecodeException(ClientRequestContext ctx, CompletableRpcResponse reply,
                                                 ThriftFunction thriftMethod, Throwable cause) {
        handleException(ctx, reply, null,
                        decodeException(cause, thriftMethod.declaredExceptions()));
    }

    /**
     * Maps an arbitrary Throwable to the Exception type delivered to the caller:
     * RuntimeException/TException and declared exceptions pass through, Errors are
     * wrapped in RuntimeException, everything else in TTransportException.
     */
    private static Exception decodeException(Throwable cause, @Nullable Class<?>[] declaredThrowableExceptions) {
        if (cause instanceof RuntimeException || cause instanceof TException) {
            return (Exception) cause;
        }

        final boolean isDeclaredException;
        if (declaredThrowableExceptions != null) {
            isDeclaredException = Arrays.stream(declaredThrowableExceptions).anyMatch(v -> v.isInstance(cause));
        } else {
            isDeclaredException = false;
        }
        if (isDeclaredException) {
            return (Exception) cause;
        } else if (cause instanceof Error) {
            return new RuntimeException(cause);
        } else {
            return new TTransportException(cause);
        }
    }
}
package gui.scripting;

import interpreter.Check;
import interpreter.ScriptCommand;
import interpreter.enumerations.*;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.scene.Node;
import javafx.scene.control.Alert;
import javafx.scene.control.TextInputDialog;

import javax.management.InstanceNotFoundException;
import java.util.*;
import java.util.stream.Collectors;

/**
 * Event handlers and helpers for the scripting editor UI.
 * <p>
 * Scripts are built one word (button) at a time into the currently selected
 * {@link Behavior}; after every change the set of clickable choice buttons is
 * narrowed to the words that keep the script grammatically valid
 * ({@code If DATA OP DATA [And ...] Then COMMAND}).
 */
final class MouseEvents {

    // The list of behaviors being edited; its toggle group tracks the selection.
    private BehaviorList behaviorList;
    // Every choice button in the palette; enabled/disabled as the script grows.
    private List<Node> allButtons;

    /**
     * Handles click events for choice buttons. Attempts to add the selected item to the selected behavior
     * in the list.
     */
    EventHandler<ActionEvent> choiceButtonClick = new EventHandler<ActionEvent>() {
        /**
         * @param event Click event to be handled
         */
        @Override
        public void handle(ActionEvent event) {
            try {
                // Get the last selected behavior from the list.
                // NOTE(review): if nothing is selected this cast/dereference throws,
                // which the outer catch deliberately turns into the "none selected" alert.
                Behavior behavior = (Behavior) behaviorList.getToggleGroup().getSelectedToggle();
                if (behavior.isSelected()) {
                    ChoiceButton currentButton = (ChoiceButton) event.getTarget();
                    ScriptButton buttonToAdd = new ScriptButton();
                    if (currentButton.getText().equals(Data.USER_INPUT.text())) {
                        // The "user input" choice prompts for an integer literal instead
                        // of inserting fixed text. Re-prompt until a value in range is
                        // entered, or bail out entirely if the dialog is cancelled.
                        TextInputDialog dialog = new TextInputDialog("0");
                        dialog.setHeaderText("Please enter a number between 0 and 2,000,000,000");
                        dialog.setContentText("User Input: ");
                        boolean isInt = false;
                        while (!isInt) {
                            Optional<String> result = dialog.showAndWait();
                            if (result.isPresent()) {
                                try {
                                    int input = Integer.parseInt(result.get().trim());
                                    if (input < 0 || input > 2000000000) {
                                        continue; // out of range: ask again
                                    }
                                    isInt = true;
                                    buttonToAdd = new ScriptButton(result.get().trim());
                                } catch (Exception ex) {
                                    // not a parseable int: loop and ask again
                                }
                            } else {
                                return; // dialog cancelled: add nothing
                            }
                        }
                    } else {
                        buttonToAdd = new ScriptButton(currentButton.getText());
                    }
                    // Copy the style of the clicked button to the newly generated ScriptButton
                    buttonToAdd.setStyle(currentButton.getStyle());
                    behavior.getChildren().add(buttonToAdd);
                    // Recompute which words are now legal and update the palette.
                    List<ScriptButton> currentScript = behavior.getChildren().stream()
                            .filter(b -> b instanceof ScriptButton)
                            .map(b -> (ScriptButton) b)
                            .collect(Collectors.toList());
                    List<String> allowables = getAllowableText(currentScript);
                    enableButtons(allowables);
                } else {
                    alertNoneSelected();
                }
            } catch (Exception ex) {
                // Any failure above (typically: no selection) is reported the same way.
                alertNoneSelected();
            }
        }
    };

    /**
     * Handles click events for the deleteWord button. Removes the last word in the selected behavior
     */
    EventHandler<ActionEvent> removeLastWordClick = new EventHandler<ActionEvent>() {
        /**
         * @param event Click event to be handled
         */
        @Override
        public void handle(ActionEvent event) {
            // Remove the last word in the selected behavior
            try {
                Behavior behavior = (Behavior) behaviorList.getToggleGroup().getSelectedToggle();
                if (behavior.isSelected() && behaviorList.getChildren().contains(behavior)) {
                    // NOTE(review): get(size - 1) throws on an empty behavior; the
                    // outer catch maps that to the "none selected" alert as well.
                    ScriptButton lastButton =
                            (ScriptButton) behavior.getChildren().get(behavior.getChildren().size() - 1);
                    behavior.getChildren().remove(lastButton);
                    // Recompute legal next words after the removal.
                    List<ScriptButton> currentScript = behavior.getChildren().stream()
                            .filter(b -> b instanceof ScriptButton)
                            .map(b -> (ScriptButton) b)
                            .collect(Collectors.toList());
                    List<String> allowables = getAllowableText(currentScript);
                    enableButtons(allowables);
                } else {
                    alertNoneSelected();
                }
            } catch (Exception ex) {
                alertNoneSelected();
            }
        }
    };

    /**
     * @param behaviorList list of behaviors being edited
     * @param allButtons   every palette button whose enabled state this class manages
     */
    MouseEvents(BehaviorList behaviorList, List<Node> allButtons) {
        this.behaviorList = behaviorList;
        this.allButtons = allButtons;
    }

    //region Static Methods

    /**
     * Get all button text that is valid at current state
     *
     * @param currentScript Script buttons that are active in the current Behavior
     * @return Returns a list of all Strings that are valid adds to the current Behavior
     */
    static List<String> getAllowableText(List<ScriptButton> currentScript) {
        if (currentScript.isEmpty()) {
            // enforce 'if' — every script must start with the If keyword
            return (Collections.singletonList("If"));
        }
        ScriptButton lastButton = currentScript.get(currentScript.size() - 1);
        String lastButtonText = lastButton.getText().trim();
        // A bare integer (user input) counts as a DATA word below.
        boolean isInteger;
        try {
            // NOTE(review): 'value' is unused — the parse is only a validity probe.
            int value = Integer.parseInt(lastButtonText.trim());
            isInteger = true;
        } catch (Exception ex) {
            isInteger = false;
        }
        // enforce data: after If/And or an operator, a data word must follow
        if (ScriptingTypes.OPERATOR.list().contains(lastButtonText)
                || lastButtonText.equals(Conditional.IF.text())
                || lastButtonText.equals(Conditional.AND.text())) {
            return ScriptingTypes.DATA.list();
        }
        // enforce operator: the left operand of a check sits at position
        // size % 4 == 2 (If DATA OP DATA And DATA OP DATA ... — each check is 4 words)
        else if ((ScriptingTypes.DATA.list().contains(lastButtonText) || (isInteger))
                && (currentScript.size() % 4) - 2 == 0) {
            return ScriptingTypes.OPERATOR.list();
        }
        // enforce and or then: a completed check may be extended or terminated
        else if (ScriptingTypes.DATA.list().contains(lastButtonText) || (isInteger)) {
            return Arrays.asList(Conditional.AND.text(), Conditional.THEN.text());
        }
        // enforce command: Then must be followed by exactly one command word
        else if (lastButtonText.equals(Conditional.THEN.text())) {
            return ScriptingTypes.COMMAND.list();
        }
        // disable all: the script is complete (ends in a command)
        else {
            return Collections.emptyList();
        }
    }

    /**
     * Builds the set of Check objects from the present enumerators
     *
     * @param scriptingObjects List of Scripting Enumerators to be parsed
     * @return Returns all check objects extracted from scriptingObjects,
     *         or null if an operator token cannot be resolved (should not happen
     *         because validity is enforced while the script is built)
     */
    private static ArrayList<Check> constructChecks(ArrayList<String> scriptingObjects) {
        ArrayList<Check> checks = new ArrayList<>();
        for (int i = 0; i < scriptingObjects.size(); i++) {
            String object = scriptingObjects.get(i);
            // A check starts at a DATA word or an integer literal: DATA OP DATA.
            if (ScriptingTypes.DATA.list().contains(object) || object.matches("\\d+")) {
                Operator op;
                try {
                    op = getOperator(scriptingObjects.get(i + 1));
                } catch (InstanceNotFoundException e) {
                    e.printStackTrace(); // Should never happen
                    return null;
                }
                Check check = new Check(scriptingObjects.get(i), scriptingObjects.get(i + 2), op);
                checks.add(check);
                // increment to end of the check that was located
                i = i + 2;
            }
        }
        return checks;
    }

    /**
     * Resolves an operator token to its enum constant.
     *
     * @throws InstanceNotFoundException if the text matches no {@link Operator}
     */
    private static Operator getOperator(String text) throws InstanceNotFoundException {
        for (Operator operator : Operator.values()) {
            if (text.trim().equals(operator.text())) {
                return operator;
            }
        }
        throw new InstanceNotFoundException();
    }

    /**
     * Converts every complete behavior in the list into an executable
     * {@link ScriptCommand}; incomplete behaviors are silently skipped.
     */
    List<ScriptCommand> getCommands() {
        ArrayList<ScriptCommand> commands = new ArrayList<>();
        // get all behaviors in the list as Behavior Nodes
        List<Behavior> behaviors = behaviorList.getChildren().stream()
                .map(b -> (Behavior) b)
                .collect(Collectors.toList());
        // Iterate behaviors on submitted script.
        // Each valid behavior is converted to a ScriptCommand object
        for (Behavior behavior : behaviors) {
            if (isCompleteBehavior(behavior)) {
                // Get the enumerated values of the current behavior
                ArrayList<String> scriptingObjects = getScriptingEnums(behavior);
                ArrayList<Check> checks = constructChecks(scriptingObjects);
                // Get Command enum for the command button (always the last word
                // of a complete behavior)
                Command command = getCommand(scriptingObjects.get(scriptingObjects.size() - 1));
                commands.add(new ScriptCommand(checks, command));
            }
        }
        return commands;
    }

    /**
     * Resolves a command token to its enum constant, or null if none matches.
     */
    private Command getCommand(String text) {
        for (Command command : Command.values()) {
            if (text.trim().equals(command.text())) {
                return command;
            }
        }
        return null;
    }

    /**
     * Returns whether or not all behaviors in the list are complete
     *
     * @param allBehaviors List of all Behavior nodes to be checked
     * @return Returns true if all behaviors are complete. False otherwise
     */
    boolean canAddBehaviors(List<Node> allBehaviors) {
        for (Node node : allBehaviors) {
            Behavior asBehavior = (Behavior) node;
            if (!isCompleteBehavior(asBehavior)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Shows an informational alert stating that the selected action could not be completed since no behavior has been
     * selected
     */
    void alertNoneSelected() {
        // Nothing is selected, show prompt
        Alert alert = new Alert(Alert.AlertType.INFORMATION);
        alert.setTitle("No Behavior Selected");
        alert.setHeaderText("No Behavior Selected");
        alert.setContentText("Please select a behavior from your list of behaviors prior to performing an action."
                + " If you have not yet created a behavior, select the green 'plus' button "
                + "to do so.");
        alert.showAndWait();
    }

    /**
     * @param behavior Behavior to be checked for completeness
     * @return Returns true if behavior is well formed
     */
    private boolean isCompleteBehavior(Behavior behavior) {
        if (behavior.getChildren().size() == 0) {
            return false;
        }
        ScriptButton lastButton = (ScriptButton) behavior.getChildren().get(behavior.getChildren().size() - 1);
        // We can do this since validity is enforced along the way:
        // a script is complete exactly when its last word is a command.
        return ScriptingTypes.COMMAND.list().contains(lastButton.getText());
    }

    /**
     * Enable all buttons in validButtons, disable the rest that appear in the choicesPane
     *
     * @param validText List of valid Strings at current point
     */
    void enableButtons(List<String> validText) {
        allButtons.forEach(b -> {
            if (b instanceof ChoiceButton) {
                if (validText.contains(((ChoiceButton) b).getText().trim())) {
                    b.setDisable(false);
                    // keep keyboard focus traversal off for palette buttons
                    b.setFocusTraversable(false);
                } else {
                    b.setDisable(true);
                }
            }
        });
    }

    /**
     * Gets the proper enum values that correspond with each button in the provided Behavior Node
     *
     * @param behavior Behavior node to be parsed for enumerators
     * @return Returns a list of enums parsed from behavior
     */
    private ArrayList<String> getScriptingEnums(Behavior behavior) {
        ArrayList<String> objects = new ArrayList<>();
        for (Node button : behavior.getChildren()) {
            String buttonText = ((ScriptButton) button).getText();
            if (ScriptingTypes.COMMAND.list().contains(buttonText)) {
                for (Command command : Command.values()) {
                    if (command.text().equals(buttonText)) {
                        objects.add(command.text());
                        break;
                    }
                }
            } else if (ScriptingTypes.OPERATOR.list().contains(buttonText)) {
                for (Operator operator : Operator.values()) {
                    if (operator.text().equals(buttonText)) {
                        objects.add(operator.text());
                        break;
                    }
                }
            } else if (ScriptingTypes.DATA.list().contains(buttonText)) {
                for (Data data : Data.values()) {
                    if (data.text().equals(buttonText)) {
                        objects.add(data.text());
                        break;
                    }
                }
            } else if (ScriptingTypes.CONDITIONAL.list().contains(buttonText)) {
                for (Conditional conditional : Conditional.values()) {
                    if (conditional.text().equals(buttonText)) {
                        objects.add(conditional.text());
                        break;
                    }
                }
            } else {
                // Not a known keyword: treat it as a user-entered numeric literal.
                objects.add(buttonText.trim());
            }
        }
        return objects;
    }
    //endregion
}
/*
 * This file is part of Flow Engine, licensed under the MIT License (MIT).
 *
 * Copyright (c) 2013 Spout LLC <http://www.spout.org/>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package com.flowpowered.api.geo.cuboid;

import java.util.List;
import java.util.Set;

import com.flowpowered.api.Engine;
import com.flowpowered.api.component.AbstractObserver;
import com.flowpowered.api.entity.Entity;
import com.flowpowered.api.geo.AreaBlockAccess;
import com.flowpowered.api.geo.LoadOption;
import com.flowpowered.api.geo.discrete.Point;
import com.flowpowered.api.geo.reference.WorldReference;
import com.flowpowered.api.material.block.BlockFace;
import com.flowpowered.api.player.Player;
import com.flowpowered.api.util.UnloadSavable;
import com.flowpowered.commons.BitSize;
import com.flowpowered.math.vector.Vector3i;

/**
 * Represents a cube containing 16x16x16 Blocks
 */
public abstract class Chunk extends Cube implements AreaBlockAccess, UnloadSavable {
    /**
     * Stores the size of the amount of blocks in this Chunk
     */
    public static final BitSize BLOCKS = new BitSize(4);
    /**
     * Mask to convert a block integer coordinate into the point base.
     * BLOCKS.SIZE is 1 << 4 = 16, so -BLOCKS.SIZE == 0xFFFFFFF0 clears the
     * low 4 bits, snapping a block coordinate to the chunk's base block.
     */
    public final static int POINT_BASE_MASK = -BLOCKS.SIZE;
    // World block coordinates of this chunk's base (first) block,
    // precomputed from the chunk coordinates in the constructor.
    private final int blockX;
    private final int blockY;
    private final int blockZ;
    private final Engine engine;

    public Chunk(Engine engine, WorldReference world, int x, int y, int z) {
        // x, y, z here are chunk coordinates; Cube stores them via Point.
        super(new Point(world, x, y, z), BLOCKS.SIZE);
        this.blockX = getChunkX() << BLOCKS.BITS;
        this.blockY = getChunkY() << BLOCKS.BITS;
        this.blockZ = getChunkZ() << BLOCKS.BITS;
        this.engine = engine;
    }

    /**
     * Fills the given block container with the block data for this chunk
     */
    public abstract void fillBlockContainer(BlockContainer container);

    /**
     * Gets the region that this chunk is located in
     *
     * @return region
     */
    public abstract Region getRegion();

    /**
     * Populates the chunk with all the Populators attached to the WorldGenerator of its world.
     */
    public abstract boolean populate();

    /**
     * Populates the chunk with all the Populators attached to the WorldGenerator of its world.
     *
     * @param force forces to populate the chunk even if it already has been populated.
     */
    public abstract boolean populate(boolean force);

    /**
     * Populates the chunk with all the Populators attached to the WorldGenerator of its world.<br> <br> Warning: populating with force observer should not be called from within populators as it could
     * lead to a population cascade
     *
     * @param sync queues the population to occur at a later time
     * @param observe forces the chunk to be observed for population
     */
    public abstract void populate(boolean sync, boolean observe);

    /**
     * Populates the chunk with all the Populators attached to the WorldGenerator of its world.<br> <br> Warning: populating with force observer should not be called from within populators as it could
     * lead to a population cascade
     *
     * @param sync queues the population to occur at a later time
     * @param observe forces the chunk to be observed for population
     * @param priority adds the chunk to the high priority queue
     */
    public abstract void populate(boolean sync, boolean observe, boolean priority);

    /**
     * Gets if this chunk already has been populated.
     *
     * @return if the chunk is populated.
     */
    public abstract boolean isPopulated();

    /**
     * Gets the entities in the chunk at the last snapshot
     *
     * @return the entities
     */
    public abstract List<Entity> getEntities();

    /**
     * Gets the entities currently in the chunk
     *
     * @return the entities
     */
    public abstract List<Entity> getLiveEntities();

    /**
     * Gets the number of observers viewing this chunk. If the number of observing entities falls to zero, this chunk may be reaped at any time.
     *
     * @return number of observers
     */
    public abstract int getNumObservers();

    /**
     * Gets the observing players of this chunk (done based on the player's view distance).
     *
     * @return List containing the observing players
     */
    public abstract Set<? extends Player> getObservingPlayers();

    /**
     * Gets the observers of this chunk
     *
     * @return Set containing the observing players
     */
    public abstract Set<? extends Entity> getObservers();

    /**
     * Refresh the distance between a player and the chunk, and adds the player as an observer if not previously observing.
     *
     * @param observer the observer
     * @return false if the observer was already observing the chunk
     */
    public abstract boolean refreshObserver(AbstractObserver observer);

    /**
     * De-register an observer as observing the chunk.
     *
     * @param observer the observer
     * @return true if the observer was observing the chunk
     */
    public abstract boolean removeObserver(AbstractObserver observer);

    @Override
    public boolean containsBlock(int x, int y, int z) {
        // A block belongs to this chunk iff its coordinates, shifted down to
        // chunk coordinates, match this chunk's coordinates on every axis.
        return x >> BLOCKS.BITS == getChunkX() && y >> BLOCKS.BITS == getChunkY() && z >> BLOCKS.BITS == getChunkZ();
    }

    /**
     * Gets the x-coordinate of this chunk as a Block coordinate
     *
     * @return the x-coordinate of the first block in this chunk
     */
    public int getBlockX() {
        return blockX;
    }

    /**
     * Gets the y-coordinate of this chunk as a Block coordinate
     *
     * @return the y-coordinate of the first block in this chunk
     */
    public int getBlockY() {
        return blockY;
    }

    /**
     * Gets the z-coordinate of this chunk as a Block coordinate
     *
     * @return the z-coordinate of the first block in this chunk
     */
    public int getBlockZ() {
        return blockZ;
    }

    // Chunk coordinates are stored as the Cube's position (see constructor).
    public int getChunkX() {
        return (int) getX();
    }

    public int getChunkY() {
        return (int) getY();
    }

    public int getChunkZ() {
        return (int) getZ();
    }

    /**
     * Gets a chunk relative to this chunk
     *
     * @param x offset in chunks along the x axis
     * @param y offset in chunks along the y axis
     * @param z offset in chunks along the z axis
     * @param opt how to handle an unloaded chunk
     * @return The Chunk, or null if not loaded and load is False
     */
    public Chunk getRelative(int x, int y, int z, LoadOption opt) {
        // We check to see if the chunk is in this chunk's region first, to avoid a map lookup for the other region
        final int otherChunkX = this.getChunkX() + x;
        final int otherChunkY = this.getChunkY() + y;
        final int otherChunkZ = this.getChunkZ() + z;
        final int regionX = getRegion().getRegionX();
        final int regionY = getRegion().getRegionY();
        final int regionZ = getRegion().getRegionZ();
        final int otherRegionX = otherChunkX >> Region.CHUNKS.BITS;
        final int otherRegionY = otherChunkY >> Region.CHUNKS.BITS;
        final int otherRegionZ = otherChunkZ >> Region.CHUNKS.BITS;
        if (regionX == otherRegionX && regionZ == otherRegionZ && regionY == otherRegionY) {
            // Get the chunk from the current region.
            // NOTE(review): this subtracts the region INDEX, not the region's base
            // chunk coordinate (otherRegionX << Region.CHUNKS.BITS). That only yields
            // a region-local coordinate if Region.getChunk masks/wraps its arguments —
            // verify against Region.getChunk, otherwise this looks like a bug.
            return getRegion().getChunk(otherChunkX - otherRegionX, otherChunkY - otherRegionY, otherChunkZ - otherRegionZ, opt);
        }
        // Different region: fall back to the world-level lookup.
        return this.getWorld().refresh(engine.getWorldManager()).getChunk(otherChunkX, otherChunkY, otherChunkZ, opt);
    }

    /**
     * Gets a chunk relative to this chunk
     *
     * @param offset of the chunk relative to this chunk
     * @param opt True to load the chunk if it is not yet loaded
     * @return The Chunk, or null if not loaded and load is False
     */
    public Chunk getRelative(Vector3i offset, LoadOption opt) {
        return this.getWorld().refresh(engine.getWorldManager()).getChunk(this.getChunkX() + offset.getX(), this.getChunkY() + offset.getY(), this.getChunkZ() + offset.getZ(), opt);
    }

    /**
     * Gets a chunk relative to this chunk
     *
     * @param offset of the chunk relative to this chunk
     * @param opt True to load the chunk if it is not yet loaded
     * @return The Chunk, or null if not loaded and load is False
     */
    public Chunk getRelative(BlockFace offset, LoadOption opt) {
        return this.getRelative(offset.getOffset(), opt);
    }

    /**
     * Gets the generation index for this chunk. Only chunks generated as part of the same bulk initialize have the same index.
     *
     * @return a unique generation id, or -1 if the chunk was loaded from disk
     */
    public abstract int getGenerationIndex();

    /**
     * Converts a point in such a way that it points to the first block (the base block) of the chunk<br> This is similar to performing the following operation on the x, y and z coordinate:<br> - Convert
     * to the chunk coordinate<br> - Multiply by chunk size
     */
    public static Point pointToBase(Point p) {
        return new Point(p.getWorld(), p.getBlockX() & POINT_BASE_MASK, p.getBlockY() & POINT_BASE_MASK, p.getBlockZ() & POINT_BASE_MASK);
    }

    public Engine getEngine() {
        return engine;
    }
}
/*
 * #%L
 * Native ARchive plugin for Maven
 * %%
 * Copyright (C) 2002 - 2014 NAR Maven Plugin developers.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package com.github.maven_nar.cpptasks.compiler;

import java.io.File;

import org.apache.tools.ant.BuildException;

import com.github.maven_nar.cpptasks.CCTask;
import com.github.maven_nar.cpptasks.CompilerParam;
import com.github.maven_nar.cpptasks.DependencyInfo;
import com.github.maven_nar.cpptasks.ProcessorParam;
import com.github.maven_nar.cpptasks.VersionInfo;

/**
 * A configuration for a C++ compiler.
 * <p>
 * Fixes relative to the previous revision: the derived-configuration
 * constructor now copies {@code params} and {@code useCcache} from the base
 * configuration (previously {@code params} was left {@code null}, so
 * {@code getParam}/{@code getParams} failed on precompiled-header
 * configurations, and the ccache flag was silently dropped), and the primary
 * constructor treats {@code null} {@code params}/{@code endArgs} as empty,
 * consistent with how every other array argument is handled.
 *
 * @author Curt Arnold
 */
public final class CommandLineCompilerConfiguration implements CompilerConfiguration {
  // Whether compilations should be routed through ccache.
  private boolean useCcache;
  // Leading command-line arguments (may be extended by the derived-config ctor).
  private/* final */String[] args;
  private final/* final */CommandLineCompiler compiler;
  // Trailing command-line arguments, appended after the source files.
  private final String[] endArgs;
  //
  // include path from environment variable not
  // explicitly stated in Ant script
  private/* final */File[] envIncludePath;
  // Files this configuration must not compile (see bid()).
  private String[] exceptFiles;
  private final/* final */String identifier;
  private/* final */File[] includePath;
  private final/* final */String includePathIdentifier;
  // True if this configuration generates (rather than consumes) a precompiled header.
  private final boolean isPrecompiledHeaderGeneration;
  private/* final */ProcessorParam[] params;
  private final/* final */boolean rebuild;
  private/* final */File[] sysIncludePath;
  // Optional explicit directory of the compiler executable.
  private/* final */String commandPath;

  /**
   * Convenience constructor with no explicit command path and ccache disabled.
   */
  public CommandLineCompilerConfiguration(final CommandLineCompiler compiler, final String identifier,
      final File[] includePath, final File[] sysIncludePath, final File[] envIncludePath,
      final String includePathIdentifier, final String[] args, final ProcessorParam[] params,
      final boolean rebuild, final String[] endArgs) {
    this(compiler, identifier, includePath, sysIncludePath, envIncludePath, includePathIdentifier, args, params,
        rebuild, endArgs, null);
  }

  /**
   * Convenience constructor with ccache disabled.
   */
  public CommandLineCompilerConfiguration(final CommandLineCompiler compiler, final String identifier,
      final File[] includePath, final File[] sysIncludePath, final File[] envIncludePath,
      final String includePathIdentifier, final String[] args, final ProcessorParam[] params,
      final boolean rebuild, final String[] endArgs, final String commandPath) {
    this(compiler, identifier, includePath, sysIncludePath, envIncludePath, includePathIdentifier, args, params,
        rebuild, endArgs, commandPath, false);
  }

  /**
   * Primary constructor.
   *
   * @param compiler the compiler adapter; must not be null
   * @param identifier canonical identifier of this configuration; must not be null
   * @param includePath user include path (null treated as empty)
   * @param sysIncludePath system include path (null treated as empty)
   * @param envIncludePath include path taken from the environment (null treated as empty)
   * @param includePathIdentifier identifier of the include path; must not be null
   * @param args leading command-line arguments (null treated as empty)
   * @param params processor parameters (null treated as empty)
   * @param rebuild true to force recompilation
   * @param endArgs trailing command-line arguments (null treated as empty)
   * @param commandPath optional directory of the compiler executable, may be null
   * @param useCcache true to route compilations through ccache
   * @throws NullPointerException if compiler, identifier or includePathIdentifier is null
   */
  public CommandLineCompilerConfiguration(final CommandLineCompiler compiler, final String identifier,
      final File[] includePath, final File[] sysIncludePath, final File[] envIncludePath,
      final String includePathIdentifier, final String[] args, final ProcessorParam[] params,
      final boolean rebuild, final String[] endArgs, final String commandPath, final boolean useCcache) {
    if (compiler == null) {
      throw new NullPointerException("compiler");
    }
    if (identifier == null) {
      throw new NullPointerException("identifier");
    }
    if (includePathIdentifier == null) {
      throw new NullPointerException("includePathIdentifier");
    }
    // Defensively copy every array; treat null uniformly as "empty".
    this.args = args == null ? new String[0] : args.clone();
    this.includePath = includePath == null ? new File[0] : includePath.clone();
    this.sysIncludePath = sysIncludePath == null ? new File[0] : sysIncludePath.clone();
    this.envIncludePath = envIncludePath == null ? new File[0] : envIncludePath.clone();
    // Previously params.clone() NPE'd and endArgs.clone() NPE'd on null while
    // all other arrays defaulted to empty; made consistent here.
    this.params = params == null ? new ProcessorParam[0] : params.clone();
    this.endArgs = endArgs == null ? new String[0] : endArgs.clone();
    this.useCcache = useCcache;
    this.compiler = compiler;
    this.rebuild = rebuild;
    this.identifier = identifier;
    this.includePathIdentifier = includePathIdentifier;
    this.exceptFiles = null;
    this.isPrecompiledHeaderGeneration = false;
    this.commandPath = commandPath;
  }

  /**
   * Derives a configuration from an existing one, optionally appending
   * arguments, excluding files, and marking it as a precompiled-header
   * generation step. Used by createPrecompileConfigurations().
   */
  public CommandLineCompilerConfiguration(final CommandLineCompilerConfiguration base,
      final String[] additionalArgs, final String[] exceptFiles, final boolean isPrecompileHeaderGeneration) {
    this.compiler = base.compiler;
    this.identifier = base.identifier;
    this.rebuild = base.rebuild;
    this.includePath = base.includePath.clone();
    this.sysIncludePath = base.sysIncludePath.clone();
    this.endArgs = base.endArgs.clone();
    this.envIncludePath = base.envIncludePath.clone();
    this.includePathIdentifier = base.includePathIdentifier;
    // FIX: params and useCcache were previously not carried over, leaving
    // params null (NPE in getParam) and losing the ccache setting.
    this.params = base.params == null ? new ProcessorParam[0] : base.params.clone();
    this.useCcache = base.useCcache;
    if (exceptFiles != null) {
      this.exceptFiles = exceptFiles.clone();
    }
    this.isPrecompiledHeaderGeneration = isPrecompileHeaderGeneration;
    if (additionalArgs != null) {
      // Base args first, then the additional ones.
      this.args = new String[base.args.length + additionalArgs.length];
      System.arraycopy(base.args, 0, this.args, 0, base.args.length);
      System.arraycopy(additionalArgs, 0, this.args, base.args.length, additionalArgs.length);
    } else {
      this.args = base.args.clone();
    }
    this.commandPath = base.commandPath;
  }

  /**
   * Returns how strongly this configuration wants to process the given file:
   * the underlying compiler's bid, or 0 if the file is explicitly excluded.
   */
  @Override
  public int bid(final String inputFile) {
    final int compilerBid = this.compiler.bid(inputFile);
    if (compilerBid > 0 && this.exceptFiles != null) {
      for (final String exceptFile : this.exceptFiles) {
        if (inputFile.equals(exceptFile)) {
          return 0;
        }
      }
    }
    return compilerBid;
  }

  /**
   * Compiles the given source files, reporting progress to the monitor.
   *
   * @throws BuildException if compilation fails (monitor is notified first)
   */
  @Override
  public void compile(final CCTask task, final File outputDir, final String[] sourceFiles,
      final boolean relentless, final ProgressMonitor monitor) throws BuildException {
    if (monitor != null) {
      monitor.start(this);
    }
    try {
      this.compiler.compile(task, outputDir, sourceFiles, this.args, this.endArgs, relentless, this, monitor);
      if (monitor != null) {
        monitor.finish(this, true);
      }
    } catch (final BuildException ex) {
      if (monitor != null) {
        monitor.finish(this, false);
      }
      throw ex;
    }
  }

  /**
   *
   * This method may be used to get two distinct compiler configurations, one
   * for compiling the specified file and producing a precompiled header
   * file, and a second for compiling other files using the precompiled
   * header file.
   *
   * The last (preferrably only) include directive in the prototype file will
   * be used to mark the boundary between pre-compiled and normally compiled
   * headers.
   *
   * @param prototype
   *          A source file (for example, stdafx.cpp) that is used to build
   *          the precompiled header file. @returns null if precompiled
   *          headers are not supported or a two element array containing
   *          the precompiled header generation configuration and the
   *          consuming configuration
   *
   */
  @Override
  public CompilerConfiguration[] createPrecompileConfigurations(final File prototype,
      final String[] nonPrecompiledFiles) {
    if (this.compiler instanceof PrecompilingCompiler) {
      return ((PrecompilingCompiler) this.compiler)
          .createPrecompileConfigurations(this, prototype, nonPrecompiledFiles);
    }
    return null;
  }

  public String getCommand() {
    return this.compiler.getCommand();
  }

  public final String getCommandPath() {
    return this.commandPath;
  }

  public Compiler getCompiler() {
    return this.compiler;
  }

  public String[] getEndArguments() {
    return this.endArgs.clone();
  }

  /**
   * Returns a string representation of this configuration. Should be
   * canonical so that equivalent configurations will have equivalent string
   * representations
   */
  @Override
  public String getIdentifier() {
    return this.identifier;
  }

  public File[] getIncludePath() {
    return this.includePath.clone();
  }

  @Override
  public String getIncludePathIdentifier() {
    return this.includePathIdentifier;
  }

  @Override
  public String[] getOutputFileNames(final String inputFile, final VersionInfo versionInfo) {
    return this.compiler.getOutputFileNames(inputFile, versionInfo);
  }

  /**
   * Looks up a processor parameter by name.
   *
   * @return the matching parameter, or null if none matches
   */
  @Override
  public CompilerParam getParam(final String name) {
    for (final ProcessorParam param : this.params) {
      if (name.equals(param.getName())) {
        return (CompilerParam) param;
      }
    }
    return null;
  }

  // NOTE: returns the internal array (not a copy), matching the historical
  // contract; callers must not mutate it.
  @Override
  public ProcessorParam[] getParams() {
    return this.params;
  }

  public String[] getPreArguments() {
    return this.args.clone();
  }

  @Override
  public boolean getRebuild() {
    return this.rebuild;
  }

  @Override
  public boolean isPrecompileGeneration() {
    return this.isPrecompiledHeaderGeneration;
  }

  public boolean isUseCcache() {
    return this.useCcache;
  }

  @Override
  public DependencyInfo parseIncludes(final CCTask task, final File baseDir, final File source) {
    return this.compiler.parseIncludes(task, source, this.includePath, this.sysIncludePath, this.envIncludePath,
        baseDir, getIncludePathIdentifier());
  }

  public final void setCommandPath(final String commandPath) {
    this.commandPath = commandPath;
  }

  @Override
  public String toString() {
    return this.identifier;
  }
}
// Template Source: BaseEntity.java.tt // ------------------------------------------------------------------------------ // Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information. // ------------------------------------------------------------------------------ package com.microsoft.graph.models; import com.microsoft.graph.serializer.ISerializer; import com.microsoft.graph.serializer.IJsonBackedObject; import com.microsoft.graph.serializer.AdditionalDataManager; import java.util.EnumSet; import com.microsoft.graph.http.BaseCollectionPage; import com.microsoft.graph.models.ApplicationGuardBlockClipboardSharingType; import com.microsoft.graph.models.ApplicationGuardBlockFileTransferType; import com.microsoft.graph.models.AppLockerApplicationControlType; import com.microsoft.graph.models.BitLockerRemovableDrivePolicy; import com.microsoft.graph.models.FirewallCertificateRevocationListCheckMethodType; import com.microsoft.graph.models.FirewallPacketQueueingMethodType; import com.microsoft.graph.models.FirewallPreSharedKeyEncodingMethodType; import com.microsoft.graph.models.WindowsFirewallNetworkProfile; import com.microsoft.graph.models.DeviceConfiguration; import com.google.gson.JsonObject; import com.google.gson.annotations.SerializedName; import com.google.gson.annotations.Expose; import javax.annotation.Nullable; import javax.annotation.Nonnull; // **NOTE** This file was generated by a tool and any changes will be overwritten. /** * The class for the Windows10Endpoint Protection Configuration. */ public class Windows10EndpointProtectionConfiguration extends DeviceConfiguration implements IJsonBackedObject { /** * The Application Guard Allow Persistence. * Allow persisting user generated data inside the App Guard Containter (favorites, cookies, web passwords, etc.) 
*/ @SerializedName(value = "applicationGuardAllowPersistence", alternate = {"ApplicationGuardAllowPersistence"}) @Expose @Nullable public Boolean applicationGuardAllowPersistence; /** * The Application Guard Allow Print To Local Printers. * Allow printing to Local Printers from Container */ @SerializedName(value = "applicationGuardAllowPrintToLocalPrinters", alternate = {"ApplicationGuardAllowPrintToLocalPrinters"}) @Expose @Nullable public Boolean applicationGuardAllowPrintToLocalPrinters; /** * The Application Guard Allow Print To Network Printers. * Allow printing to Network Printers from Container */ @SerializedName(value = "applicationGuardAllowPrintToNetworkPrinters", alternate = {"ApplicationGuardAllowPrintToNetworkPrinters"}) @Expose @Nullable public Boolean applicationGuardAllowPrintToNetworkPrinters; /** * The Application Guard Allow Print To PDF. * Allow printing to PDF from Container */ @SerializedName(value = "applicationGuardAllowPrintToPDF", alternate = {"ApplicationGuardAllowPrintToPDF"}) @Expose @Nullable public Boolean applicationGuardAllowPrintToPDF; /** * The Application Guard Allow Print To XPS. * Allow printing to XPS from Container */ @SerializedName(value = "applicationGuardAllowPrintToXPS", alternate = {"ApplicationGuardAllowPrintToXPS"}) @Expose @Nullable public Boolean applicationGuardAllowPrintToXPS; /** * The Application Guard Block Clipboard Sharing. * Block clipboard to share data from Host to Container, or from Container to Host, or both ways, or neither ways. Possible values are: notConfigured, blockBoth, blockHostToContainer, blockContainerToHost, blockNone. */ @SerializedName(value = "applicationGuardBlockClipboardSharing", alternate = {"ApplicationGuardBlockClipboardSharing"}) @Expose @Nullable public ApplicationGuardBlockClipboardSharingType applicationGuardBlockClipboardSharing; /** * The Application Guard Block File Transfer. * Block clipboard to transfer image file, text file or neither of them. 
Possible values are: notConfigured, blockImageAndTextFile, blockImageFile, blockNone, blockTextFile. */ @SerializedName(value = "applicationGuardBlockFileTransfer", alternate = {"ApplicationGuardBlockFileTransfer"}) @Expose @Nullable public ApplicationGuardBlockFileTransferType applicationGuardBlockFileTransfer; /** * The Application Guard Block Non Enterprise Content. * Block enterprise sites to load non-enterprise content, such as third party plug-ins */ @SerializedName(value = "applicationGuardBlockNonEnterpriseContent", alternate = {"ApplicationGuardBlockNonEnterpriseContent"}) @Expose @Nullable public Boolean applicationGuardBlockNonEnterpriseContent; /** * The Application Guard Enabled. * Enable Windows Defender Application Guard */ @SerializedName(value = "applicationGuardEnabled", alternate = {"ApplicationGuardEnabled"}) @Expose @Nullable public Boolean applicationGuardEnabled; /** * The Application Guard Force Auditing. * Force auditing will persist Windows logs and events to meet security/compliance criteria (sample events are user login-logoff, use of privilege rights, software installation, system changes, etc.) */ @SerializedName(value = "applicationGuardForceAuditing", alternate = {"ApplicationGuardForceAuditing"}) @Expose @Nullable public Boolean applicationGuardForceAuditing; /** * The App Locker Application Control. * Enables the Admin to choose what types of app to allow on devices. Possible values are: notConfigured, enforceComponentsAndStoreApps, auditComponentsAndStoreApps, enforceComponentsStoreAppsAndSmartlocker, auditComponentsStoreAppsAndSmartlocker. */ @SerializedName(value = "appLockerApplicationControl", alternate = {"AppLockerApplicationControl"}) @Expose @Nullable public AppLockerApplicationControlType appLockerApplicationControl; /** * The Bit Locker Disable Warning For Other Disk Encryption. * Allows the Admin to disable the warning prompt for other disk encryption on the user machines. 
*/ @SerializedName(value = "bitLockerDisableWarningForOtherDiskEncryption", alternate = {"BitLockerDisableWarningForOtherDiskEncryption"}) @Expose @Nullable public Boolean bitLockerDisableWarningForOtherDiskEncryption; /** * The Bit Locker Enable Storage Card Encryption On Mobile. * Allows the admin to require encryption to be turned on using BitLocker. This policy is valid only for a mobile SKU. */ @SerializedName(value = "bitLockerEnableStorageCardEncryptionOnMobile", alternate = {"BitLockerEnableStorageCardEncryptionOnMobile"}) @Expose @Nullable public Boolean bitLockerEnableStorageCardEncryptionOnMobile; /** * The Bit Locker Encrypt Device. * Allows the admin to require encryption to be turned on using BitLocker. */ @SerializedName(value = "bitLockerEncryptDevice", alternate = {"BitLockerEncryptDevice"}) @Expose @Nullable public Boolean bitLockerEncryptDevice; /** * The Bit Locker Removable Drive Policy. * BitLocker Removable Drive Policy. */ @SerializedName(value = "bitLockerRemovableDrivePolicy", alternate = {"BitLockerRemovableDrivePolicy"}) @Expose @Nullable public BitLockerRemovableDrivePolicy bitLockerRemovableDrivePolicy; /** * The Defender Additional Guarded Folders. * List of folder paths to be added to the list of protected folders */ @SerializedName(value = "defenderAdditionalGuardedFolders", alternate = {"DefenderAdditionalGuardedFolders"}) @Expose @Nullable public java.util.List<String> defenderAdditionalGuardedFolders; /** * The Defender Attack Surface Reduction Excluded Paths. * List of exe files and folders to be excluded from attack surface reduction rules */ @SerializedName(value = "defenderAttackSurfaceReductionExcludedPaths", alternate = {"DefenderAttackSurfaceReductionExcludedPaths"}) @Expose @Nullable public java.util.List<String> defenderAttackSurfaceReductionExcludedPaths; /** * The Defender Exploit Protection Xml. * Xml content containing information regarding exploit protection details. 
*/ @SerializedName(value = "defenderExploitProtectionXml", alternate = {"DefenderExploitProtectionXml"}) @Expose @Nullable public byte[] defenderExploitProtectionXml; /** * The Defender Exploit Protection Xml File Name. * Name of the file from which DefenderExploitProtectionXml was obtained. */ @SerializedName(value = "defenderExploitProtectionXmlFileName", alternate = {"DefenderExploitProtectionXmlFileName"}) @Expose @Nullable public String defenderExploitProtectionXmlFileName; /** * The Defender Guarded Folders Allowed App Paths. * List of paths to exe that are allowed to access protected folders */ @SerializedName(value = "defenderGuardedFoldersAllowedAppPaths", alternate = {"DefenderGuardedFoldersAllowedAppPaths"}) @Expose @Nullable public java.util.List<String> defenderGuardedFoldersAllowedAppPaths; /** * The Defender Security Center Block Exploit Protection Override. * Indicates whether or not to block user from overriding Exploit Protection settings. */ @SerializedName(value = "defenderSecurityCenterBlockExploitProtectionOverride", alternate = {"DefenderSecurityCenterBlockExploitProtectionOverride"}) @Expose @Nullable public Boolean defenderSecurityCenterBlockExploitProtectionOverride; /** * The Firewall Block Stateful FTP. * Blocks stateful FTP connections to the device */ @SerializedName(value = "firewallBlockStatefulFTP", alternate = {"FirewallBlockStatefulFTP"}) @Expose @Nullable public Boolean firewallBlockStatefulFTP; /** * The Firewall Certificate Revocation List Check Method. * Specify how the certificate revocation list is to be enforced. Possible values are: deviceDefault, none, attempt, require. */ @SerializedName(value = "firewallCertificateRevocationListCheckMethod", alternate = {"FirewallCertificateRevocationListCheckMethod"}) @Expose @Nullable public FirewallCertificateRevocationListCheckMethodType firewallCertificateRevocationListCheckMethod; /** * The Firewall Idle Timeout For Security Association In Seconds. 
* Configures the idle timeout for security associations, in seconds, from 300 to 3600 inclusive. This is the period after which security associations will expire and be deleted. Valid values 300 to 3600 */ @SerializedName(value = "firewallIdleTimeoutForSecurityAssociationInSeconds", alternate = {"FirewallIdleTimeoutForSecurityAssociationInSeconds"}) @Expose @Nullable public Integer firewallIdleTimeoutForSecurityAssociationInSeconds; /** * The Firewall IPSec Exemptions Allow DHCP. * Configures IPSec exemptions to allow both IPv4 and IPv6 DHCP traffic */ @SerializedName(value = "firewallIPSecExemptionsAllowDHCP", alternate = {"FirewallIPSecExemptionsAllowDHCP"}) @Expose @Nullable public Boolean firewallIPSecExemptionsAllowDHCP; /** * The Firewall IPSec Exemptions Allow ICMP. * Configures IPSec exemptions to allow ICMP */ @SerializedName(value = "firewallIPSecExemptionsAllowICMP", alternate = {"FirewallIPSecExemptionsAllowICMP"}) @Expose @Nullable public Boolean firewallIPSecExemptionsAllowICMP; /** * The Firewall IPSec Exemptions Allow Neighbor Discovery. * Configures IPSec exemptions to allow neighbor discovery IPv6 ICMP type-codes */ @SerializedName(value = "firewallIPSecExemptionsAllowNeighborDiscovery", alternate = {"FirewallIPSecExemptionsAllowNeighborDiscovery"}) @Expose @Nullable public Boolean firewallIPSecExemptionsAllowNeighborDiscovery; /** * The Firewall IPSec Exemptions Allow Router Discovery. * Configures IPSec exemptions to allow router discovery IPv6 ICMP type-codes */ @SerializedName(value = "firewallIPSecExemptionsAllowRouterDiscovery", alternate = {"FirewallIPSecExemptionsAllowRouterDiscovery"}) @Expose @Nullable public Boolean firewallIPSecExemptionsAllowRouterDiscovery; /** * The Firewall Merge Keying Module Settings. 
* If an authentication set is not fully supported by a keying module, direct the module to ignore only unsupported authentication suites rather than the entire set */ @SerializedName(value = "firewallMergeKeyingModuleSettings", alternate = {"FirewallMergeKeyingModuleSettings"}) @Expose @Nullable public Boolean firewallMergeKeyingModuleSettings; /** * The Firewall Packet Queueing Method. * Configures how packet queueing should be applied in the tunnel gateway scenario. Possible values are: deviceDefault, disabled, queueInbound, queueOutbound, queueBoth. */ @SerializedName(value = "firewallPacketQueueingMethod", alternate = {"FirewallPacketQueueingMethod"}) @Expose @Nullable public FirewallPacketQueueingMethodType firewallPacketQueueingMethod; /** * The Firewall Pre Shared Key Encoding Method. * Select the preshared key encoding to be used. Possible values are: deviceDefault, none, utF8. */ @SerializedName(value = "firewallPreSharedKeyEncodingMethod", alternate = {"FirewallPreSharedKeyEncodingMethod"}) @Expose @Nullable public FirewallPreSharedKeyEncodingMethodType firewallPreSharedKeyEncodingMethod; /** * The Firewall Profile Domain. * Configures the firewall profile settings for domain networks */ @SerializedName(value = "firewallProfileDomain", alternate = {"FirewallProfileDomain"}) @Expose @Nullable public WindowsFirewallNetworkProfile firewallProfileDomain; /** * The Firewall Profile Private. * Configures the firewall profile settings for private networks */ @SerializedName(value = "firewallProfilePrivate", alternate = {"FirewallProfilePrivate"}) @Expose @Nullable public WindowsFirewallNetworkProfile firewallProfilePrivate; /** * The Firewall Profile Public. * Configures the firewall profile settings for public networks */ @SerializedName(value = "firewallProfilePublic", alternate = {"FirewallProfilePublic"}) @Expose @Nullable public WindowsFirewallNetworkProfile firewallProfilePublic; /** * The Smart Screen Block Override For Files. 
* Allows IT Admins to control whether users can can ignore SmartScreen warnings and run malicious files. */ @SerializedName(value = "smartScreenBlockOverrideForFiles", alternate = {"SmartScreenBlockOverrideForFiles"}) @Expose @Nullable public Boolean smartScreenBlockOverrideForFiles; /** * The Smart Screen Enable In Shell. * Allows IT Admins to configure SmartScreen for Windows. */ @SerializedName(value = "smartScreenEnableInShell", alternate = {"SmartScreenEnableInShell"}) @Expose @Nullable public Boolean smartScreenEnableInShell; /** * Sets the raw JSON object * * @param serializer the serializer * @param json the JSON object to set this object to */ public void setRawObject(@Nonnull final ISerializer serializer, @Nonnull final JsonObject json) { } }
package org.apache.lucene.index; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.FileNotFoundException; import java.io.IOException; import java.io.PrintStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.codecs.LiveDocsFormat; import org.apache.lucene.codecs.lucene3x.Lucene3xCodec; import org.apache.lucene.codecs.lucene3x.Lucene3xSegmentInfoFormat; import org.apache.lucene.codecs.lucene3x.Lucene3xSegmentInfoReader; import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.ChecksumIndexOutput; import org.apache.lucene.store.DataOutput; // javadocs import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.NoSuchDirectoryException; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.StringHelper; import 
org.apache.lucene.util.ThreadInterruptedException; /** * A collection of segmentInfo objects with methods for operating on * those segments in relation to the file system. * <p> * The active segments in the index are stored in the segment info file, * <tt>segments_N</tt>. There may be one or more <tt>segments_N</tt> files in the * index; however, the one with the largest generation is the active one (when * older segments_N files are present it's because they temporarily cannot be * deleted, or, a writer is in the process of committing, or a custom * {@link org.apache.lucene.index.IndexDeletionPolicy IndexDeletionPolicy} * is in use). This file lists each segment by name and has details about the * codec and generation of deletes. * </p> * <p>There is also a file <tt>segments.gen</tt>. This file contains * the current generation (the <tt>_N</tt> in <tt>segments_N</tt>) of the index. * This is used only as a fallback in case the current generation cannot be * accurately determined by directory listing alone (as is the case for some NFS * clients with time-based directory cache expiration). 
This file simply contains * an {@link DataOutput#writeInt Int32} version header * ({@link #FORMAT_SEGMENTS_GEN_CURRENT}), followed by the * generation recorded as {@link DataOutput#writeLong Int64}, written twice.</p> * <p> * Files: * <ul> * <li><tt>segments.gen</tt>: GenHeader, Generation, Generation * <li><tt>segments_N</tt>: Header, Version, NameCounter, SegCount, * &lt;SegName, SegCodec, DelGen, DeletionCount&gt;<sup>SegCount</sup>, * CommitUserData, Checksum * </ul> * </p> * Data types: * <p> * <ul> * <li>Header --&gt; {@link CodecUtil#writeHeader CodecHeader}</li> * <li>GenHeader, NameCounter, SegCount, DeletionCount --&gt; {@link DataOutput#writeInt Int32}</li> * <li>Generation, Version, DelGen, Checksum --&gt; {@link DataOutput#writeLong Int64}</li> * <li>SegName, SegCodec --&gt; {@link DataOutput#writeString String}</li> * <li>CommitUserData --&gt; {@link DataOutput#writeStringStringMap Map&lt;String,String&gt;}</li> * </ul> * </p> * Field Descriptions: * <p> * <ul> * <li>Version counts how often the index has been changed by adding or deleting * documents.</li> * <li>NameCounter is used to generate names for new segment files.</li> * <li>SegName is the name of the segment, and is used as the file name prefix for * all of the files that compose the segment's index.</li> * <li>DelGen is the generation count of the deletes file. If this is -1, * there are no deletes. Anything above zero means there are deletes * stored by {@link LiveDocsFormat}.</li> * <li>DeletionCount records the number of deleted documents in this segment.</li> * <li>Checksum contains the CRC32 checksum of all bytes in the segments_N file up * until the checksum. 
This is used to verify integrity of the file on opening the * index.</li> * <li>SegCodec is the {@link Codec#getName() name} of the Codec that encoded * this segment.</li> * <li>CommitUserData stores an optional user-supplied opaque * Map&lt;String,String&gt; that was passed to {@link IndexWriter#commit(java.util.Map)} * or {@link IndexWriter#prepareCommit(java.util.Map)}.</li> * </ul> * </p> * * @lucene.experimental */ public final class SegmentInfos implements Cloneable, Iterable<SegmentInfoPerCommit> { /** * The file format version for the segments_N codec header */ public static final int VERSION_40 = 0; /** Used for the segments.gen file only! * Whenever you add a new format, make it 1 smaller (negative version logic)! */ public static final int FORMAT_SEGMENTS_GEN_CURRENT = -2; /** Used to name new segments. */ public int counter; /** Counts how often the index has been changed. */ public long version; private long generation; // generation of the "segments_N" for the next commit private long lastGeneration; // generation of the "segments_N" file we last successfully read // or wrote; this is normally the same as generation except if // there was an IOException that had interrupted a commit /** Opaque Map&lt;String, String&gt; that user can specify during IndexWriter.commit */ public Map<String,String> userData = Collections.<String,String>emptyMap(); private List<SegmentInfoPerCommit> segments = new ArrayList<SegmentInfoPerCommit>(); /** * If non-null, information about loading segments_N files * will be printed here. @see #setInfoStream. */ private static PrintStream infoStream = null; /** Sole constructor. Typically you call this and then * use {@link #read(Directory) or * #read(Directory,String)} to populate each {@link * SegmentInfoPerCommit}. Alternatively, you can add/remove your * own {@link SegmentInfoPerCommit}s. */ public SegmentInfos() { } /** Returns {@link SegmentInfoPerCommit} at the provided * index. 
*/ public SegmentInfoPerCommit info(int i) { return segments.get(i); } /** * Get the generation of the most recent commit to the * list of index files (N in the segments_N file). * * @param files -- array of file names to check */ public static long getLastCommitGeneration(String[] files) { if (files == null) { return -1; } long max = -1; for (String file : files) { if (file.startsWith(IndexFileNames.SEGMENTS) && !file.equals(IndexFileNames.SEGMENTS_GEN)) { long gen = generationFromSegmentsFileName(file); if (gen > max) { max = gen; } } } return max; } /** * Get the generation of the most recent commit to the * index in this directory (N in the segments_N file). * * @param directory -- directory to search for the latest segments_N file */ public static long getLastCommitGeneration(Directory directory) throws IOException { try { return getLastCommitGeneration(directory.listAll()); } catch (NoSuchDirectoryException nsde) { return -1; } } /** * Get the filename of the segments_N file for the most * recent commit in the list of index files. * * @param files -- array of file names to check */ public static String getLastCommitSegmentsFileName(String[] files) { return IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", getLastCommitGeneration(files)); } /** * Get the filename of the segments_N file for the most * recent commit to the index in this Directory. * * @param directory -- directory to search for the latest segments_N file */ public static String getLastCommitSegmentsFileName(Directory directory) throws IOException { return IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", getLastCommitGeneration(directory)); } /** * Get the segments_N filename in use by this segment infos. */ public String getSegmentsFileName() { return IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", lastGeneration); } /** * Parse the generation off the segments file name and * return it. 
*/
  public static long generationFromSegmentsFileName(String fileName) {
    if (fileName.equals(IndexFileNames.SEGMENTS)) {
      // Plain "segments" (no _N suffix) is generation 0.
      return 0;
    } else if (fileName.startsWith(IndexFileNames.SEGMENTS)) {
      // The _N suffix is encoded in base-36 (Character.MAX_RADIX);
      // skip the underscore separator before parsing.
      return Long.parseLong(fileName.substring(1+IndexFileNames.SEGMENTS.length()),
                            Character.MAX_RADIX);
    } else {
      throw new IllegalArgumentException("fileName \"" + fileName + "\" is not a segments file");
    }
  }

  /**
   * Get the next segments_N filename that will be written.
   */
  public String getNextSegmentFileName() {
    long nextGeneration;

    if (generation == -1) {
      // Never written before: first commit is generation 1.
      nextGeneration = 1;
    } else {
      nextGeneration = generation+1;
    }
    return IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", nextGeneration);
  }

  /**
   * Read a particular segmentFileName.  Note that this may
   * throw an IOException if a commit is in process.
   *
   * @param directory -- directory containing the segments file
   * @param segmentFileName -- segment file to load
   * @throws CorruptIndexException if the index is corrupt
   * @throws IOException if there is a low-level IO error
   */
  public final void read(Directory directory, String segmentFileName) throws IOException {
    boolean success = false;

    // Clear any previous segments:
    this.clear();

    generation = generationFromSegmentsFileName(segmentFileName);

    lastGeneration = generation;

    // ChecksumIndexInput accumulates a CRC32 over everything read, which is
    // compared against the trailing checksum below.
    ChecksumIndexInput input = new ChecksumIndexInput(directory.openInput(segmentFileName, IOContext.READ));
    try {
      final int format = input.readInt();
      if (format == CodecUtil.CODEC_MAGIC) {
        // 4.0+: codec header, then Version, NameCounter, SegCount and
        // one (SegName, SegCodec, DelGen, DeletionCount) tuple per segment.
        CodecUtil.checkHeaderNoMagic(input, "segments", VERSION_40, VERSION_40);
        version = input.readLong();
        counter = input.readInt();
        int numSegments = input.readInt();
        if (numSegments < 0) {
          throw new CorruptIndexException("invalid segment count: " + numSegments + " (resource: " + input + ")");
        }
        for(int seg=0;seg<numSegments;seg++) {
          String segName = input.readString();
          Codec codec = Codec.forName(input.readString());
          //System.out.println("SIS.read seg=" + seg + " codec=" + codec);
          // Per-segment metadata lives in its own .si file, read via the codec.
          SegmentInfo info = codec.segmentInfoFormat().getSegmentInfoReader().read(directory, segName, IOContext.READ);
          info.setCodec(codec);
          long delGen = input.readLong();
          int delCount = input.readInt();
          if (delCount < 0 || delCount > info.getDocCount()) {
            throw new CorruptIndexException("invalid deletion count: " + delCount + " (resource: " + input + ")");
          }
          add(new SegmentInfoPerCommit(info, delCount, delGen));
        }
        userData = input.readStringStringMap();
      } else {
        // Pre-4.0 index: delegate to the legacy reader, then force the
        // Lucene3x codec onto every loaded segment.
        Lucene3xSegmentInfoReader.readLegacyInfos(this, directory, input, format);
        Codec codec = Codec.forName("Lucene3x");
        for (SegmentInfoPerCommit info : this) {
          info.info.setCodec(codec);
        }
      }

      // Verify the CRC32 accumulated while reading against the stored value.
      final long checksumNow = input.getChecksum();
      final long checksumThen = input.readLong();
      if (checksumNow != checksumThen) {
        throw new CorruptIndexException("checksum mismatch in segments file (resource: " + input + ")");
      }

      success = true;
    } finally {
      if (!success) {
        // Clear any segment infos we had loaded so we
        // have a clean slate on retry:
        this.clear();
        IOUtils.closeWhileHandlingException(input);
      } else {
        input.close();
      }
    }
  }

  /** Find the latest commit ({@code segments_N file}) and
   *  load all {@link SegmentInfoPerCommit}s.
*/ public final void read(Directory directory) throws IOException { generation = lastGeneration = -1; new FindSegmentsFile(directory) { @Override protected Object doBody(String segmentFileName) throws IOException { read(directory, segmentFileName); return null; } }.run(); } // Only non-null after prepareCommit has been called and // before finishCommit is called ChecksumIndexOutput pendingSegnOutput; private static final String SEGMENT_INFO_UPGRADE_CODEC = "SegmentInfo3xUpgrade"; private static final int SEGMENT_INFO_UPGRADE_VERSION = 0; private void write(Directory directory) throws IOException { String segmentsFileName = getNextSegmentFileName(); // Always advance the generation on write: if (generation == -1) { generation = 1; } else { generation++; } ChecksumIndexOutput segnOutput = null; boolean success = false; final Set<String> upgradedSIFiles = new HashSet<String>(); try { segnOutput = new ChecksumIndexOutput(directory.createOutput(segmentsFileName, IOContext.DEFAULT)); CodecUtil.writeHeader(segnOutput, "segments", VERSION_40); segnOutput.writeLong(version); segnOutput.writeInt(counter); // write counter segnOutput.writeInt(size()); // write infos for (SegmentInfoPerCommit siPerCommit : this) { SegmentInfo si = siPerCommit.info; segnOutput.writeString(si.name); segnOutput.writeString(si.getCodec().getName()); segnOutput.writeLong(siPerCommit.getDelGen()); segnOutput.writeInt(siPerCommit.getDelCount()); assert si.dir == directory; assert siPerCommit.getDelCount() <= si.getDocCount(); // If this segment is pre-4.x, perform a one-time // "ugprade" to write the .si file for it: String version = si.getVersion(); if (version == null || StringHelper.getVersionComparator().compare(version, "4.0") < 0) { if (!segmentWasUpgraded(directory, si)) { String markerFileName = IndexFileNames.segmentFileName(si.name, "upgraded", Lucene3xSegmentInfoFormat.UPGRADED_SI_EXTENSION); si.addFile(markerFileName); final String segmentFileName = write3xInfo(directory, si, 
IOContext.DEFAULT); upgradedSIFiles.add(segmentFileName); directory.sync(Collections.singletonList(segmentFileName)); // Write separate marker file indicating upgrade // is completed. This way, if there is a JVM // kill/crash, OS crash, power loss, etc. while // writing the upgraded file, the marker file // will be missing: si.addFile(markerFileName); IndexOutput out = directory.createOutput(markerFileName, IOContext.DEFAULT); try { CodecUtil.writeHeader(out, SEGMENT_INFO_UPGRADE_CODEC, SEGMENT_INFO_UPGRADE_VERSION); } finally { out.close(); } upgradedSIFiles.add(markerFileName); directory.sync(Collections.singletonList(markerFileName)); } } } segnOutput.writeStringStringMap(userData); pendingSegnOutput = segnOutput; success = true; } finally { if (!success) { // We hit an exception above; try to close the file // but suppress any exception: IOUtils.closeWhileHandlingException(segnOutput); for(String fileName : upgradedSIFiles) { try { directory.deleteFile(fileName); } catch (Throwable t) { // Suppress so we keep throwing the original exception } } try { // Try not to leave a truncated segments_N file in // the index: directory.deleteFile(segmentsFileName); } catch (Throwable t) { // Suppress so we keep throwing the original exception } } } } private static boolean segmentWasUpgraded(Directory directory, SegmentInfo si) { // Check marker file: String markerFileName = IndexFileNames.segmentFileName(si.name, "upgraded", Lucene3xSegmentInfoFormat.UPGRADED_SI_EXTENSION); IndexInput in = null; try { in = directory.openInput(markerFileName, IOContext.READONCE); if (CodecUtil.checkHeader(in, SEGMENT_INFO_UPGRADE_CODEC, SEGMENT_INFO_UPGRADE_VERSION, SEGMENT_INFO_UPGRADE_VERSION) == 0) { return true; } } catch (IOException ioe) { // Ignore: if something is wrong w/ the marker file, // we will just upgrade again } finally { if (in != null) { IOUtils.closeWhileHandlingException(in); } } return false; } @Deprecated public static String write3xInfo(Directory dir, SegmentInfo 
si, IOContext context) throws IOException { // NOTE: this is NOT how 3.x is really written... String fileName = IndexFileNames.segmentFileName(si.name, "", Lucene3xSegmentInfoFormat.UPGRADED_SI_EXTENSION); si.addFile(fileName); //System.out.println("UPGRADE write " + fileName); boolean success = false; IndexOutput output = dir.createOutput(fileName, context); try { // we are about to write this SI in 3.x format, dropping all codec information, etc. // so it had better be a 3.x segment or you will get very confusing errors later. assert si.getCodec() instanceof Lucene3xCodec : "broken test, trying to mix preflex with other codecs"; CodecUtil.writeHeader(output, Lucene3xSegmentInfoFormat.UPGRADED_SI_CODEC_NAME, Lucene3xSegmentInfoFormat.UPGRADED_SI_VERSION_CURRENT); // Write the Lucene version that created this segment, since 3.1 output.writeString(si.getVersion()); output.writeInt(si.getDocCount()); output.writeStringStringMap(si.attributes()); output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO)); output.writeStringStringMap(si.getDiagnostics()); output.writeStringSet(si.files()); output.close(); success = true; } finally { if (!success) { IOUtils.closeWhileHandlingException(output); try { si.dir.deleteFile(fileName); } catch (Throwable t) { // Suppress so we keep throwing the original exception } } } return fileName; } /** * Returns a copy of this instance, also copying each * SegmentInfo. */ @Override public SegmentInfos clone() { try { final SegmentInfos sis = (SegmentInfos) super.clone(); // deep clone, first recreate all collections: sis.segments = new ArrayList<SegmentInfoPerCommit>(size()); for(final SegmentInfoPerCommit info : this) { assert info.info.getCodec() != null; // dont directly access segments, use add method!!! 
sis.add(info.clone()); } sis.userData = new HashMap<String,String>(userData); return sis; } catch (CloneNotSupportedException e) { throw new RuntimeException("should not happen", e); } } /** * version number when this SegmentInfos was generated. */ public long getVersion() { return version; } /** Returns current generation. */ public long getGeneration() { return generation; } /** Returns last succesfully read or written generation. */ public long getLastGeneration() { return lastGeneration; } /** If non-null, information about retries when loading * the segments file will be printed to this. */ public static void setInfoStream(PrintStream infoStream) { SegmentInfos.infoStream = infoStream; } /* Advanced configuration of retry logic in loading segments_N file */ private static int defaultGenLookaheadCount = 10; /** * Advanced: set how many times to try incrementing the * gen when loading the segments file. This only runs if * the primary (listing directory) and secondary (opening * segments.gen file) methods fail to find the segments * file. * * @lucene.experimental */ public static void setDefaultGenLookaheadCount(int count) { defaultGenLookaheadCount = count; } /** * Returns the {@code defaultGenLookaheadCount}. * * @see #setDefaultGenLookaheadCount * * @lucene.experimental */ public static int getDefaultGenLookahedCount() { return defaultGenLookaheadCount; } /** * Returns {@code infoStream}. * * @see #setInfoStream */ public static PrintStream getInfoStream() { return infoStream; } /** * Prints the given message to the infoStream. Note, this method does not * check for null infoStream. It assumes this check has been performed by the * caller, which is recommended to avoid the (usually) expensive message * creation. */ private static void message(String message) { infoStream.println("SIS [" + Thread.currentThread().getName() + "]: " + message); } /** * Utility class for executing code that needs to do * something with the current segments file. 
   This is
   * necessary with lock-less commits because from the time
   * you locate the current segments file name, until you
   * actually open it, read its contents, or check modified
   * time, etc., it could have been deleted due to a writer
   * commit finishing.
   */
  public abstract static class FindSegmentsFile {

    /** Directory scanned for segments_N files. */
    final Directory directory;

    /** Sole constructor. */
    public FindSegmentsFile(Directory directory) {
      this.directory = directory;
    }

    /** Locate the most recent {@code segments} file and
     *  run {@link #doBody} on it. */
    public Object run() throws IOException {
      return run(null);
    }

    /** Run {@link #doBody} on the provided commit. */
    public Object run(IndexCommit commit) throws IOException {
      if (commit != null) {
        if (directory != commit.getDirectory())
          throw new IOException("the specified commit does not match the specified Directory");
        return doBody(commit.getSegmentsFileName());
      }

      String segmentFileName = null;
      long lastGen = -1;
      long gen = 0;
      int genLookaheadCount = 0;
      IOException exc = null;
      int retryCount = 0;

      boolean useFirstMethod = true;

      // Loop until we succeed in calling doBody() without
      // hitting an IOException. An IOException most likely
      // means a commit was in process and has finished, in
      // the time it took us to load the now-old infos files
      // (and segments files). It's also possible it's a
      // true error (corrupt index). To distinguish these,
      // on each retry we must see "forward progress" on
      // which generation we are trying to load. If we
      // don't, then the original error is real and we throw
      // it.

      // We have three methods for determining the current
      // generation. We try the first two in parallel (when
      // useFirstMethod is true), and fall back to the third
      // when necessary.

      while(true) {

        if (useFirstMethod) {

          // List the directory and use the highest
          // segments_N file.  This method works well as long
          // as there is no stale caching on the directory
          // contents (NOTE: NFS clients often have such stale
          // caching):
          String[] files = null;

          long genA = -1;

          files = directory.listAll();

          if (files != null) {
            genA = getLastCommitGeneration(files);
          }

          if (infoStream != null) {
            message("directory listing genA=" + genA);
          }

          // Also open segments.gen and read its
          // contents.  Then we take the larger of the two
          // gens.  This way, if either approach is hitting
          // a stale cache (NFS) we have a better chance of
          // getting the right generation.
          long genB = -1;
          IndexInput genInput = null;
          try {
            genInput = directory.openInput(IndexFileNames.SEGMENTS_GEN, IOContext.READONCE);
          } catch (FileNotFoundException e) {
            if (infoStream != null) {
              message("segments.gen open: FileNotFoundException " + e);
            }
          } catch (IOException e) {
            if (infoStream != null) {
              message("segments.gen open: IOException " + e);
            }
          }

          if (genInput != null) {
            try {
              int version = genInput.readInt();
              if (version == FORMAT_SEGMENTS_GEN_CURRENT) {
                // segments.gen records the generation twice; both copies
                // must agree or the file is considered torn/stale.
                long gen0 = genInput.readLong();
                long gen1 = genInput.readLong();
                if (infoStream != null) {
                  message("fallback check: " + gen0 + "; " + gen1);
                }
                if (gen0 == gen1) {
                  // The file is consistent.
                  genB = gen0;
                }
              } else {
                throw new IndexFormatTooNewException(genInput, version, FORMAT_SEGMENTS_GEN_CURRENT, FORMAT_SEGMENTS_GEN_CURRENT);
              }
            } catch (IOException err2) {
              // rethrow any format exception
              if (err2 instanceof CorruptIndexException) throw err2;
            } finally {
              genInput.close();
            }
          }

          if (infoStream != null) {
            message(IndexFileNames.SEGMENTS_GEN + " check: genB=" + genB);
          }

          // Pick the larger of the two gen's:
          gen = Math.max(genA, genB);

          if (gen == -1) {
            // Neither approach found a generation
            throw new IndexNotFoundException("no segments* file found in " + directory + ": files: " + Arrays.toString(files));
          }
        }

        if (useFirstMethod && lastGen == gen && retryCount >= 2) {
          // Give up on first method -- this is 3rd cycle on
          // listing directory and checking gen file to
          // attempt to locate the segments file.
          useFirstMethod = false;
        }

        // Second method: since both directory cache and
        // file contents cache seem to be stale, just
        // advance the generation.
        if (!useFirstMethod) {
          if (genLookaheadCount < defaultGenLookaheadCount) {
            gen++;
            genLookaheadCount++;
            if (infoStream != null) {
              message("look ahead increment gen to " + gen);
            }
          } else {
            // All attempts have failed -- throw first exc:
            throw exc;
          }
        } else if (lastGen == gen) {
          // This means we're about to try the same
          // segments_N last tried.
          retryCount++;
        } else {
          // Segment file has advanced since our last loop
          // (we made "progress"), so reset retryCount:
          retryCount = 0;
        }

        lastGen = gen;

        segmentFileName = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen);

        try {
          Object v = doBody(segmentFileName);
          if (infoStream != null) {
            message("success on " + segmentFileName);
          }
          return v;
        } catch (IOException err) {

          // Save the original root cause:
          if (exc == null) {
            exc = err;
          }

          if (infoStream != null) {
            message("primary Exception on '" + segmentFileName + "': " + err + "'; will retry: retryCount=" + retryCount + "; gen = " + gen);
          }

          if (gen > 1 && useFirstMethod && retryCount == 1) {

            // This is our second time trying this same segments
            // file (because retryCount is 1), and, there is
            // possibly a segments_(N-1) (because gen > 1).
            // So, check if the segments_(N-1) exists and
            // try it if so:
            String prevSegmentFileName = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen-1);

            final boolean prevExists;
            prevExists = directory.fileExists(prevSegmentFileName);

            if (prevExists) {
              if (infoStream != null) {
                message("fallback to prior segment file '" + prevSegmentFileName + "'");
              }
              try {
                Object v = doBody(prevSegmentFileName);
                if (infoStream != null) {
                  message("success on fallback " + prevSegmentFileName);
                }
                return v;
              } catch (IOException err2) {
                if (infoStream != null) {
                  message("secondary Exception on '" + prevSegmentFileName + "': " + err2 + "'; will retry");
                }
              }
            }
          }
        }
      }
    }

    /**
     * Subclass must implement this.  The assumption is an
     * IOException will be thrown if something goes wrong
     * during the processing that could have been caused by
     * a writer committing.
     */
    protected abstract Object doBody(String segmentFileName) throws IOException;
  }

  // Carry over generation numbers from another SegmentInfos
  void updateGeneration(SegmentInfos other) {
    lastGeneration = other.lastGeneration;
    generation = other.generation;
  }

  // Aborts an in-flight two-phase commit started by prepareCommit():
  // closes the pending output and best-effort deletes the partial segments_N.
  final void rollbackCommit(Directory dir) {
    if (pendingSegnOutput != null) {
      // Suppress so we keep throwing the original exception
      // in our caller
      IOUtils.closeWhileHandlingException(pendingSegnOutput);
      pendingSegnOutput = null;

      // Must carefully compute fileName from "generation"
      // since lastGeneration isn't incremented:
      final String segmentFileName = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", generation);

      // Suppress so we keep throwing the original exception
      // in our caller
      IOUtils.deleteFilesIgnoringExceptions(dir, segmentFileName);
    }
  }

  /** Call this to start a commit.  This writes the new
   *  segments file, but writes an invalid checksum at the
   *  end, so that it is not visible to readers.  Once this
   *  is called you must call {@link #finishCommit} to complete
   *  the commit or {@link #rollbackCommit} to abort it.
   *  <p>
   *  Note: {@link #changed()} should be called prior to this
   *  method if changes have been made to this {@link SegmentInfos} instance
   *  </p>
   **/
  final void prepareCommit(Directory dir) throws IOException {
    if (pendingSegnOutput != null) {
      throw new IllegalStateException("prepareCommit was already called");
    }
    write(dir);
  }

  /** Returns all file names referenced by SegmentInfo
   *  instances matching the provided Directory (ie files
   *  associated with any "external" segments are skipped).
   *  The returned collection is recomputed on each
   *  invocation.
   */
  public Collection<String> files(Directory dir, boolean includeSegmentsFile) throws IOException {
    HashSet<String> files = new HashSet<String>();
    if (includeSegmentsFile) {
      final String segmentFileName = getSegmentsFileName();
      if (segmentFileName != null) {
        /*
         * TODO: if lastGen == -1 we might get null here; it seems wrong to
         * add null to the files set
         */
        files.add(segmentFileName);
      }
    }
    final int size = size();
    for(int i=0;i<size;i++) {
      final SegmentInfoPerCommit info = info(i);
      assert info.info.dir == dir;
      // Skip segments living in a different ("external") Directory.
      if (info.info.dir == dir) {
        files.addAll(info.files());
      }
    }

    return files;
  }

  // Second phase of the two-phase commit: patches the checksum of the
  // pending segments_N written by prepareCommit(), syncs it to stable
  // storage, then best-effort updates segments.gen.
  final void finishCommit(Directory dir) throws IOException {
    if (pendingSegnOutput == null) {
      throw new IllegalStateException("prepareCommit was not called");
    }
    boolean success = false;
    try {
      pendingSegnOutput.finishCommit();
      success = true;
    } finally {
      if (!success) {
        // Closes pendingSegnOutput & deletes partial segments_N:
        IOUtils.closeWhileHandlingException(pendingSegnOutput);
        rollbackCommit(dir);
      } else {
        success = false;
        try {
          pendingSegnOutput.close();
          success = true;
        } finally {
          if (!success) {
            final String segmentFileName = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", generation);
            IOUtils.deleteFilesIgnoringExceptions(dir, segmentFileName);
          }
          pendingSegnOutput = null;
        }
      }
    }

    // NOTE: if we crash here, we have left a segments_N
    // file in the directory in a possibly corrupt state (if
    // some bytes made it to stable storage and others
    // didn't).  But, the segments_N file includes checksum
    // at the end, which should catch this case.  So when a
    // reader tries to read it, it will throw a
    // CorruptIndexException, which should cause the retry
    // logic in SegmentInfos to kick in and load the last
    // good (previous) segments_N-1 file.

    final String fileName = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", generation);
    success = false;
    try {
      dir.sync(Collections.singleton(fileName));
      success = true;
    } finally {
      if (!success) {
        try {
          dir.deleteFile(fileName);
        } catch (Throwable t) {
          // Suppress so we keep throwing the original exception
        }
      }
    }

    lastGeneration = generation;

    try {
      IndexOutput genOutput = dir.createOutput(IndexFileNames.SEGMENTS_GEN, IOContext.READONCE);
      try {
        genOutput.writeInt(FORMAT_SEGMENTS_GEN_CURRENT);
        // Generation is written twice so readers can detect a torn write
        // (see the gen0 == gen1 consistency check in FindSegmentsFile).
        genOutput.writeLong(generation);
        genOutput.writeLong(generation);
      } finally {
        genOutput.close();
        dir.sync(Collections.singleton(IndexFileNames.SEGMENTS_GEN));
      }
    } catch (Throwable t) {
      // It's OK if we fail to write this file since it's
      // used only as one of the retry fallbacks.
      try {
        dir.deleteFile(IndexFileNames.SEGMENTS_GEN);
      } catch (Throwable t2) {
        // Ignore; this file is only used in a retry
        // fallback on init.
      }
      if (t instanceof ThreadInterruptedException) {
        throw (ThreadInterruptedException) t;
      }
    }
  }

  /** Writes & syncs to the Directory dir, taking care to
   *  remove the segments file on exception
   *  <p>
   *  Note: {@link #changed()} should be called prior to this
   *  method if changes have been made to this {@link SegmentInfos} instance
   *  </p>
   **/
  final void commit(Directory dir) throws IOException {
    prepareCommit(dir);
    finishCommit(dir);
  }

  /** Returns readable description of this segment. */
  public String toString(Directory directory) {
    StringBuilder buffer = new StringBuilder();
    buffer.append(getSegmentsFileName()).append(": ");
    final int count = size();
    for(int i = 0; i < count; i++) {
      if (i > 0) {
        buffer.append(' ');
      }
      final SegmentInfoPerCommit info = info(i);
      buffer.append(info.toString(directory, 0));
    }
    return buffer.toString();
  }

  /** Return {@code userData} saved with this commit.
   *
   * @see IndexWriter#commit(Map)
   */
  public Map<String,String> getUserData() {
    return userData;
  }

  // Replaces the commit user data; null maps to an immutable empty map so
  // getUserData() never returns null.
  void setUserData(Map<String,String> data) {
    if (data == null) {
      userData = Collections.<String,String>emptyMap();
    } else {
      userData = data;
    }
  }

  /** Replaces all segments in this instance, but keeps
   *  generation, version, counter so that future commits
   *  remain write once.
   */
  void replace(SegmentInfos other) {
    rollbackSegmentInfos(other.asList());
    lastGeneration = other.lastGeneration;
  }

  /** Returns sum of all segment's docCounts.  Note that
   *  this does not include deletions */
  public int totalDocCount() {
    int count = 0;
    for(SegmentInfoPerCommit info : this) {
      count += info.info.getDocCount();
    }
    return count;
  }

  /** Call this before committing if changes have been made to the
   *  segments. */
  public void changed() {
    version++;
  }

  /** applies all changes caused by committing a merge to this SegmentInfos */
  void applyMergeChanges(MergePolicy.OneMerge merge, boolean dropSegment) {
    final Set<SegmentInfoPerCommit> mergedAway = new HashSet<SegmentInfoPerCommit>(merge.segments);
    boolean inserted = false;
    int newSegIdx = 0;
    // Single in-place pass: compact the list, replacing the first
    // merged-away segment with merge.info (unless dropSegment) and
    // shifting the survivors left.
    for (int segIdx = 0, cnt = segments.size(); segIdx < cnt; segIdx++) {
      assert segIdx >= newSegIdx;
      final SegmentInfoPerCommit info = segments.get(segIdx);
      if (mergedAway.contains(info)) {
        if (!inserted && !dropSegment) {
          segments.set(segIdx, merge.info);
          inserted = true;
          newSegIdx++;
        }
      } else {
        segments.set(newSegIdx, info);
        newSegIdx++;
      }
    }

    // the rest of the segments in list are duplicates, so don't remove from map, only list!
    segments.subList(newSegIdx, segments.size()).clear();

    // Either we found place to insert segment, or, we did
    // not, but only because all segments we merged became
    // deleted while we are merging, in which case it should
    // be the case that the new segment is also all deleted,
    // we insert it at the beginning if it should not be dropped:
    if (!inserted && !dropSegment) {
      segments.add(0, merge.info);
    }
  }

  // Deep-copies the current segment list (each entry cloned) so a rollback
  // can restore the pre-change state via rollbackSegmentInfos().
  List<SegmentInfoPerCommit> createBackupSegmentInfos() {
    final List<SegmentInfoPerCommit> list = new ArrayList<SegmentInfoPerCommit>(size());
    for(final SegmentInfoPerCommit info : this) {
      assert info.info.getCodec() != null;
      list.add(info.clone());
    }
    return list;
  }

  // Replaces the current segment list wholesale with the provided snapshot.
  void rollbackSegmentInfos(List<SegmentInfoPerCommit> infos) {
    this.clear();
    this.addAll(infos);
  }

  /** Returns an <b>unmodifiable</b> {@link Iterator} of contained segments in order. */
  // @Override (comment out until Java 6)
  public Iterator<SegmentInfoPerCommit> iterator() {
    return asList().iterator();
  }

  /** Returns all contained segments as an <b>unmodifiable</b> {@link List} view. */
  public List<SegmentInfoPerCommit> asList() {
    return Collections.unmodifiableList(segments);
  }

  /** Returns number of {@link SegmentInfoPerCommit}s. */
  public int size() {
    return segments.size();
  }

  /** Appends the provided {@link SegmentInfoPerCommit}. */
  public void add(SegmentInfoPerCommit si) {
    segments.add(si);
  }

  /** Appends the provided {@link SegmentInfoPerCommit}s. */
  public void addAll(Iterable<SegmentInfoPerCommit> sis) {
    for (final SegmentInfoPerCommit si : sis) {
      this.add(si);
    }
  }

  /** Clear all {@link SegmentInfoPerCommit}s. */
  public void clear() {
    segments.clear();
  }

  /** Remove the provided {@link SegmentInfoPerCommit}.
   *
   * <p><b>WARNING</b>: O(N) cost */
  public void remove(SegmentInfoPerCommit si) {
    segments.remove(si);
  }

  /** Remove the {@link SegmentInfoPerCommit} at the
   * provided index.
   *
   * <p><b>WARNING</b>: O(N) cost */
  void remove(int index) {
    segments.remove(index);
  }

  /** Return true if the provided {@link
   *  SegmentInfoPerCommit} is contained.
   *
   * <p><b>WARNING</b>: O(N) cost */
  boolean contains(SegmentInfoPerCommit si) {
    return segments.contains(si);
  }

  /** Returns index of the provided {@link
   *  SegmentInfoPerCommit}.
   *
   * <p><b>WARNING</b>: O(N) cost */
  int indexOf(SegmentInfoPerCommit si) {
    return segments.indexOf(si);
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cluster; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteCompute; import org.apache.ignite.IgniteException; import org.apache.ignite.IgniteLogger; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.events.DiscoveryEvent; import org.apache.ignite.events.Event; import org.apache.ignite.internal.GridKernalContext; import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.IgniteInterruptedCheckedException; import org.apache.ignite.internal.cluster.ClusterGroupAdapter; import org.apache.ignite.internal.managers.discovery.CustomEventListener; import org.apache.ignite.internal.managers.eventstorage.GridLocalEventListener; import org.apache.ignite.internal.pagemem.store.IgnitePageStoreManager; import 
org.apache.ignite.internal.processors.GridProcessorAdapter; import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion; import org.apache.ignite.internal.processors.cache.ChangeGlobalStateMessage; import org.apache.ignite.internal.processors.cache.ClusterState; import org.apache.ignite.internal.processors.cache.DynamicCacheChangeBatch; import org.apache.ignite.internal.processors.cache.DynamicCacheChangeRequest; import org.apache.ignite.internal.processors.cache.ExchangeActions; import org.apache.ignite.internal.processors.cache.GridCacheProcessor; import org.apache.ignite.internal.processors.cache.GridCacheSharedContext; import org.apache.ignite.internal.processors.cache.GridChangeGlobalStateMessageResponse; import org.apache.ignite.internal.util.future.GridFinishedFuture; import org.apache.ignite.internal.util.future.GridFutureAdapter; import org.apache.ignite.internal.util.tostring.GridToStringExclude; import org.apache.ignite.internal.util.tostring.GridToStringInclude; import org.apache.ignite.internal.util.typedef.CI1; import org.apache.ignite.internal.util.typedef.CI2; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.CU; import org.apache.ignite.internal.util.typedef.internal.S; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.lang.IgniteFuture; import org.apache.ignite.lang.IgniteRunnable; import org.apache.ignite.resources.IgniteInstanceResource; import org.apache.ignite.spi.discovery.DiscoveryDataBag; import org.jetbrains.annotations.Nullable; import static org.apache.ignite.events.EventType.EVT_NODE_FAILED; import static org.apache.ignite.events.EventType.EVT_NODE_LEFT; import static org.apache.ignite.internal.managers.communication.GridIoPolicy.SYSTEM_POOL; import static org.apache.ignite.internal.processors.cache.ClusterState.ACTIVE; import static org.apache.ignite.internal.processors.cache.ClusterState.INACTIVE; import static 
org.apache.ignite.internal.processors.cache.ClusterState.TRANSITION;

/**
 * Processor that coordinates cluster-wide activation/deactivation:
 * it sends and handles {@link ChangeGlobalStateMessage}s and tracks the
 * global {@link ClusterState} on the local node.
 */
public class GridClusterStateProcessor extends GridProcessorAdapter {
    /** Global status. */
    private volatile ClusterState globalState;

    /** Action context. */
    private volatile ChangeGlobalStateContext lastCgsCtx;

    /** Local action future. */
    private final AtomicReference<GridChangeGlobalStateFuture> cgsLocFut = new AtomicReference<>();

    /** Process. */
    @GridToStringExclude
    private GridCacheProcessor cacheProc;

    /** Shared context. */
    @GridToStringExclude
    private GridCacheSharedContext<?, ?> sharedCtx;

    //todo may be add init latch

    /** Listener notifying the pending state-change future about node failures/exits. */
    private final GridLocalEventListener lsr = new GridLocalEventListener() {
        @Override public void onEvent(Event evt) {
            assert evt != null;

            final DiscoveryEvent e = (DiscoveryEvent)evt;

            assert e.type() == EVT_NODE_LEFT || e.type() == EVT_NODE_FAILED : this;

            final GridChangeGlobalStateFuture f = cgsLocFut.get();

            // Defer delivery until the future finished collecting its
            // "remaining" node set (initFut), to avoid racing setRemaining().
            if (f != null)
                f.initFut.listen(new CI1<IgniteInternalFuture>() {
                    @Override public void apply(IgniteInternalFuture fut) {
                        f.onDiscoveryEvent(e);
                    }
                });
        }
    };

    /**
     * @param ctx Kernal context.
     */
    public GridClusterStateProcessor(GridKernalContext ctx) {
        super(ctx);
    }

    /** {@inheritDoc} */
    @Override public void start(boolean activeOnStart) throws IgniteCheckedException {
        super.start(activeOnStart);

        globalState = activeOnStart ?
            ACTIVE : INACTIVE;

        cacheProc = ctx.cache();
        sharedCtx = cacheProc.context();

        // Responses from remote nodes acknowledging a state change.
        sharedCtx.io().addCacheHandler(0, GridChangeGlobalStateMessageResponse.class,
            new CI2<UUID, GridChangeGlobalStateMessageResponse>() {
                @Override public void apply(UUID nodeId, GridChangeGlobalStateMessageResponse msg) {
                    processChangeGlobalStateResponse(nodeId, msg);
                }
            });

        // Discovery custom event carrying the activate/deactivate request.
        ctx.discovery().setCustomEventListener(
            ChangeGlobalStateMessage.class, new CustomEventListener<ChangeGlobalStateMessage>() {
                @Override public void onCustomEvent(
                    AffinityTopologyVersion topVer, ClusterNode snd, ChangeGlobalStateMessage msg) {
                    assert topVer != null;
                    assert snd != null;
                    assert msg != null;

                    boolean activate = msg.activate();

                    ChangeGlobalStateContext actx = lastCgsCtx;

                    if (actx != null && globalState == TRANSITION) {
                        // A state change is already in flight: reject the
                        // concurrent request and fail the local future, if any.
                        GridChangeGlobalStateFuture f = cgsLocFut.get();

                        if (log.isDebugEnabled())
                            log.debug("Concurrent " + prettyStr(activate) + " [id=" +
                                ctx.localNodeId() + " topVer=" + topVer + " actx=" + actx + ", msg=" + msg + "]");

                        if (f != null && f.requestId.equals(msg.requestId()))
                            f.onDone(new IgniteCheckedException(
                                "Concurrent change state, now in progress=" + (activate) +
                                    ", initiatingNodeId=" + actx.initiatingNodeId +
                                    ", you try=" + (prettyStr(activate)) + ", locNodeId=" + ctx.localNodeId()
                            ));

                        msg.concurrentChangeState();
                    }
                    else {
                        if (log.isInfoEnabled())
                            log.info("Create " + prettyStr(activate) + " context [id=" +
                                ctx.localNodeId() + " topVer=" + topVer +
                                ", reqId=" + msg.requestId() + ", initiatingNodeId=" + msg.initiatorNodeId() + "]");

                        lastCgsCtx = new ChangeGlobalStateContext(
                            msg.requestId(), msg.initiatorNodeId(), msg.getDynamicCacheChangeBatch(), msg.activate());

                        globalState = TRANSITION;
                    }
                }
            });

        ctx.event().addLocalEventListener(lsr, EVT_NODE_LEFT, EVT_NODE_FAILED);
    }

    /** {@inheritDoc} */
    @Override public void stop(boolean cancel) throws IgniteCheckedException {
        super.stop(cancel);

        sharedCtx.io().removeHandler(false, 0, GridChangeGlobalStateMessageResponse.class);
        ctx.event().removeLocalEventListener(lsr, EVT_NODE_LEFT, EVT_NODE_FAILED);

        IgniteCheckedException stopErr = new IgniteInterruptedCheckedException(
            "Node is stopping: " + ctx.igniteInstanceName());

        GridChangeGlobalStateFuture f = cgsLocFut.get();

        if (f != null)
            f.onDone(stopErr);

        cgsLocFut.set(null);
    }

    /** {@inheritDoc} */
    @Nullable @Override public DiscoveryDataExchangeType discoveryDataType() {
        return DiscoveryDataExchangeType.STATE_PROC;
    }

    /** {@inheritDoc} */
    @Override public void collectGridNodeData(DiscoveryDataBag dataBag) {
        dataBag.addGridCommonData(DiscoveryDataExchangeType.STATE_PROC.ordinal(), globalState);
    }

    /** {@inheritDoc} */
    @Override public void onGridDataReceived(DiscoveryDataBag.GridDiscoveryData data) {
        ClusterState state = (ClusterState)data.commonData();

        if (state != null)
            globalState = state;
    }

    /**
     * Initiates a cluster-wide state change to active ({@code true}) or
     * inactive ({@code false}). Returns a future completed when all nodes
     * acknowledged the change; completes immediately if the cluster is
     * already in the requested state.
     */
    public IgniteInternalFuture<?> changeGlobalState(final boolean activate) {
        if (cacheProc.transactions().tx() != null || sharedCtx.lockedTopologyVersion(null) != null)
            throw new IgniteException("Cannot " + prettyStr(activate) + " cluster, because cache locked on transaction.");

        if ((globalState == ACTIVE && activate) || (this.globalState == INACTIVE && !activate))
            return new GridFinishedFuture<>();

        final UUID requestId = UUID.randomUUID();

        final GridChangeGlobalStateFuture cgsFut = new GridChangeGlobalStateFuture(requestId, activate, ctx);

        // Only one local state-change request may be in flight.
        if (!cgsLocFut.compareAndSet(null, cgsFut)) {
            GridChangeGlobalStateFuture locF = cgsLocFut.get();

            if (locF.activate == activate)
                return locF;
            else
                return new GridFinishedFuture<>(new IgniteException(
                    "fail " + prettyStr(activate) + ", because now in progress" + prettyStr(locF.activate)));
        }

        try {
            if (ctx.clientNode()) {
                // Clients delegate the request to a server node via a compute job.
                AffinityTopologyVersion topVer = ctx.discovery().topologyVersionEx();

                IgniteCompute comp = ((ClusterGroupAdapter)ctx.cluster().get().forServers())
                    .compute().withAsync();

                if (log.isInfoEnabled())
                    log.info("Send " + prettyStr(activate) + " request from client node [id=" +
                        ctx.localNodeId() + " topVer=" + topVer + " ]");

                comp.run(new ClientChangeGlobalStateComputeRequest(activate));

                comp.future().listen(new CI1<IgniteFuture>() {
                    @Override public void apply(IgniteFuture fut) {
                        try {
                            fut.get();

                            cgsFut.onDone();
                        }
                        catch (Exception e) {
                            cgsFut.onDone(e);
                        }
                    }
                });
            }
            else {
                // Servers broadcast the request as a discovery custom event,
                // bundled with start/stop requests for all caches.
                List<DynamicCacheChangeRequest> reqs = new ArrayList<>();

                DynamicCacheChangeRequest changeGlobalStateReq = new DynamicCacheChangeRequest(
                    requestId, activate ? ACTIVE : INACTIVE, ctx.localNodeId());

                reqs.add(changeGlobalStateReq);

                reqs.addAll(activate ? cacheProc.startAllCachesRequests() : cacheProc.stopAllCachesRequests());

                ChangeGlobalStateMessage changeGlobalStateMsg = new ChangeGlobalStateMessage(
                    requestId, ctx.localNodeId(), activate, new DynamicCacheChangeBatch(reqs));

                try {
                    ctx.discovery().sendCustomEvent(changeGlobalStateMsg);

                    if (ctx.isStopping())
                        cgsFut.onDone(new IgniteCheckedException("Failed to execute " + prettyStr(activate) + " request, " +
                            "node is stopping."));
                }
                catch (IgniteCheckedException e) {
                    log.error("Fail create or send change global state request." + cgsFut, e);

                    cgsFut.onDone(e);
                }
            }
        }
        catch (IgniteCheckedException e) {
            log.error("Fail create or send change global state request." + cgsFut, e);

            cgsFut.onDone(e);
        }

        return cgsFut;
    }

    /**
     * Returns whether the cluster is (or, mid-transition, is about to be)
     * active: during TRANSITION the answer anticipates the target state.
     */
    public boolean active() {
        ChangeGlobalStateContext actx = lastCgsCtx;

        if (actx != null && !actx.activate && globalState == TRANSITION)
            return true;

        if (actx != null && actx.activate && globalState == TRANSITION)
            return false;

        return globalState == ACTIVE;
    }

    /**
     * @param exchActions Requests.
     * @param topVer Exchange topology version.
     */
    public boolean changeGlobalState(
        ExchangeActions exchActions,
        AffinityTopologyVersion topVer
    ) {
        assert exchActions != null;
        assert topVer != null;

        if (exchActions.newClusterState() != null) {
            ChangeGlobalStateContext cgsCtx = lastCgsCtx;

            assert cgsCtx != null : exchActions;

            cgsCtx.topologyVersion(topVer);

            return true;
        }

        return false;
    }

    /**
     * Invoke from exchange future.
     */
    public Exception onChangeGlobalState() {
        GridChangeGlobalStateFuture f = cgsLocFut.get();

        ChangeGlobalStateContext cgsCtx = lastCgsCtx;

        assert cgsCtx != null;

        if (f != null)
            f.setRemaining(cgsCtx.topVer);

        return cgsCtx.activate ? onActivate(cgsCtx) : onDeActivate(cgsCtx);
    }

    /**
     * Called when some nodes reported failures: reverts a failed activation
     * and fails the local future with the collected exceptions.
     *
     * @param exs Exs.
     */
    public void onFullResponseMessage(Map<UUID, Exception> exs) {
        assert !F.isEmpty(exs);

        ChangeGlobalStateContext actx = lastCgsCtx;

        actx.setFail();

        // Revert change if activation request fail.
        if (actx.activate) {
            try {
                cacheProc.onKernalStopCaches(true);

                cacheProc.stopCaches(true);

                sharedCtx.affinity().removeAllCacheInfo();

                if (!ctx.clientNode()) {
                    sharedCtx.database().onDeActivate(ctx);

                    if (sharedCtx.pageStore() != null)
                        sharedCtx.pageStore().onDeActivate(ctx);

                    if (sharedCtx.wal() != null)
                        sharedCtx.wal().onDeActivate(ctx);
                }
            }
            catch (Exception e) {
                for (Map.Entry<UUID, Exception> entry : exs.entrySet())
                    e.addSuppressed(entry.getValue());

                log.error("Fail while revert activation request changes", e);
            }
        }
        else {
            //todo revert change if deactivate request fail
        }

        // Fall back to the pre-request state.
        globalState = actx.activate ?
INACTIVE : ACTIVE;

        GridChangeGlobalStateFuture af = cgsLocFut.get();

        if (af != null && af.requestId.equals(actx.requestId)) {
            IgniteCheckedException e = new IgniteCheckedException("see suppressed");

            for (Map.Entry<UUID, Exception> entry : exs.entrySet())
                e.addSuppressed(entry.getValue());

            af.onDone(e);
        }
    }

    /**
     * First (local) phase of activation: brings up page store, WAL and
     * database, then initializes per-cache storage. Returns {@code null}
     * on success or the failure for the exchange to propagate.
     * NOTE(review): on failure the database lock taken above is released
     * here; on success it is presumably released elsewhere — confirm.
     */
    private Exception onActivate(ChangeGlobalStateContext cgsCtx) {
        final boolean client = ctx.clientNode();

        if (log.isInfoEnabled())
            log.info("Start activation process [nodeId=" + this.ctx.localNodeId() + ", client=" + client +
                ", topVer=" + cgsCtx.topVer + "]");

        Collection<CacheConfiguration> cfgs = new ArrayList<>();

        for (DynamicCacheChangeRequest req : cgsCtx.batch.requests()) {
            if (req.startCacheConfiguration() != null)
                cfgs.add(req.startCacheConfiguration());
        }

        try {
            if (!client) {
                sharedCtx.database().lock();

                IgnitePageStoreManager pageStore = sharedCtx.pageStore();

                if (pageStore != null)
                    pageStore.onActivate(ctx);

                if (sharedCtx.wal() != null)
                    sharedCtx.wal().onActivate(ctx);

                sharedCtx.database().initDataBase();

                // System caches are initialized before user caches.
                for (CacheConfiguration cfg : cfgs) {
                    if (CU.isSystemCache(cfg.getName()))
                        if (pageStore != null)
                            pageStore.initializeForCache(cfg);
                }

                for (CacheConfiguration cfg : cfgs) {
                    if (!CU.isSystemCache(cfg.getName()))
                        if (pageStore != null)
                            pageStore.initializeForCache(cfg);
                }

                sharedCtx.database().onActivate(ctx);
            }

            if (log.isInfoEnabled())
                log.info("Success activate wal, dataBase, pageStore [nodeId=" + ctx.localNodeId() + ", client=" + client +
                    ", topVer=" + cgsCtx.topVer + "]");

            return null;
        }
        catch (Exception e) {
            log.error("Fail activate wal, dataBase, pageStore [nodeId=" + ctx.localNodeId() + ", client=" + client +
                ", topVer=" + cgsCtx.topVer + "]", e);

            if (!ctx.clientNode())
                sharedCtx.database().unLock();

            return e;
        }
    }

    /**
     * First (local) phase of deactivation: stops data structures and
     * services. Returns {@code null} on success or the failure for the
     * exchange to propagate.
     */
    public Exception onDeActivate(ChangeGlobalStateContext cgsCtx) {
        final boolean client = ctx.clientNode();

        if (log.isInfoEnabled())
            log.info("Start deactivate process [id=" + ctx.localNodeId() + ", client=" + client + ", topVer=" + cgsCtx.topVer + "]");

        try {
            ctx.dataStructures().onDeActivate(ctx);

            ctx.service().onDeActivate(ctx);

            if (log.isInfoEnabled())
                log.info("Success deactivate services, dataStructures, database, pageStore, wal [id=" + ctx.localNodeId() +
                    ", client=" + client + ", topVer=" + cgsCtx.topVer + "]");

            return null;
        }
        catch (Exception e) {
            log.error("DeActivation fail [nodeId=" + ctx.localNodeId() + ", client=" + client +
                ", topVer=" + cgsCtx.topVer + "]", e);

            return e;
        }
        finally {
            if (!client)
                sharedCtx.database().unLock();
        }
    }

    /**
     * Final phase of activation, run asynchronously after the exchange:
     * starts utility cache consumers, services and data structures, then
     * flips the global state to ACTIVE and responds to the initiator.
     */
    private void onFinalActivate(final ChangeGlobalStateContext cgsCtx) {
        IgniteInternalFuture<?> asyncActivateFut = ctx.closure().runLocalSafe(new Runnable() {
            @Override public void run() {
                boolean client = ctx.clientNode();

                Exception e = null;

                try {
                    if (!ctx.config().isDaemon())
                        ctx.cacheObjects().onUtilityCacheStarted();

                    ctx.service().onUtilityCacheStarted();

                    ctx.service().onActivate(ctx);

                    ctx.dataStructures().onActivate(ctx);

                    if (log.isInfoEnabled())
                        log.info("Success final activate [nodeId=" + ctx.localNodeId() +
                            ", client=" + client + ", topVer=" + cgsCtx.topVer + "]");
                }
                catch (Exception ex) {
                    e = ex;

                    log.error("Fail activate finished [nodeId=" + ctx.localNodeId() + ", client=" + client +
                        ", topVer=" + GridClusterStateProcessor.this.lastCgsCtx.topVer + "]", ex);
                }
                finally {
                    // State flips to ACTIVE even on failure; the error is
                    // reported back to the initiator in the response.
                    globalState = ACTIVE;

                    sendChangeGlobalStateResponse(cgsCtx.requestId, cgsCtx.initiatingNodeId, e);

                    GridClusterStateProcessor.this.lastCgsCtx = null;
                }
            }
        });

        cgsCtx.setAsyncActivateFut(asyncActivateFut);
    }

    /**
     * Final phase of deactivation: shuts down database, page store and WAL
     * (server nodes only), flips the global state to INACTIVE and responds
     * to the initiator.
     */
    public void onFinalDeActivate(ChangeGlobalStateContext cgsCtx) {
        final boolean client = ctx.clientNode();

        if (log.isInfoEnabled())
            log.info("Success final deactivate [nodeId=" + ctx.localNodeId() +
                ", client=" + client + ", topVer=" + cgsCtx.topVer + "]");

        Exception ex = null;

        try {
            if (!client) {
                sharedCtx.database().onDeActivate(ctx);

                if (sharedCtx.pageStore() != null)
                    sharedCtx.pageStore().onDeActivate(ctx);

                if (sharedCtx.wal() != null)
                    sharedCtx.wal().onDeActivate(ctx);

                sharedCtx.affinity().removeAllCacheInfo();
            }
        }
        catch (Exception e) {
            ex = e;
        }
        finally {
            globalState = INACTIVE;
        }

        sendChangeGlobalStateResponse(cgsCtx.requestId, cgsCtx.initiatingNodeId, ex);

        this.lastCgsCtx = null;
    }

    /**
     * Invoked when the exchange completes: runs the final activation or
     * deactivation phase unless the context was already marked failed.
     */
    public void onExchangeDone() {
        ChangeGlobalStateContext cgsCtx = lastCgsCtx;

        assert cgsCtx != null;

        if (!cgsCtx.isFail()) {
            if (cgsCtx.activate)
                onFinalActivate(cgsCtx);
            else
                onFinalDeActivate(cgsCtx);
        }
        else
            lastCgsCtx = null;
    }

    /**
     * @param initNodeId Initialize node id.
     * @param ex Exception.
     */
    private void sendChangeGlobalStateResponse(UUID requestId, UUID initNodeId, Exception ex) {
        assert requestId != null;
        assert initNodeId != null;

        try {
            GridChangeGlobalStateMessageResponse actResp = new GridChangeGlobalStateMessageResponse(requestId, ex);

            if (log.isDebugEnabled())
                log.debug("Send change global state response [nodeId=" + ctx.localNodeId() +
                    ", topVer=" + ctx.discovery().topologyVersionEx() + ", response=" + actResp + "]");

            // Short-circuit when the initiator is the local node.
            if (ctx.localNodeId().equals(initNodeId))
                processChangeGlobalStateResponse(ctx.localNodeId(), actResp);
            else
                sharedCtx.io().send(initNodeId, actResp, SYSTEM_POOL);
        }
        catch (IgniteCheckedException e) {
            log.error("Fail send change global state response to " + initNodeId, e);
        }
    }

    /**
     * @param msg Message.
     */
    private void processChangeGlobalStateResponse(final UUID nodeId, final GridChangeGlobalStateMessageResponse msg) {
        assert nodeId != null;
        assert msg != null;

        if (log.isDebugEnabled())
            log.debug("Received activation response [requestId=" + msg.getRequestId() +
                ", nodeId=" + nodeId + "]");

        ClusterNode node = ctx.discovery().node(nodeId);

        if (node == null) {
            U.warn(log, "Received activation response from unknown node (will ignore) [requestId=" +
                msg.getRequestId() + ']');

            return;
        }

        UUID requestId = msg.getRequestId();

        final GridChangeGlobalStateFuture fut = cgsLocFut.get();

        if (fut != null && !fut.isDone() && requestId.equals(fut.requestId)) {
            // Defer until the future's remaining-node set is initialized.
            fut.initFut.listen(new CI1<IgniteInternalFuture<?>>() {
                @Override public void apply(IgniteInternalFuture<?> f) {
                    fut.onResponse(nodeId, msg);
                }
            });
        }
    }

    /**
     * @param activate Activate.
     */
    private String prettyStr(boolean activate) {
        return activate ? "activate" : "deactivate";
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(GridClusterStateProcessor.class, this);
    }

    /**
     * Future completed when every node in the topology at the request's
     * version has acknowledged the state change (or left the cluster).
     */
    private static class GridChangeGlobalStateFuture extends GridFutureAdapter {
        /** Request id. */
        @GridToStringInclude
        private final UUID requestId;

        /** Activate. */
        private final boolean activate;

        /** Nodes. */
        @GridToStringInclude
        private final Set<UUID> remaining = new HashSet<>();

        /** Responses. */
        @GridToStringInclude
        private final Map<UUID, GridChangeGlobalStateMessageResponse> resps = new HashMap<>();

        /** Context. */
        @GridToStringExclude
        private final GridKernalContext ctx;

        /** Guards {@link #remaining} and {@link #resps}. */
        @GridToStringExclude
        private final Object mux = new Object();

        /** Completed once {@link #setRemaining} populated the node set. */
        @GridToStringInclude
        private final GridFutureAdapter initFut = new GridFutureAdapter();

        /** Grid logger.
         */
        @GridToStringExclude
        private final IgniteLogger log;

        /**
         * @param requestId Request id.
         * @param activate Activate flag.
         * @param ctx Kernal context.
         */
        public GridChangeGlobalStateFuture(UUID requestId, boolean activate, GridKernalContext ctx) {
            this.requestId = requestId;
            this.activate = activate;
            this.ctx = ctx;
            this.log = ctx.log(getClass());
        }

        /**
         * @param event Event.
         */
        public void onDiscoveryEvent(DiscoveryEvent event) {
            assert event != null;

            if (isDone())
                return;

            boolean allReceived = false;

            synchronized (mux) {
                // A node that left no longer needs to acknowledge.
                if (remaining.remove(event.eventNode().id()))
                    allReceived = remaining.isEmpty();
            }

            if (allReceived)
                onAllReceived();
        }

        /**
         * Populates the set of nodes that must acknowledge, then releases
         * {@link #initFut} so queued responses/discovery events are applied.
         */
        public void setRemaining(AffinityTopologyVersion topVer) {
            Collection<ClusterNode> nodes = ctx.discovery().nodes(topVer);

            List<UUID> ids = new ArrayList<>(nodes.size());

            for (ClusterNode n : nodes)
                ids.add(n.id());

            if (log.isDebugEnabled())
                log.debug("Setup remaining node [id=" + ctx.localNodeId() +
                    ", client=" + ctx.clientNode() +
                    ", topVer=" + ctx.discovery().topologyVersionEx() +
                    ", nodes=" + Arrays.toString(ids.toArray()) + "]");

            synchronized (mux) {
                remaining.addAll(ids);
            }

            initFut.onDone();
        }

        /**
         * @param msg Activation message response.
*/ public void onResponse(UUID nodeId, GridChangeGlobalStateMessageResponse msg) { assert msg != null; if (isDone()) return; boolean allReceived = false; synchronized (mux) { if (remaining.remove(nodeId)) allReceived = remaining.isEmpty(); resps.put(nodeId, msg); } if (allReceived) onAllReceived(); } /** * */ private void onAllReceived() { Throwable e = new Throwable(); boolean fail = false; for (Map.Entry<UUID, GridChangeGlobalStateMessageResponse> entry : resps.entrySet()) { GridChangeGlobalStateMessageResponse r = entry.getValue(); if (r.getError() != null) { fail = true; e.addSuppressed(r.getError()); } } if (fail) onDone(e); else onDone(); } /** {@inheritDoc} */ @Override public boolean onDone(@Nullable Object res, @Nullable Throwable err) { ctx.state().cgsLocFut.set(null); return super.onDone(res, err); } /** {@inheritDoc} */ @Override public String toString() { return S.toString(GridChangeGlobalStateFuture.class, this); } } /** * * */ private static class ChangeGlobalStateContext { /** Request id. */ private final UUID requestId; /** Initiating node id. */ private final UUID initiatingNodeId; /** Batch requests. */ private final DynamicCacheChangeBatch batch; /** Activate. */ private final boolean activate; /** Topology version. */ private AffinityTopologyVersion topVer; /** Fail. */ private boolean fail; /** Async activate future. */ private IgniteInternalFuture<?> asyncActivateFut; /** * */ public ChangeGlobalStateContext( UUID requestId, UUID initiatingNodeId, DynamicCacheChangeBatch batch, boolean activate ) { this.requestId = requestId; this.batch = batch; this.activate = activate; this.initiatingNodeId = initiatingNodeId; } /** * @param topVer Topology version. 
*/ public void topologyVersion(AffinityTopologyVersion topVer) { this.topVer = topVer; } /** * */ private void setFail() { fail = true; } /** * */ private boolean isFail() { return fail; } /** * */ public IgniteInternalFuture<?> getAsyncActivateFut() { return asyncActivateFut; } /** * @param asyncActivateFut Async activate future. */ public void setAsyncActivateFut(IgniteInternalFuture<?> asyncActivateFut) { this.asyncActivateFut = asyncActivateFut; } /** {@inheritDoc} */ @Override public String toString() { return S.toString(ChangeGlobalStateContext.class, this); } } /** * */ private static class ClientChangeGlobalStateComputeRequest implements IgniteRunnable { /** */ private static final long serialVersionUID = 0L; /** Activation. */ private final boolean activation; /** Ignite. */ @IgniteInstanceResource private Ignite ignite; /** * */ private ClientChangeGlobalStateComputeRequest(boolean activation) { this.activation = activation; } /** {@inheritDoc} */ @Override public void run() { ignite.active(activation); } } }
/*
 * Copyright (c) 2003, 2004, Oracle and/or its affiliates. All rights reserved.
 * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 */

package com.sun.corba.se.spi.presentation.rmi;

import javax.rmi.CORBA.Tie ;

import org.omg.CORBA.portable.Delegate ;
import org.omg.CORBA.portable.ObjectImpl ;
import org.omg.CORBA.portable.OutputStream ;

import org.omg.PortableServer.POA ;
import org.omg.PortableServer.POAManager ;
import org.omg.PortableServer.Servant ;

import org.omg.PortableServer.POAPackage.WrongPolicy ;
import org.omg.PortableServer.POAPackage.ServantNotActive ;
import org.omg.PortableServer.POAManagerPackage.AdapterInactive ;

import org.omg.CORBA.ORB ;

import com.sun.corba.se.spi.logging.CORBALogDomains ;

import com.sun.corba.se.impl.logging.ORBUtilSystemException ;

// XXX Getting rid of this requires introducing an ObjectAdapterManager abstraction
// as an interface into the OA framework.
import com.sun.corba.se.impl.oa.poa.POAManagerImpl ;

/** Provide access to stub delegate and type id information
 * independent of the stub type.  This class exists because
 * ObjectImpl does not have an interface for the 3 delegate and
 * type id methods, so a DynamicStub has a different type.
 * We cannot simply change ObjectImpl as it is a standard API.
 * We also cannot change the code generation of Stubs, as that
 * is also standard.  Hence I am left with this ugly class.
 */
public abstract class StubAdapter
{
    // Static-only utility class; never instantiated.
    private StubAdapter() {}

    private static ORBUtilSystemException wrapper =
        ORBUtilSystemException.get( CORBALogDomains.RPC_PRESENTATION ) ;

    /** Returns true iff instances of cls are stubs, i.e. cls is
     * assignable to ObjectImpl or DynamicStub.
     */
    public static boolean isStubClass( Class cls )
    {
        return (ObjectImpl.class.isAssignableFrom( cls )) ||
            (DynamicStub.class.isAssignableFrom( cls )) ;
    }

    /** Returns true iff stub is a stub: either a DynamicStub or an ObjectImpl. */
    public static boolean isStub( Object stub )
    {
        return (stub instanceof DynamicStub) ||
            (stub instanceof ObjectImpl) ;
    }

    /** Installs the delegate on either stub flavor; throws a wrapped
     * error if stub is not actually a stub.
     */
    public static void setDelegate( Object stub, Delegate delegate )
    {
        if (stub instanceof DynamicStub)
            ((DynamicStub)stub).setDelegate( delegate ) ;
        else if (stub instanceof ObjectImpl)
            ((ObjectImpl)stub)._set_delegate( delegate ) ;
        else
            throw wrapper.setDelegateRequiresStub() ;
    }

    /** Use implicit activation to get an object reference for the servant.
     */
    public static org.omg.CORBA.Object activateServant( Servant servant )
    {
        POA poa = servant._default_POA() ;
        org.omg.CORBA.Object ref = null ;

        try {
            ref = poa.servant_to_reference( servant ) ;
        } catch (ServantNotActive sna) {
            throw wrapper.getDelegateServantNotActive( sna ) ;
        } catch (WrongPolicy wp) {
            throw wrapper.getDelegateWrongPolicy( wp ) ;
        }

        // Make sure that the POAManager is activated if no other
        // POAManager state management has taken place.
        POAManager mgr = poa.the_POAManager() ;
        if (mgr instanceof POAManagerImpl) {
            POAManagerImpl mgrImpl = (POAManagerImpl)mgr ;
            mgrImpl.implicitActivation() ;
        }

        return ref ;
    }

    /** Given any Tie, return the corresponding object reference, activating
     * the Servant if necessary.
     */
    public static org.omg.CORBA.Object activateTie( Tie tie )
    {
        /* Any implementation of Tie should be either a Servant or an ObjectImpl,
         * depending on which style of code generation is used.  rmic -iiop by
         * default results in an ObjectImpl-based Tie, while rmic -iiop -poa
         * results in a Servant-based Tie.  Dynamic RMI-IIOP also uses Servant-based
         * Ties (see impl.presentation.rmi.ReflectiveTie).
         */
        if (tie instanceof ObjectImpl) {
            return tie.thisObject() ;
        } else if (tie instanceof Servant) {
            Servant servant = (Servant)tie ;
            return activateServant( servant ) ;
        } else {
            throw wrapper.badActivateTieCall() ;
        }
    }

    /** This also gets the delegate from a Servant by
     * using Servant._this_object()
     */
    public static Delegate getDelegate( Object stub )
    {
        if (stub instanceof DynamicStub)
            return ((DynamicStub)stub).getDelegate() ;
        else if (stub instanceof ObjectImpl)
            return ((ObjectImpl)stub)._get_delegate() ;
        else if (stub instanceof Tie) {
            // Ties have no delegate of their own: activate to get a real
            // object reference, then recurse on that reference.
            Tie tie = (Tie)stub ;
            org.omg.CORBA.Object ref = activateTie( tie ) ;
            return getDelegate( ref ) ;
        } else
            throw wrapper.getDelegateRequiresStub() ;
    }

    /** Returns the ORB associated with the stub. */
    public static ORB getORB( Object stub )
    {
        if (stub instanceof DynamicStub)
            return ((DynamicStub)stub).getORB() ;
        else if (stub instanceof ObjectImpl)
            return (ORB)((ObjectImpl)stub)._orb() ;
        else
            throw wrapper.getOrbRequiresStub() ;
    }

    /** Returns the repository type ids of the stub. */
    public static String[] getTypeIds( Object stub )
    {
        if (stub instanceof DynamicStub)
            return ((DynamicStub)stub).getTypeIds() ;
        else if (stub instanceof ObjectImpl)
            return ((ObjectImpl)stub)._ids() ;
        else
            throw wrapper.getTypeIdsRequiresStub() ;
    }

    /** Connects the stub to the given ORB.  Note that javax.rmi.CORBA.Stub
     * is checked before ObjectImpl (a Stub IS an ObjectImpl) so that the
     * Stub-specific connect logic is used when available.
     */
    public static void connect( Object stub,
        ORB orb ) throws java.rmi.RemoteException
    {
        if (stub instanceof DynamicStub)
            ((DynamicStub)stub).connect(
                (com.sun.corba.se.spi.orb.ORB)orb ) ;
        else if (stub instanceof javax.rmi.CORBA.Stub)
            ((javax.rmi.CORBA.Stub)stub).connect( orb ) ;
        else if (stub instanceof ObjectImpl)
            orb.connect( (org.omg.CORBA.Object)stub ) ;
        else
            throw wrapper.connectRequiresStub() ;
    }

    /** Returns whether the stub refers to a local object. */
    public static boolean isLocal( Object stub )
    {
        if (stub instanceof DynamicStub)
            return ((DynamicStub)stub).isLocal() ;
        else if (stub instanceof ObjectImpl)
            return ((ObjectImpl)stub)._is_local() ;
        else
            throw wrapper.isLocalRequiresStub() ;
    }

    /** Creates an output stream for a request on the stub. */
    public static OutputStream request( Object stub,
        String operation, boolean responseExpected )
    {
        if (stub instanceof DynamicStub)
            return ((DynamicStub)stub).request( operation,
                responseExpected ) ;
        else if (stub instanceof ObjectImpl)
            return ((ObjectImpl)stub)._request( operation,
                responseExpected ) ;
        else
            throw wrapper.requestRequiresStub() ;
    }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.voiceid.model;

import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request to attach tags to a Voice ID resource.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/voice-id-2021-09-27/TagResource" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class TagResourceRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The Amazon Resource Name (ARN) of the Voice ID resource you want to tag. */
    private String resourceArn;

    /** The list of tags to assign to the specified resource. */
    private java.util.List<Tag> tags;

    /**
     * Sets the Amazon Resource Name (ARN) of the Voice ID resource you want to tag.
     *
     * @param resourceArn
     *        The Amazon Resource Name (ARN) of the Voice ID resource you want to tag.
     */
    public void setResourceArn(String resourceArn) {
        this.resourceArn = resourceArn;
    }

    /**
     * Gets the Amazon Resource Name (ARN) of the Voice ID resource you want to tag.
     *
     * @return The Amazon Resource Name (ARN) of the Voice ID resource you want to tag.
     */
    public String getResourceArn() {
        return this.resourceArn;
    }

    /**
     * Fluent setter for the resource ARN.
     *
     * @param resourceArn
     *        The Amazon Resource Name (ARN) of the Voice ID resource you want to tag.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public TagResourceRequest withResourceArn(String resourceArn) {
        setResourceArn(resourceArn);
        return this;
    }

    /**
     * Gets the list of tags to assign to the specified resource.
     *
     * @return The list of tags to assign to the specified resource.
     */
    public java.util.List<Tag> getTags() {
        return tags;
    }

    /**
     * Sets the list of tags to assign to the specified resource. A defensive copy
     * of the supplied collection is stored; {@code null} clears the list.
     *
     * @param tags
     *        The list of tags to assign to the specified resource.
     */
    public void setTags(java.util.Collection<Tag> tags) {
        this.tags = (tags == null) ? null : new java.util.ArrayList<Tag>(tags);
    }

    /**
     * Fluent varargs setter for tags.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setTags(java.util.Collection)} or {@link #withTags(java.util.Collection)} if you want to override the
     * existing values.
     * </p>
     *
     * @param tags
     *        The list of tags to assign to the specified resource.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public TagResourceRequest withTags(Tag... tags) {
        if (this.tags == null) {
            this.tags = new java.util.ArrayList<Tag>(tags.length);
        }
        java.util.Collections.addAll(this.tags, tags);
        return this;
    }

    /**
     * Fluent collection setter for tags; replaces any existing values.
     *
     * @param tags
     *        The list of tags to assign to the specified resource.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public TagResourceRequest withTags(java.util.Collection<Tag> tags) {
        setTags(tags);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getResourceArn() != null) {
            sb.append("ResourceArn: ").append(getResourceArn()).append(",");
        }
        if (getTags() != null) {
            sb.append("Tags: ").append(getTags());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof TagResourceRequest)) {
            return false;
        }
        TagResourceRequest that = (TagResourceRequest) obj;
        return java.util.Objects.equals(getResourceArn(), that.getResourceArn())
                && java.util.Objects.equals(getTags(), that.getTags());
    }

    @Override
    public int hashCode() {
        // Objects.hash reproduces the prime-31 accumulation with null -> 0,
        // so the value matches the previous hand-rolled implementation exactly.
        return java.util.Objects.hash(getResourceArn(), getTags());
    }

    @Override
    public TagResourceRequest clone() {
        return (TagResourceRequest) super.clone();
    }

}
/*
 * Copyright (c) 2015, Andrey Lavrov <lavroff@gmail.com>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * * Redistributions of source code must retain the above copyright notice, this
 *   list of conditions and the following disclaimer.
 * * Redistributions in binary form must reproduce the above copyright notice,
 *   this list of conditions and the following disclaimer in the documentation
 *   and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
package cy.alavrov.jminerguide.data.booster;

import cy.alavrov.jminerguide.log.JMGLogger;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.List;
import javax.swing.DefaultComboBoxModel;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.input.SAXBuilder;
import org.jdom2.output.Format;
import org.jdom2.output.XMLOutputter;

/**
 * Container for booster ships.
 * At this moment - there's only one current booster.
 *
 * Persisted to and loaded from "boosters.dat" (XML) under the configured path.
 * All public accessors are synchronized; the container always holds at least
 * one ship ("Generic Ship" is created as a fallback).
 *
 * @author Andrey Lavrov <lavroff@gmail.com>
 */
public class BoosterShipContainer {
    // Booster ships keyed by ship name; LinkedHashMap preserves insertion order
    // (the order shown in the combo box model).
    private LinkedHashMap<String, BoosterShip> boosterShips;

    // Sentinel "no booster" hull, returned by getNoBooster().
    private final BoosterShip notABoosterShip = new NoBoosterShip();

    // Directory where boosters.dat is stored.
    private final String path;

    // Whether a boosting ship should be used at all.
    private boolean useBoosterShip;

    // Name of the last selected booster ship (key into boosterShips).
    private String selectedBoosterShip;

    public BoosterShipContainer(String path) {
        boosterShips = new LinkedHashMap<>();
        // Seed with a default ship so the container is never empty.
        BoosterShip booster = new BoosterShip("Generic Ship");
        boosterShips.put(booster.getName(), booster);
        this.path = path;
        useBoosterShip = false;
    }

    /**
     * Loads booster ships from a configuration file.
     * If the file is missing, a fresh one is created via {@link #save()}.
     * On parse failure the partially-read state is still applied; the
     * container is then re-seeded with "Generic Ship" if it ended up empty.
     */
    public synchronized void load() {
        // NOTE(review): logged at warning level though this is a routine event —
        // confirm whether JMGLogger has an info-level method that fits better.
        JMGLogger.logWarning("Loading booster ships...");

        File src = new File(path+File.separator+"boosters.dat");
        if (!src.exists()) {
            JMGLogger.logWarning("No booster ship file found, creating new.");
            save();
            return;
        }

        LinkedHashMap<String, BoosterShip> newBoosterShips = new LinkedHashMap<>();
        String lastSelectedBoosterShip = null;
        boolean doUseBoosterShip = false;

        SAXBuilder builder = new SAXBuilder();
        try {
            Document doc = builder.build(src);
            Element rootNode = doc.getRootElement();

            doUseBoosterShip = rootNode.getAttribute("useship").getBooleanValue();
            lastSelectedBoosterShip = rootNode.getChildText("lastselectedboostership");

            List<Element> shipList = rootNode.getChildren("booster");
            for (Element shipEl : shipList) {
                BoosterShip ship = new BoosterShip(shipEl);
                newBoosterShips.put(ship.getName(), ship);
            }
        } catch (Exception e) {
            JMGLogger.logSevere("Unable to load a configuration file for booster ships", e);
        }

        useBoosterShip = doUseBoosterShip;
        boosterShips = newBoosterShips;

        // Guarantee the invariant that the container is never empty.
        if (boosterShips.isEmpty()) {
            BoosterShip booster = new BoosterShip("Generic Ship");
            boosterShips.put(booster.getName(), booster);
        }

        // booster ships is never empty, so it's ok.
        if (lastSelectedBoosterShip == null) lastSelectedBoosterShip = boosterShips.values().iterator().next().getName();
        selectedBoosterShip = lastSelectedBoosterShip;
    }

    /**
     * Saves all the booster ships into a file.
     * Writes pretty-printed XML to boosters.dat; failures are logged, not thrown.
     */
    public synchronized void save() {
        File src = new File(path+File.separator+"boosters.dat");
        if (!src.exists()) {
            try {
                if (!src.createNewFile()) {
                    JMGLogger.logSevere("Unable to create a configuration file for booster ships");
                    return;
                }
            } catch (IOException e) {
                JMGLogger.logSevere("Unable to create a configuration file for booster ships", e);
                return;
            }
        }

        Element root = new Element("boosters");
        Document doc = new Document(root);

        root.setAttribute("useship", String.valueOf(useBoosterShip));

        // Fall back to the first ship when no selection has been recorded yet.
        String lastBoosterShip = selectedBoosterShip;
        if (lastBoosterShip == null) lastBoosterShip = boosterShips.values().iterator().next().getName();
        root.addContent(new Element("lastselectedboostership").setText(lastBoosterShip));

        for (BoosterShip booster : boosterShips.values()) {
            Element elem = booster.getXMLElement();
            root.addContent(elem);
        }

        XMLOutputter xmlOutput = new XMLOutputter();
        xmlOutput.setFormat(Format.getPrettyFormat());
        try (FileOutputStream fos = new FileOutputStream(path+File.separator+"boosters.dat")){
            xmlOutput.output(doc, fos);
        } catch (Exception e) {
            JMGLogger.logSevere("Unable to save "+path+File.separator+"boosters.dat", e);
        }
    }

    /**
     * Returns a combo box model with booster ships for a Swing combo box.
     * Ships are sorted by insertion order.
     * @return
     */
    public synchronized DefaultComboBoxModel<BoosterShip> getBoosterShipModel() {
        DefaultComboBoxModel<BoosterShip> out = new DefaultComboBoxModel<>();

        if (boosterShips.isEmpty()) return out;

        for (BoosterShip ship : boosterShips.values()) {
            out.addElement(ship);
        }

        return out;
    }

    /**
     * Looks up a booster ship by name.
     * @param name ship name; null yields null.
     * @return the ship, or null if unknown.
     */
    public synchronized BoosterShip getBoosterShip(String name) {
        if (name == null) return null;
        return boosterShips.get(name);
    }

    /**
     * Generic hull without any booster links.
     * @return
     */
    public BoosterShip getNoBooster() {
        return notABoosterShip;
    }

    /**
     * Should we use boosting ship for boosting?
     * @return
     */
    public synchronized boolean isUsingBoosterShip() {
        return useBoosterShip;
    }

    /**
     * Sets if we should use boosting ships for boosting.
     * @param what
     */
    public synchronized void setUsingBoosterShip(boolean what) {
        useBoosterShip = what;
    }

    /**
     * Creates a new booster ship with a given name. If the name is used already,
     * null returned. Same with null and whitespace-only names.
     * @param name
     * @return
     */
    public synchronized BoosterShip createNewBoosterShip(String name) {
        if (name == null || name.trim().isEmpty()) return null;
        if (getBoosterShip(name) != null) return null;

        BoosterShip newShip = new BoosterShip(name);
        boosterShips.put(name, newShip);
        return newShip;
    }

    /**
     * How many booster ships are there?
     * @return
     */
    public synchronized int getBoosterShipCount() {
        return boosterShips.size();
    }

    /**
     * Deletes the booster ship from container. Can't delete last ship!
     * @param ship
     * @return true, if successfully deleted.
     */
    public synchronized boolean deleteBoosterShip(BoosterShip ship) {
        if (ship == null) return false;
        // Keep the "never empty" invariant: refuse to delete the last ship.
        if (getBoosterShipCount() < 2) return false;

        BoosterShip res = boosterShips.remove(ship.getName());
        return (res != null);
    }

    /**
     * Changes booster ship's name and moves it to the appropriate key.
     * New name shouldn't be used by another ship.
     * New name shouldn't be empty or whitespace-only.
     * New name should be different from the current one.
     * Null parameters lead to false.
     * NOTE(review): remove+put moves the renamed ship to the end of the
     * insertion order, which reorders the combo box model — confirm intended.
     * @param oldName name of existing booster ship.
     * @param newName desired new name.
     * @return true, if renamed successfully, false otherwise.
     */
    public synchronized boolean renameBoosterShip(String oldName, String newName) {
        if (oldName == null || newName == null) return false;
        if (newName.trim().isEmpty()) return false;
        if (oldName.equals(newName)) return false;
        if (boosterShips.containsKey(newName)) return false;

        BoosterShip oldship = boosterShips.remove(oldName);
        if (oldship == null) return false;

        oldship.setName(newName);
        boosterShips.put(newName, oldship);

        return true;
    }

    /**
     * Sets the name of a last selected booster ship.
     * @param name
     */
    public synchronized void setSelectedBoosterShip(String name) {
        selectedBoosterShip = name;
    }

    /**
     * Returns last selected booster ship.
     * @return
     */
    public synchronized BoosterShip getLastSelectedBoosterShip() {
        BoosterShip ret = boosterShips.get(selectedBoosterShip);
        // ships is never empty, so it's safe
        if (ret == null) ret = boosterShips.values().iterator().next();
        return ret;
    }
}
package com.swrve.sdk.localstorage;

import android.util.Log;

import com.swrve.sdk.SwrveHelper;

import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;

/**
 * Used internally to provide a multi-layer cache of events and resource diffs.
 *
 * Reads consult the in-memory cache first and fall back to the (optional)
 * secondary storage. Writes go to the cache; {@link #flush()} migrates cached
 * data into the secondary storage. Event and cache operations are guarded by
 * separate locks ({@code eventLock} / {@code cacheLock}).
 */
public class MemoryCachedLocalStorage implements ILocalStorage {
    // Fast in-memory layer; always present.
    private ILocalStorage cache;
    // Durable layer (e.g. SQLite); may be null.
    private ILocalStorage secondaryStorage;

    // Guards event reads/writes across both layers.
    private Object eventLock = new Object();
    // Guards cache-entry reads/writes across both layers.
    private Object cacheLock = new Object();

    public MemoryCachedLocalStorage(ILocalStorage cache, ILocalStorage secondaryStorage) {
        this.cache = cache;
        this.secondaryStorage = secondaryStorage;
    }

    public ILocalStorage getSecondaryStorage() {
        return secondaryStorage;
    }

    public void setSecondaryStorage(ILocalStorage secondaryStorage) {
        this.secondaryStorage = secondaryStorage;
    }

    public ILocalStorage getCacheStorage() {
        return cache;
    }

    public void setCacheStorage(ILocalStorage cacheStorage) {
        this.cache = cacheStorage;
    }

    /**
     * Returns the cache entry for the user/category, checking the in-memory
     * layer first and falling back to secondary storage.
     */
    @Override
    public String getCacheEntryForUser(String userId, String category) {
        synchronized (cacheLock) {
            String result = cache.getCacheEntryForUser(userId, category);
            if (result == null && secondaryStorage != null) {
                result = secondaryStorage.getCacheEntryForUser(userId, category);
            }
            return result;
        }
    }

    /**
     * Returns the cache entry for the user/category after validating its HMAC-MD5
     * signature against the stored companion entry (category + SIGNATURE_SUFFIX).
     *
     * @throws SecurityException if the signature does not match the content.
     * NOTE(review): if the signature cannot be computed at all
     * (NoSuchAlgorithmException/InvalidKeyException) the content is returned
     * UNVALIDATED after logging — confirm this is the intended fallback.
     */
    @Override
    public String getSecureCacheEntryForUser(String userId, String category, String uniqueKey) throws SecurityException {
        String cachedContent = null;
        String cachedSignature = null;
        synchronized (cacheLock) {
            cachedContent = cache.getCacheEntryForUser(userId, category);
            cachedSignature = cache.getCacheEntryForUser(userId, category + SIGNATURE_SUFFIX);
            if (SwrveHelper.isNullOrEmpty(cachedContent) && secondaryStorage != null) {
                // Was not cached in memory, read from disk
                cachedContent = secondaryStorage.getCacheEntryForUser(userId, category);
                cachedSignature = secondaryStorage.getCacheEntryForUser(userId, category + SIGNATURE_SUFFIX);
            }
        }

        if (!SwrveHelper.isNullOrEmpty(cachedContent)) {
            try {
                String computedSignature = SwrveHelper.createHMACWithMD5(cachedContent, uniqueKey);

                if (SwrveHelper.isNullOrEmpty(computedSignature) || SwrveHelper.isNullOrEmpty(cachedSignature) || !cachedSignature.equals(computedSignature)) {
                    throw new SecurityException("Signature validation failed");
                }
            } catch (NoSuchAlgorithmException e) {
                Log.i("SwrveSDK", "Computing signature failed because of invalid algorithm");
            } catch (InvalidKeyException e) {
                Log.i("SwrveSDK", "Computing signature failed because of an invalid key");
            }
        }

        return cachedContent;
    }

    /**
     * Returns up to n events, draining the secondary storage first and topping
     * up from the in-memory cache, keyed by the storage each batch came from
     * (so callers can delete events from the right layer afterwards).
     */
    public LinkedHashMap<ILocalStorage, LinkedHashMap<Long, String>> getCombinedFirstNEvents(Integer n) {
        synchronized (eventLock) {
            LinkedHashMap<ILocalStorage, LinkedHashMap<Long, String>> result = new LinkedHashMap<ILocalStorage, LinkedHashMap<Long, String>>();
            int eventCount = 0;

            if (secondaryStorage != null) {
                LinkedHashMap<Long, String> events = secondaryStorage.getFirstNEvents(n);
                eventCount = events.size();
                if (eventCount > 0) {
                    result.put(secondaryStorage, events);
                }
            }

            // Only ask the memory cache for the remainder of the quota.
            if (n - eventCount > 0) {
                LinkedHashMap<Long, String> events = cache.getFirstNEvents(n - eventCount);
                int remainingEventCount = events.size();
                if (remainingEventCount > 0) {
                    result.put(cache, events);
                }
            }

            return result;
        }
    }

    /** New events always land in the in-memory cache; flush() moves them to disk. */
    @Override
    public void addEvent(String eventJSON) throws Exception {
        synchronized (eventLock) {
            cache.addEvent(eventJSON);
        }
    }

    @Override
    public void removeEventsById(Collection<Long> ids) {
        synchronized (eventLock) {
            cache.removeEventsById(ids);
        }
    }

    @Override
    public LinkedHashMap<Long, String> getFirstNEvents(Integer ids) {
        synchronized (eventLock) {
            return cache.getFirstNEvents(ids);
        }
    }

    @Override
    public void setCacheEntryForUser(String userId, String category, String rawData) {
        synchronized (cacheLock) {
            cache.setCacheEntryForUser(userId, category, rawData);
        }
    }

    /** Stores content plus its signature as two cache entries in the memory layer. */
    @Override
    public void setSecureCacheEntryForUser(String userId, String category, String rawData, String signature) {
        synchronized (cacheLock) {
            cache.setCacheEntryForUser(userId, category, rawData);
            cache.setCacheEntryForUser(userId, category + SIGNATURE_SUFFIX, signature);
        }
    }

    /**
     * Signs the data with HMAC-MD5 and writes content + signature to BOTH layers
     * immediately (bypassing the usual flush cycle).
     * NOTE(review): the two failure branches disagree — on NoSuchAlgorithmException
     * the data is saved unsigned, on InvalidKeyException nothing is saved at all.
     * Confirm whether that asymmetry is intended.
     */
    public void setAndFlushSecureSharedEntryForUser(String userId, String category, String rawData, String uniqueKey) {
        synchronized (cacheLock) {
            try {
                String signature = SwrveHelper.createHMACWithMD5(rawData, uniqueKey);
                // Save to memory and secondary storage
                cache.setSecureCacheEntryForUser(userId, category, rawData, signature);
                if (secondaryStorage != null) {
                    secondaryStorage.setSecureCacheEntryForUser(userId, category, rawData, signature);
                }
            } catch (NoSuchAlgorithmException e) {
                Log.i("SwrveSDK", "Computing signature failed because of invalid algorithm");
                cache.setCacheEntryForUser(userId, category, rawData);
                if (secondaryStorage != null) {
                    secondaryStorage.setCacheEntryForUser(userId, category, rawData);
                }
            } catch (InvalidKeyException e) {
                Log.i("SwrveSDK", "Computing signature failed because of an invalid key");
            }
        }
    }

    /** Writes an unsigned shared (non-per-user) entry to both layers immediately. */
    public void setAndFlushSharedEntry(String category, String rawData) {
        synchronized (cacheLock) {
            // Save to memory and secondary storage
            cache.setCacheEntryForUser(category, category, rawData);
            if (secondaryStorage != null) {
                secondaryStorage.setCacheEntryForUser(category, category, rawData);
            }
        }
    }

    /** Shared entries use the category as both userId and category key. */
    public String getSharedCacheEntry(String category) {
        return getCacheEntryForUser(category, category);
    }

    /**
     * Migrates cached events and cache entries into the secondary storage,
     * provided the layers are distinct and support the flush interfaces.
     */
    public void flush() throws Exception {
        if (cache != secondaryStorage && cache instanceof IFlushableLocalStorage && secondaryStorage instanceof IFastInsertLocalStorage) {
            IFlushableLocalStorage flushableStorage = ((IFlushableLocalStorage) cache);
            IFastInsertLocalStorage targetStorage = ((IFastInsertLocalStorage) secondaryStorage);
            // Events and cache entries are flushed under their own locks.
            synchronized (eventLock) {
                flushableStorage.flushEvents(targetStorage);
            }
            synchronized (cacheLock) {
                flushableStorage.flushCache(targetStorage);
            }
        }
    }

    @Override
    public Map<Entry<String, String>, String> getAllCacheEntries() {
        return cache.getAllCacheEntries();
    }

    /**
     * Returns entries from both layers merged into one map.
     * NOTE(review): secondary-storage values overwrite memory-cache values on
     * key collision (putAll order) — confirm that precedence is intended.
     */
    public Map<Entry<String, String>, String> getCombinedCacheEntries() {
        Map<Entry<String, String>, String> result = cache.getAllCacheEntries();
        if (secondaryStorage != null) {
            result.putAll(secondaryStorage.getAllCacheEntries());
        }
        return result;
    }

    @Override
    public void close() {
        cache.close();
        if (secondaryStorage != null) {
            secondaryStorage.close();
        }
    }

    @Override
    public void reset() {
        cache.reset();
        if (secondaryStorage != null) {
            secondaryStorage.reset();
        }
    }
}
// Copyright 2014 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.media; import android.annotation.TargetApi; import android.content.Context; import android.graphics.ImageFormat; import android.hardware.camera2.CameraAccessException; import android.hardware.camera2.CameraCaptureSession; import android.hardware.camera2.CameraCharacteristics; import android.hardware.camera2.CameraDevice; import android.hardware.camera2.CameraManager; import android.hardware.camera2.CameraMetadata; import android.hardware.camera2.CaptureRequest; import android.hardware.camera2.params.StreamConfigurationMap; import android.media.Image; import android.media.ImageReader; import android.os.Build; import android.os.Handler; import android.os.HandlerThread; import android.util.Size; import android.view.Surface; import org.chromium.base.Log; import org.chromium.base.annotations.JNINamespace; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; /** * This class implements Video Capture using Camera2 API, introduced in Android * API 21 (L Release). Capture takes place in the current Looper, while pixel * download takes place in another thread used by ImageReader. A number of * static methods are provided to retrieve information on current system cameras * and their capabilities, using android.hardware.camera2.CameraManager. **/ @JNINamespace("media") @TargetApi(Build.VERSION_CODES.LOLLIPOP) public class VideoCaptureCamera2 extends VideoCapture { // Inner class to extend a CameraDevice state change listener. 
private class CrStateListener extends CameraDevice.StateCallback { @Override public void onOpened(CameraDevice cameraDevice) { mCameraDevice = cameraDevice; changeCameraStateAndNotify(CameraState.CONFIGURING); if (!createCaptureObjects()) { changeCameraStateAndNotify(CameraState.STOPPED); nativeOnError(mNativeVideoCaptureDeviceAndroid, "Error configuring camera"); } } @Override public void onDisconnected(CameraDevice cameraDevice) { cameraDevice.close(); mCameraDevice = null; changeCameraStateAndNotify(CameraState.STOPPED); } @Override public void onError(CameraDevice cameraDevice, int error) { cameraDevice.close(); mCameraDevice = null; changeCameraStateAndNotify(CameraState.STOPPED); nativeOnError(mNativeVideoCaptureDeviceAndroid, "Camera device error " + Integer.toString(error)); } }; // Inner class to extend a Capture Session state change listener. private class CrCaptureSessionListener extends CameraCaptureSession.StateCallback { @Override public void onConfigured(CameraCaptureSession cameraCaptureSession) { Log.d(TAG, "onConfigured"); mCaptureSession = cameraCaptureSession; createCaptureRequest(); changeCameraStateAndNotify(CameraState.STARTED); } @Override public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) { // TODO(mcasas): When signalling error, C++ will tear us down. Is there need for // cleanup? changeCameraStateAndNotify(CameraState.STOPPED); nativeOnError(mNativeVideoCaptureDeviceAndroid, "Camera session configuration error"); } }; // Internal class implementing the ImageReader listener. Gets pinged when a // new frame is been captured and downloaded to memory-backed buffers. 
private class CrImageReaderListener implements ImageReader.OnImageAvailableListener { @Override public void onImageAvailable(ImageReader reader) { Image image = null; try { image = reader.acquireLatestImage(); if (image == null) return; if (image.getFormat() != ImageFormat.YUV_420_888 || image.getPlanes().length != 3) { Log.e(TAG, "Unexpected image format: %d or #planes: %d", image.getFormat(), image.getPlanes().length); return; } if (reader.getWidth() != image.getWidth() || reader.getHeight() != image.getHeight()) { throw new IllegalStateException("ImageReader size " + reader.getWidth() + "x" + reader.getHeight() + " did not match Image size " + image.getWidth() + "x" + image.getHeight()); } readImageIntoBuffer(image, mCapturedData); nativeOnFrameAvailable(mNativeVideoCaptureDeviceAndroid, mCapturedData, mCapturedData.length, getCameraRotation()); } catch (IllegalStateException ex) { Log.e(TAG, "acquireLatestImage():" + ex); return; } finally { if (image != null) { image.close(); } } } }; private byte[] mCapturedData; private CameraDevice mCameraDevice = null; private CaptureRequest.Builder mPreviewBuilder = null; private CameraCaptureSession mCaptureSession = null; private ImageReader mImageReader = null; private static final double kNanoSecondsToFps = 1.0E-9; private static final String TAG = "cr.media"; private static enum CameraState { OPENING, CONFIGURING, STARTED, STOPPED } private CameraState mCameraState = CameraState.STOPPED; private final Object mCameraStateLock = new Object(); // Service function to grab CameraCharacteristics and handle exceptions. 
private static CameraCharacteristics getCameraCharacteristics(Context appContext, int id) { final CameraManager manager = (CameraManager) appContext.getSystemService(Context.CAMERA_SERVICE); try { return manager.getCameraCharacteristics(Integer.toString(id)); } catch (CameraAccessException ex) { Log.e(TAG, "getCameraCharacteristics: " + ex); } return null; } private boolean createCaptureObjects() { Log.d(TAG, "createCaptureObjects"); if (mCameraDevice == null) return false; // Create an ImageReader and plug a thread looper into it to have // readback take place on its own thread. final int maxImages = 2; mImageReader = ImageReader.newInstance(mCaptureFormat.getWidth(), mCaptureFormat.getHeight(), mCaptureFormat.getPixelFormat(), maxImages); HandlerThread thread = new HandlerThread("CameraPreview"); thread.start(); final Handler backgroundHandler = new Handler(thread.getLooper()); final CrImageReaderListener imageReaderListener = new CrImageReaderListener(); mImageReader.setOnImageAvailableListener(imageReaderListener, backgroundHandler); // The Preview template specifically means "high frame rate is given // priority over the highest-quality post-processing". try { mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); } catch (CameraAccessException | IllegalArgumentException | SecurityException ex) { Log.e(TAG, "createCaptureRequest: " + ex); return false; } if (mPreviewBuilder == null) { Log.e(TAG, "mPreviewBuilder error"); return false; } // Construct an ImageReader Surface and plug it into our CaptureRequest.Builder. 
mPreviewBuilder.addTarget(mImageReader.getSurface()); // A series of configuration options in the PreviewBuilder mPreviewBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO); mPreviewBuilder.set( CaptureRequest.NOISE_REDUCTION_MODE, CameraMetadata.NOISE_REDUCTION_MODE_FAST); mPreviewBuilder.set(CaptureRequest.EDGE_MODE, CameraMetadata.EDGE_MODE_FAST); mPreviewBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON); // SENSOR_EXPOSURE_TIME ? List<Surface> surfaceList = new ArrayList<Surface>(1); surfaceList.add(mImageReader.getSurface()); final CrCaptureSessionListener captureSessionListener = new CrCaptureSessionListener(); try { mCameraDevice.createCaptureSession(surfaceList, captureSessionListener, null); } catch (CameraAccessException | IllegalArgumentException | SecurityException ex) { Log.e(TAG, "createCaptureSession: " + ex); return false; } // Wait for trigger on CrCaptureSessionListener.onConfigured(); return true; } private boolean createCaptureRequest() { Log.d(TAG, "createCaptureRequest"); try { // This line triggers the capture. No |listener| is registered, so // we will not get notified of capture events, instead, ImageReader // will trigger every time a downloaded image is ready. Since //|handler| is null, we'll work on the current Thread Looper. 
mCaptureSession.setRepeatingRequest(mPreviewBuilder.build(), null, null); } catch (CameraAccessException | IllegalArgumentException | SecurityException ex) { Log.e(TAG, "setRepeatingRequest: " + ex); return false; } // Now wait for trigger on CrImageReaderListener.onImageAvailable(); return true; } private static void readImageIntoBuffer(Image image, byte[] data) { final int imageWidth = image.getWidth(); final int imageHeight = image.getHeight(); final Image.Plane[] planes = image.getPlanes(); int offset = 0; for (int plane = 0; plane < planes.length; ++plane) { final ByteBuffer buffer = planes[plane].getBuffer(); final int rowStride = planes[plane].getRowStride(); // Experimentally, U and V planes have |pixelStride| = 2, which // essentially means they are packed. That's silly, because we are // forced to unpack here. final int pixelStride = planes[plane].getPixelStride(); final int planeWidth = (plane == 0) ? imageWidth : imageWidth / 2; final int planeHeight = (plane == 0) ? imageHeight : imageHeight / 2; if (pixelStride == 1 && rowStride == planeWidth) { // Copy whole plane from buffer into |data| at once. buffer.get(data, offset, planeWidth * planeHeight); offset += planeWidth * planeHeight; } else { // Copy pixels one by one respecting pixelStride and rowStride. byte[] rowData = new byte[rowStride]; for (int row = 0; row < planeHeight - 1; ++row) { buffer.get(rowData, 0, rowStride); for (int col = 0; col < planeWidth; ++col) { data[offset++] = rowData[col * pixelStride]; } } // Last row is special in some devices and may not contain the full // |rowStride| bytes of data. See http://crbug.com/458701. 
buffer.get(rowData, 0, Math.min(rowStride, buffer.remaining())); for (int col = 0; col < planeWidth; ++col) { data[offset++] = rowData[col * pixelStride]; } } } } private void changeCameraStateAndNotify(CameraState state) { synchronized (mCameraStateLock) { mCameraState = state; mCameraStateLock.notifyAll(); } } static boolean isLegacyDevice(Context appContext, int id) { final CameraCharacteristics cameraCharacteristics = getCameraCharacteristics(appContext, id); return cameraCharacteristics != null && cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL) == CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY; } static int getNumberOfCameras(Context appContext) { final CameraManager manager = (CameraManager) appContext.getSystemService(Context.CAMERA_SERVICE); try { return manager.getCameraIdList().length; } catch (CameraAccessException | SecurityException ex) { // SecurityException is an undocumented exception, but has been seen in // http://crbug/605424. Log.e(TAG, "getNumberOfCameras: getCameraIdList(): " + ex); return 0; } } static int getCaptureApiType(int id, Context appContext) { final CameraCharacteristics cameraCharacteristics = getCameraCharacteristics(appContext, id); if (cameraCharacteristics == null) { return CaptureApiType.API_TYPE_UNKNOWN; } final int supportedHWLevel = cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL); switch (supportedHWLevel) { case CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY: return CaptureApiType.API2_LEGACY; case CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_FULL: return CaptureApiType.API2_FULL; case CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED: return CaptureApiType.API2_LIMITED; default: return CaptureApiType.API2_LEGACY; } } static String getName(int id, Context appContext) { final CameraCharacteristics cameraCharacteristics = getCameraCharacteristics(appContext, id); if (cameraCharacteristics == null) return null; final int facing = 
cameraCharacteristics.get(CameraCharacteristics.LENS_FACING); return "camera2 " + id + ", facing " + ((facing == CameraCharacteristics.LENS_FACING_FRONT) ? "front" : "back"); } static VideoCaptureFormat[] getDeviceSupportedFormats(Context appContext, int id) { final CameraCharacteristics cameraCharacteristics = getCameraCharacteristics(appContext, id); if (cameraCharacteristics == null) return null; final int[] capabilities = cameraCharacteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); // Per-format frame rate via getOutputMinFrameDuration() is only available if the // property REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR is set. boolean minFrameDurationAvailable = false; for (int cap : capabilities) { if (cap == CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR) { minFrameDurationAvailable = true; break; } } ArrayList<VideoCaptureFormat> formatList = new ArrayList<VideoCaptureFormat>(); final StreamConfigurationMap streamMap = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); final int[] formats = streamMap.getOutputFormats(); for (int format : formats) { final Size[] sizes = streamMap.getOutputSizes(format); if (sizes == null) continue; for (Size size : sizes) { double minFrameRate = 0.0f; if (minFrameDurationAvailable) { final long minFrameDuration = streamMap.getOutputMinFrameDuration(format, size); minFrameRate = (minFrameDuration == 0) ? 0.0f : (1.0 / kNanoSecondsToFps * minFrameDuration); } else { // TODO(mcasas): find out where to get the info from in this case. // Hint: perhaps using SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS. 
minFrameRate = 0.0; } formatList.add(new VideoCaptureFormat( size.getWidth(), size.getHeight(), (int) minFrameRate, 0)); } } return formatList.toArray(new VideoCaptureFormat[formatList.size()]); } VideoCaptureCamera2(Context context, int id, long nativeVideoCaptureDeviceAndroid) { super(context, id, nativeVideoCaptureDeviceAndroid); } @Override public boolean allocate(int width, int height, int frameRate) { Log.d(TAG, "allocate: requested (%d x %d) @%dfps", width, height, frameRate); synchronized (mCameraStateLock) { if (mCameraState == CameraState.OPENING || mCameraState == CameraState.CONFIGURING) { Log.e(TAG, "allocate() invoked while Camera is busy opening/configuring."); return false; } } final CameraCharacteristics cameraCharacteristics = getCameraCharacteristics(mContext, mId); final StreamConfigurationMap streamMap = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); // Find closest supported size. final Size[] supportedSizes = streamMap.getOutputSizes(ImageFormat.YUV_420_888); if (supportedSizes == null) return false; Size closestSupportedSize = null; int minDiff = Integer.MAX_VALUE; for (Size size : supportedSizes) { final int diff = Math.abs(size.getWidth() - width) + Math.abs(size.getHeight() - height); if (diff < minDiff) { minDiff = diff; closestSupportedSize = size; } } if (minDiff == Integer.MAX_VALUE) { Log.e(TAG, "No supported resolutions."); return false; } Log.d(TAG, "allocate: matched (%d x %d)", closestSupportedSize.getWidth(), closestSupportedSize.getHeight()); // |mCaptureFormat| is also used to configure the ImageReader. 
mCaptureFormat = new VideoCaptureFormat(closestSupportedSize.getWidth(), closestSupportedSize.getHeight(), frameRate, ImageFormat.YUV_420_888); int expectedFrameSize = mCaptureFormat.mWidth * mCaptureFormat.mHeight * ImageFormat.getBitsPerPixel(mCaptureFormat.mPixelFormat) / 8; mCapturedData = new byte[expectedFrameSize]; mCameraNativeOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION); // TODO(mcasas): The following line is correct for N5 with prerelease Build, // but NOT for N7 with a dev Build. Figure out which one to support. mInvertDeviceOrientationReadings = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_BACK; return true; } @Override public boolean startCapture() { Log.d(TAG, "startCapture"); changeCameraStateAndNotify(CameraState.OPENING); final CameraManager manager = (CameraManager) mContext.getSystemService(Context.CAMERA_SERVICE); final Handler mainHandler = new Handler(mContext.getMainLooper()); final CrStateListener stateListener = new CrStateListener(); try { manager.openCamera(Integer.toString(mId), stateListener, mainHandler); } catch (CameraAccessException | IllegalArgumentException | SecurityException ex) { Log.e(TAG, "allocate: manager.openCamera: " + ex); return false; } return true; } @Override public boolean stopCapture() { Log.d(TAG, "stopCapture"); // With Camera2 API, the capture is started asynchronously, which will cause problem if // stopCapture comes too quickly. Without stopping the previous capture properly, the next // startCapture will fail and make Chrome no-responding. So wait camera to be STARTED. 
synchronized (mCameraStateLock) { while (mCameraState != CameraState.STARTED && mCameraState != CameraState.STOPPED) { try { mCameraStateLock.wait(); } catch (InterruptedException ex) { Log.e(TAG, "CaptureStartedEvent: " + ex); } } if (mCameraState == CameraState.STOPPED) return true; } try { mCaptureSession.abortCaptures(); } catch (CameraAccessException | IllegalStateException ex) { Log.e(TAG, "abortCaptures: " + ex); return false; } if (mCameraDevice == null) return false; mCameraDevice.close(); changeCameraStateAndNotify(CameraState.STOPPED); return true; } @Override public void deallocate() { Log.d(TAG, "deallocate"); } }
/* * Waltz - Enterprise Architecture * Copyright (C) 2016, 2017, 2018, 2019 Waltz open source project * See README.md for more information * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific * */ package com.khartec.waltz.service.measurable_relationship; import com.khartec.waltz.data.EntityReferenceNameResolver; import com.khartec.waltz.data.entity_relationship.EntityRelationshipDao; import com.khartec.waltz.model.EntityKind; import com.khartec.waltz.model.EntityReference; import com.khartec.waltz.model.Operation; import com.khartec.waltz.model.Severity; import com.khartec.waltz.model.changelog.ImmutableChangeLog; import com.khartec.waltz.model.entity_relationship.EntityRelationship; import com.khartec.waltz.model.entity_relationship.EntityRelationshipKey; import com.khartec.waltz.model.entity_relationship.ImmutableEntityRelationship; import com.khartec.waltz.model.entity_relationship.UpdateEntityRelationshipParams; import com.khartec.waltz.service.changelog.ChangeLogService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import java.util.Collection; import java.util.List; import java.util.Map; import static com.khartec.waltz.common.Checks.checkNotNull; import static com.khartec.waltz.common.ListUtilities.map; import static com.khartec.waltz.common.ListUtilities.newArrayList; import static java.lang.String.format; @Service public class MeasurableRelationshipService { private final EntityRelationshipDao entityRelationshipDao; private final ChangeLogService changeLogService; private final 
EntityReferenceNameResolver entityReferenceNameResolver; @Autowired public MeasurableRelationshipService(EntityRelationshipDao entityRelationshipDao, EntityReferenceNameResolver entityReferenceNameResolver, ChangeLogService changeLogService) { checkNotNull(entityRelationshipDao, "entityRelationshipDao cannot be null"); checkNotNull(entityReferenceNameResolver, "entityReferenceNameResolver cannot be null"); checkNotNull(changeLogService, "changeLogService cannot be null"); this.entityRelationshipDao = entityRelationshipDao; this.entityReferenceNameResolver = entityReferenceNameResolver; this.changeLogService = changeLogService; } public Collection<EntityRelationship> findForEntityReference(EntityReference entityReference) { checkNotNull(entityReference, "entityReference cannot be null"); return entityRelationshipDao .findRelationshipsInvolving(entityReference); } public Map<EntityKind, Integer> tallyForEntityReference(EntityReference entityReference) { checkNotNull(entityReference, "entityReference cannot be null"); return entityRelationshipDao .tallyRelationshipsInvolving(entityReference); } public boolean remove(EntityRelationshipKey command, String username) { boolean result = entityRelationshipDao.remove(command); if (result) { logRemoval(command, username); } return result; } public boolean create(String userName, EntityReference entityRefA, EntityReference entityRefB, String relationshipKind, String description) { EntityRelationship relationship = ImmutableEntityRelationship.builder() .a(entityRefA) .b(entityRefB) .relationship(relationshipKind) .description(description) .lastUpdatedBy(userName) .build(); boolean result = entityRelationshipDao.create(relationship); if (result) { logAddition(relationship); } return result; } public boolean update(EntityRelationshipKey key, UpdateEntityRelationshipParams params, String username) { boolean result = entityRelationshipDao.update(key, params, username); if (result) { logUpdate(key, params, username); } return 
result; } // --- helpers --- private void logUpdate(EntityRelationshipKey key, UpdateEntityRelationshipParams params, String username) { List<String> niceNames = resolveNames( key.a(), key.b()); String paramStr = ""; paramStr += params.relationshipKind() != null ? " Relationship: " + params.relationshipKind() : ""; paramStr += params.description() != null ? " Updated description" : ""; String msg = format( "Updated explicit relationship from: '%s', to: '%s', with params: '%s'", niceNames.get(0), niceNames.get(1), paramStr); writeLog( Operation.UPDATE, key.a(), msg, username); writeLog( Operation.UPDATE, key.b(), msg, username); } private void logRemoval(EntityRelationshipKey key, String username) { List<String> niceNames = resolveNames( key.a(), key.b()); String msg = format( "Removed explicit relationship: '%s' from: '%s', to: '%s'", key.relationshipKind(), niceNames.get(0), niceNames.get(1)); writeLog( Operation.REMOVE, key.a(), msg, username); writeLog( Operation.REMOVE, key.b(), msg, username); } private void logAddition(EntityRelationship relationship) { List<String> niceNames = resolveNames( relationship.a(), relationship.b()); String msg = format( "Added explicit relationship: '%s' from: '%s', to: '%s'", relationship.relationship(), niceNames.get(0), niceNames.get(1)); writeLog( Operation.ADD, relationship.a(), msg, relationship.lastUpdatedBy()); writeLog( Operation.ADD, relationship.b(), msg, relationship.lastUpdatedBy()); } private void writeLog(Operation op, EntityReference a, String message, String username) { ImmutableChangeLog logEntry = ImmutableChangeLog.builder() .severity(Severity.INFORMATION) .operation(op) .parentReference(a) .userId(username) .message(message) .build(); changeLogService.write(logEntry); } private List<String> resolveNames(EntityReference... refs) { return map( entityReferenceNameResolver.resolve(newArrayList(refs)), r -> r.name().orElse("?")); } }
/*
 * Copyright 2002-2022 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.web.socket.sockjs.transport.session;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ScheduledFuture;
import java.util.stream.Collectors;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.springframework.core.NestedExceptionUtils;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.web.socket.CloseStatus;
import org.springframework.web.socket.TextMessage;
import org.springframework.web.socket.WebSocketHandler;
import org.springframework.web.socket.WebSocketMessage;
import org.springframework.web.socket.sockjs.SockJsMessageDeliveryException;
import org.springframework.web.socket.sockjs.SockJsTransportFailureException;
import org.springframework.web.socket.sockjs.frame.SockJsFrame;
import org.springframework.web.socket.sockjs.frame.SockJsMessageCodec;
import org.springframework.web.socket.sockjs.transport.SockJsServiceConfig;
import org.springframework.web.socket.sockjs.transport.SockJsSession;

/**
 * An abstract base class for SockJS sessions implementing {@link SockJsSession}.
 *
 * @author Rossen Stoyanchev
 * @author Sam Brannen
 * @since 4.0
 */
public abstract class AbstractSockJsSession implements SockJsSession {

	// Session lifecycle: NEW -> OPEN -> CLOSED (transitions happen in
	// delegateConnectionEstablished(), close(), and delegateConnectionClosed()).
	private enum State {NEW, OPEN, CLOSED}


	/**
	 * Log category to use on network IO exceptions after a client has gone away.
	 * <p>Servlet containers don't expose a client disconnected callback; see
	 * <a href="https://github.com/eclipse-ee4j/servlet-api/issues/44">eclipse-ee4j/servlet-api#44</a>.
	 * Therefore network IO failures may occur simply because a client has gone away,
	 * and that can fill the logs with unnecessary stack traces.
	 * <p>We make a best effort to identify such network failures, on a per-server
	 * basis, and log them under a separate log category. A simple one-line message
	 * is logged at DEBUG level, while a full stack trace is shown at TRACE level.
	 * @see #disconnectedClientLogger
	 */
	public static final String DISCONNECTED_CLIENT_LOG_CATEGORY =
			"org.springframework.web.socket.sockjs.DisconnectedClient";

	/**
	 * Tomcat: ClientAbortException or EOFException
	 * Jetty: EofException
	 * WildFly, GlassFish: java.io.IOException "Broken pipe" (already covered)
	 * <p>TODO:
	 * This definition is currently duplicated between HttpWebHandlerAdapter
	 * and AbstractSockJsSession. It is a candidate for a common utility class.
	 * @see #indicatesDisconnectedClient(Throwable)
	 */
	private static final Set<String> DISCONNECTED_CLIENT_EXCEPTIONS =
			new HashSet<>(Arrays.asList("ClientAbortException", "EOFException", "EofException"));


	/**
	 * Separate logger to use on network IO failure after a client has gone away.
	 * @see #DISCONNECTED_CLIENT_LOG_CATEGORY
	 */
	protected static final Log disconnectedClientLogger =
			LogFactory.getLog(DISCONNECTED_CLIENT_LOG_CATEGORY);

	protected final Log logger = LogFactory.getLog(getClass());

	// Guards frame writes and heartbeat scheduling/cancellation (see
	// sendHeartbeat(), scheduleHeartbeat(), cancelHeartbeat(), HeartbeatTask).
	protected final Object responseLock = new Object();

	private final String id;

	private final SockJsServiceConfig config;

	// Application-level handler that receives lifecycle and message callbacks.
	private final WebSocketHandler handler;

	private final Map<String, Object> attributes = new ConcurrentHashMap<>();

	private volatile State state = State.NEW;

	private final long timeCreated = System.currentTimeMillis();

	// Updated in updateLastActiveTime() whenever the session becomes inactive.
	private volatile long timeLastActive = this.timeCreated;

	@Nullable
	private ScheduledFuture<?> heartbeatFuture;

	@Nullable
	private HeartbeatTask heartbeatTask;

	private volatile boolean heartbeatDisabled;


	/**
	 * Create a new instance.
	 * @param id the session ID
	 * @param config the SockJS service configuration options
	 * @param handler the recipient of SockJS messages
	 * @param attributes the attributes from the HTTP handshake to associate with the WebSocket
	 * session; the provided attributes are copied, the original map is not used.
	 */
	public AbstractSockJsSession(String id, SockJsServiceConfig config, WebSocketHandler handler,
			@Nullable Map<String, Object> attributes) {

		Assert.notNull(id, "Session id must not be null");
		Assert.notNull(config, "SockJsServiceConfig must not be null");
		Assert.notNull(handler, "WebSocketHandler must not be null");

		this.id = id;
		this.config = config;
		this.handler = handler;

		if (attributes != null) {
			this.attributes.putAll(attributes);
		}
	}


	@Override
	public String getId() {
		return this.id;
	}

	// Codec used to encode/decode SockJS frames, taken from the service config.
	protected SockJsMessageCodec getMessageCodec() {
		return this.config.getMessageCodec();
	}

	public SockJsServiceConfig getSockJsServiceConfig() {
		return this.config;
	}

	@Override
	public Map<String, Object> getAttributes() {
		return this.attributes;
	}


	// Message sending

	@Override
	public final void sendMessage(WebSocketMessage<?> message) throws IOException {
		Assert.state(!isClosed(), "Cannot send a message when session is closed");
		Assert.isInstanceOf(TextMessage.class, message, "SockJS supports text messages only");
		sendMessageInternal(((TextMessage) message).getPayload());
	}

	// Transport-specific delivery of the text payload; implemented by subclasses.
	protected abstract void sendMessageInternal(String message) throws IOException;


	// Lifecycle related methods

	public boolean isNew() {
		return State.NEW.equals(this.state);
	}

	@Override
	public boolean isOpen() {
		return State.OPEN.equals(this.state);
	}

	public boolean isClosed() {
		return State.CLOSED.equals(this.state);
	}

	/**
	 * Performs cleanup and notify the {@link WebSocketHandler}.
	 */
	@Override
	public final void close() throws IOException {
		close(new CloseStatus(3000, "Go away!"));
	}

	/**
	 * Performs cleanup and notify the {@link WebSocketHandler}.
	 */
	@Override
	public final void close(CloseStatus status) throws IOException {
		if (isOpen()) {
			if (logger.isDebugEnabled()) {
				logger.debug("Closing SockJS session " + getId() + " with " + status);
			}
			this.state = State.CLOSED;
			try {
				// Skip the close frame for SESSION_NOT_RELIABLE or when already inactive.
				if (isActive() && !CloseStatus.SESSION_NOT_RELIABLE.equals(status)) {
					try {
						writeFrameInternal(SockJsFrame.closeFrame(status.getCode(), status.getReason()));
					}
					catch (Throwable ex) {
						logger.debug("Failure while sending SockJS close frame", ex);
					}
				}
				updateLastActiveTime();
				cancelHeartbeat();
				disconnect(status);
			}
			finally {
				// Always notify the handler, even if disconnect/close-frame failed.
				try {
					this.handler.afterConnectionClosed(this, status);
				}
				catch (Throwable ex) {
					logger.debug("Error from WebSocketHandler.afterConnectionClosed in " + this, ex);
				}
			}
		}
	}

	@Override
	public long getTimeSinceLastActive() {
		if (isNew()) {
			return (System.currentTimeMillis() - this.timeCreated);
		}
		else {
			// An active session is "now"; otherwise measure from the last recorded activity.
			return (isActive() ? 0 : System.currentTimeMillis() - this.timeLastActive);
		}
	}

	/**
	 * Should be invoked whenever the session becomes inactive.
	 */
	protected void updateLastActiveTime() {
		this.timeLastActive = System.currentTimeMillis();
	}

	@Override
	public void disableHeartbeat() {
		this.heartbeatDisabled = true;
		cancelHeartbeat();
	}

	// Writes a heartbeat frame (if still active and enabled) and schedules the next one.
	protected void sendHeartbeat() throws SockJsTransportFailureException {
		synchronized (this.responseLock) {
			if (isActive() && !this.heartbeatDisabled) {
				writeFrame(SockJsFrame.heartbeatFrame());
				scheduleHeartbeat();
			}
		}
	}

	// (Re-)schedules the heartbeat task to fire after the configured heartbeat time.
	protected void scheduleHeartbeat() {
		if (this.heartbeatDisabled) {
			return;
		}
		synchronized (this.responseLock) {
			cancelHeartbeat();
			if (!isActive()) {
				return;
			}
			Date time = new Date(System.currentTimeMillis() + this.config.getHeartbeatTime());
			this.heartbeatTask = new HeartbeatTask();
			this.heartbeatFuture = this.config.getTaskScheduler().schedule(this.heartbeatTask, time);
			if (logger.isTraceEnabled()) {
				logger.trace("Scheduled heartbeat in session " + getId());
			}
		}
	}

	// Cancels both the scheduled future and the task itself (the task also has
	// its own "expired" flag so an in-flight run becomes a no-op).
	protected void cancelHeartbeat() {
		synchronized (this.responseLock) {
			if (this.heartbeatFuture != null) {
				if (logger.isTraceEnabled()) {
					logger.trace("Cancelling heartbeat in session " + getId());
				}
				this.heartbeatFuture.cancel(false);
				this.heartbeatFuture = null;
			}
			if (this.heartbeatTask != null) {
				this.heartbeatTask.cancel();
				this.heartbeatTask = null;
			}
		}
	}

	/**
	 * Polling and Streaming sessions periodically close the current HTTP request and
	 * wait for the next request to come through. During this "downtime" the session is
	 * still open but inactive and unable to send messages and therefore has to buffer
	 * them temporarily. A WebSocket session by contrast is stateful and remain active
	 * until closed.
	 */
	public abstract boolean isActive();

	/**
	 * Actually close the underlying WebSocket session or in the case of HTTP
	 * transports complete the underlying request.
	 */
	protected abstract void disconnect(CloseStatus status) throws IOException;


	// Frame writing

	/**
	 * For internal use within a TransportHandler and the (TransportHandler-specific)
	 * session class.
	 */
	protected void writeFrame(SockJsFrame frame) throws SockJsTransportFailureException {
		if (logger.isTraceEnabled()) {
			logger.trace("Preparing to write " + frame);
		}
		try {
			writeFrameInternal(frame);
		}
		catch (Exception ex) {
			logWriteFrameFailure(ex);
			try {
				// Force disconnect (so we won't try to send close frame)
				disconnect(CloseStatus.SERVER_ERROR);
			}
			catch (Throwable disconnectFailure) {
				// Ignore
			}
			try {
				close(CloseStatus.SERVER_ERROR);
			}
			catch (Throwable closeFailure) {
				// Nothing of consequence, already forced disconnect
			}
			throw new SockJsTransportFailureException("Failed to write " + frame, getId(), ex);
		}
	}

	protected abstract void writeFrameInternal(SockJsFrame frame) throws IOException;

	// Routes write failures to the dedicated "disconnected client" logger when
	// they look like the client simply went away; otherwise logs normally.
	private void logWriteFrameFailure(Throwable ex) {
		if (indicatesDisconnectedClient(ex)) {
			if (disconnectedClientLogger.isTraceEnabled()) {
				disconnectedClientLogger.trace("Looks like the client has gone away", ex);
			}
			else if (disconnectedClientLogger.isDebugEnabled()) {
				disconnectedClientLogger.debug("Looks like the client has gone away: " + ex +
						" (For a full stack trace, set the log category '" +
						DISCONNECTED_CLIENT_LOG_CATEGORY + "' to TRACE level.)");
			}
		}
		else {
			logger.debug("Terminating connection after failure to send message to client", ex);
		}
	}

	// Heuristic: match "broken pipe" in the root-cause message, or one of the
	// container-specific exception class names in DISCONNECTED_CLIENT_EXCEPTIONS.
	private boolean indicatesDisconnectedClient(Throwable ex) {
		String message = NestedExceptionUtils.getMostSpecificCause(ex).getMessage();
		message = (message != null ? message.toLowerCase() : "");
		String className = ex.getClass().getSimpleName();
		return (message.contains("broken pipe") || DISCONNECTED_CLIENT_EXCEPTIONS.contains(className));
	}


	// Delegation methods

	// Marks the session OPEN and notifies the handler of the new connection.
	public void delegateConnectionEstablished() throws Exception {
		this.state = State.OPEN;
		this.handler.afterConnectionEstablished(this);
	}

	// Delivers each message to the handler; if the session closes mid-delivery,
	// remaining messages are logged as undelivered rather than raising an error.
	public void delegateMessages(String... messages) throws SockJsMessageDeliveryException {
		for (int i = 0; i < messages.length; i++) {
			try {
				if (isClosed()) {
					logUndeliveredMessages(i, messages);
					return;
				}
				this.handler.handleMessage(this, new TextMessage(messages[i]));
			}
			catch (Exception ex) {
				if (isClosed()) {
					if (logger.isTraceEnabled()) {
						logger.trace("Failed to handle message '" + messages[i] + "'", ex);
					}
					logUndeliveredMessages(i, messages);
					return;
				}
				throw new SockJsMessageDeliveryException(this.id, getUndelivered(messages, i), ex);
			}
		}
	}

	private void logUndeliveredMessages(int index, String[] messages) {
		List<String> undelivered = getUndelivered(messages, index);
		if (logger.isTraceEnabled() && !undelivered.isEmpty()) {
			logger.trace("Dropped inbound message(s) due to closed session: " + undelivered);
		}
	}

	// Returns messages[i..] with blank (whitespace-only) messages filtered out.
	private static List<String> getUndelivered(String[] messages, int i) {
		return switch (messages.length - i) {
			case 0 -> Collections.emptyList();
			case 1 -> (messages[i].trim().isEmpty() ?
					Collections.<String>emptyList() : Collections.singletonList(messages[i]));
			default -> Arrays.stream(Arrays.copyOfRange(messages, i, messages.length))
					.filter(message -> !message.trim().isEmpty())
					.collect(Collectors.toList());
		};
	}

	/**
	 * Invoked when the underlying connection is closed.
	 */
	public final void delegateConnectionClosed(CloseStatus status) throws Exception {
		if (!isClosed()) {
			try {
				updateLastActiveTime();
				// Avoid cancelHeartbeat() and responseLock within server "close" callback
				ScheduledFuture<?> future = this.heartbeatFuture;
				if (future != null) {
					this.heartbeatFuture = null;
					future.cancel(false);
				}
			}
			finally {
				this.state = State.CLOSED;
				this.handler.afterConnectionClosed(this, status);
			}
		}
	}

	/**
	 * Close due to error arising from SockJS transport handling.
	 */
	public void tryCloseWithSockJsTransportError(Throwable error, CloseStatus closeStatus) {
		if (logger.isDebugEnabled()) {
			logger.debug("Closing due to transport error for " + this);
		}
		try {
			delegateError(error);
		}
		catch (Throwable delegateException) {
			// Ignore
			logger.debug("Exception from error handling delegate", delegateException);
		}
		try {
			close(closeStatus);
		}
		catch (Throwable closeException) {
			logger.debug("Failure while closing " + this, closeException);
		}
	}

	public void delegateError(Throwable ex) throws Exception {
		this.handler.handleTransportError(this, ex);
	}


	// Self description

	@Override
	public String toString() {
		return getClass().getSimpleName() + "[id=" + getId() + "]";
	}


	// One-shot heartbeat runnable; "expired" guards against running after
	// cancellation and ensures a task fires at most once.
	private class HeartbeatTask implements Runnable {

		private boolean expired;

		@Override
		public void run() {
			synchronized (responseLock) {
				if (!this.expired && !isClosed()) {
					try {
						sendHeartbeat();
					}
					catch (Throwable ex) {
						// Ignore: already handled in writeFrame...
					}
					finally {
						this.expired = true;
					}
				}
			}
		}

		void cancel() {
			this.expired = true;
		}
	}

}
/* * Copyright 2002-2012 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.util.xml; import java.util.LinkedHashMap; import java.util.Map; import javax.xml.namespace.QName; import javax.xml.stream.Location; import javax.xml.stream.XMLStreamException; import org.xml.sax.InputSource; import org.xml.sax.Locator; import org.xml.sax.SAXException; import org.xml.sax.SAXNotRecognizedException; import org.xml.sax.SAXNotSupportedException; import org.xml.sax.SAXParseException; import org.springframework.util.StringUtils; /** * Abstract base class for SAX {@code XMLReader} implementations that use StAX as a basis. 
/**
 * Abstract base class for SAX {@code XMLReader} implementations that use StAX as a basis.
 *
 * @author Arjen Poutsma
 * @since 3.0
 * @see #setContentHandler(org.xml.sax.ContentHandler)
 * @see #setDTDHandler(org.xml.sax.DTDHandler)
 * @see #setEntityResolver(org.xml.sax.EntityResolver)
 * @see #setErrorHandler(org.xml.sax.ErrorHandler)
 */
abstract class AbstractStaxXMLReader extends AbstractXMLReader {

	// SAX feature names handled locally rather than delegated to the superclass
	private static final String NAMESPACES_FEATURE_NAME = "http://xml.org/sax/features/namespaces";

	private static final String NAMESPACE_PREFIXES_FEATURE_NAME = "http://xml.org/sax/features/namespace-prefixes";

	private static final String IS_STANDALONE_FEATURE_NAME = "http://xml.org/sax/features/is-standalone";

	private boolean namespacesFeature = true;

	private boolean namespacePrefixesFeature = false;

	// Unknown (null) until a subclass reports it via setStandalone()
	private Boolean isStandalone;

	// Currently open prefix -> namespace URI mappings, in declaration order
	private final Map<String, String> namespaces = new LinkedHashMap<String, String>();


	/**
	 * Return the value of a SAX feature. The two namespace features and
	 * {@code is-standalone} are handled here; anything else is delegated
	 * to the superclass.
	 * @throws SAXNotSupportedException if standalone-ness is queried before
	 * the XML declaration has been parsed
	 */
	@Override
	public boolean getFeature(String name) throws SAXNotRecognizedException, SAXNotSupportedException {
		if (NAMESPACES_FEATURE_NAME.equals(name)) {
			return this.namespacesFeature;
		}
		else if (NAMESPACE_PREFIXES_FEATURE_NAME.equals(name)) {
			return this.namespacePrefixesFeature;
		}
		else if (IS_STANDALONE_FEATURE_NAME.equals(name)) {
			if (this.isStandalone != null) {
				return this.isStandalone;
			}
			else {
				throw new SAXNotSupportedException("startDocument() callback not completed yet");
			}
		}
		else {
			return super.getFeature(name);
		}
	}

	/**
	 * Set a SAX feature. The two namespace features are stored locally;
	 * anything else is delegated to the superclass.
	 */
	@Override
	public void setFeature(String name, boolean value) throws SAXNotRecognizedException, SAXNotSupportedException {
		if (NAMESPACES_FEATURE_NAME.equals(name)) {
			this.namespacesFeature = value;
		}
		else if (NAMESPACE_PREFIXES_FEATURE_NAME.equals(name)) {
			this.namespacePrefixesFeature = value;
		}
		else {
			super.setFeature(name, value);
		}
	}

	// Called by subclasses once the XML declaration has been processed
	protected void setStandalone(boolean standalone) {
		this.isStandalone = standalone;
	}

	/**
	 * Indicates whether the SAX feature {@code http://xml.org/sax/features/namespaces} is turned on.
	 */
	protected boolean hasNamespacesFeature() {
		return this.namespacesFeature;
	}

	/**
	 * Indicates whether the SAX feature {@code http://xml.org/sax/features/namespaces-prefixes} is turned on.
	 */
	protected boolean hasNamespacePrefixesFeature() {
		return this.namespacePrefixesFeature;
	}

	/**
	 * Convert a {@code QName} to a qualified name, as used by DOM and SAX.
	 * The returned string has a format of {@code prefix:localName} if the
	 * prefix is set, or just {@code localName} if not.
	 * @param qName the {@code QName}
	 * @return the qualified name
	 */
	protected String toQualifiedName(QName qName) {
		String prefix = qName.getPrefix();
		if (!StringUtils.hasLength(prefix)) {
			return qName.getLocalPart();
		}
		else {
			return prefix + ":" + qName.getLocalPart();
		}
	}


	/**
	 * Parse the StAX XML reader passed at construction-time.
	 * <p><b>NOTE:</b>: The given {@code InputSource} is not read, but ignored.
	 * @param ignored is ignored
	 * @throws SAXException a SAX exception, possibly wrapping a {@code XMLStreamException}
	 */
	@Override
	public final void parse(InputSource ignored) throws SAXException {
		parse();
	}

	/**
	 * Parse the StAX XML reader passed at construction-time.
	 * <p><b>NOTE:</b>: The given system identifier is not read, but ignored.
	 * @param ignored is ignored
	 * @throws SAXException A SAX exception, possibly wrapping a {@code XMLStreamException}
	 */
	@Override
	public final void parse(String ignored) throws SAXException {
		parse();
	}

	// Run the StAX parse, translating XMLStreamExceptions into SAXParseExceptions
	// and routing them through the registered ErrorHandler when one is set.
	private void parse() throws SAXException {
		try {
			parseInternal();
		}
		catch (XMLStreamException ex) {
			Locator locator = null;
			if (ex.getLocation() != null) {
				locator = new StaxLocator(ex.getLocation());
			}
			SAXParseException saxException = new SAXParseException(ex.getMessage(), locator, ex);
			if (getErrorHandler() != null) {
				getErrorHandler().fatalError(saxException);
			}
			else {
				throw saxException;
			}
		}
	}

	/**
	 * Template-method that parses the StAX reader passed at construction-time.
	 */
	protected abstract void parseInternal() throws SAXException, XMLStreamException;

	/**
	 * Starts the prefix mapping for the given prefix.
	 * @see org.xml.sax.ContentHandler#startPrefixMapping(String, String)
	 */
	protected void startPrefixMapping(String prefix, String namespace) throws SAXException {
		if (getContentHandler() != null) {
			if (prefix == null) {
				prefix = "";
			}
			if (!StringUtils.hasLength(namespace)) {
				return;
			}
			// Only fire the callback when the mapping actually changes
			if (!namespace.equals(namespaces.get(prefix))) {
				getContentHandler().startPrefixMapping(prefix, namespace);
				namespaces.put(prefix, namespace);
			}
		}
	}

	/**
	 * Ends the prefix mapping for the given prefix.
	 * @see org.xml.sax.ContentHandler#endPrefixMapping(String)
	 */
	protected void endPrefixMapping(String prefix) throws SAXException {
		if (getContentHandler() != null) {
			if (namespaces.containsKey(prefix)) {
				getContentHandler().endPrefixMapping(prefix);
				namespaces.remove(prefix);
			}
		}
	}


	/**
	 * Implementation of the {@code Locator} interface that is based on a StAX {@code Location}.
	 * @see Locator
	 * @see Location
	 */
	private static class StaxLocator implements Locator {

		private Location location;

		protected StaxLocator(Location location) {
			this.location = location;
		}

		@Override
		public String getPublicId() {
			return location.getPublicId();
		}

		@Override
		public String getSystemId() {
			return location.getSystemId();
		}

		@Override
		public int getLineNumber() {
			return location.getLineNumber();
		}

		@Override
		public int getColumnNumber() {
			return location.getColumnNumber();
		}
	}

}
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.fail; import com.google.common.collect.ImmutableMap; import com.google.common.io.ByteStreams; import com.google.devtools.build.java.bazel.BazelJavaCompiler; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import org.objectweb.asm.ClassReader; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.Opcodes; import java.io.File; import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.Enumeration; import java.util.HashMap; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.jar.JarEntry; import java.util.jar.JarFile; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import javax.annotation.processing.AbstractProcessor; import javax.annotation.processing.RoundEnvironment; import javax.lang.model.element.TypeElement; import javax.tools.Diagnostic; import javax.tools.DiagnosticCollector; import javax.tools.JavaCompiler; import javax.tools.JavaFileObject; import javax.tools.StandardJavaFileManager; import javax.tools.StandardLocation; /** * JUnit tests for ijar tool. 
*/ @RunWith(JUnit4.class) public class IjarTests { private static File getTmpDir() { String tmpdir = System.getenv("TEST_TMPDIR"); if (tmpdir == null) { // Fall back on the system temporary directory tmpdir = System.getProperty("java.io.tmpdir"); } if (tmpdir == null) { fail("TEST_TMPDIR environment variable is not set!"); } return new File(tmpdir); } DiagnosticCollector<JavaFileObject> diagnostics; private JavaCompiler.CompilationTask makeCompilationTask(String... files) throws IOException { JavaCompiler compiler = BazelJavaCompiler.newInstance(); StandardJavaFileManager fileManager = compiler.getStandardFileManager(null, null, null); fileManager.setLocation(StandardLocation.CLASS_PATH, Arrays.asList(new File("third_party/ijar/test/interface_ijar_testlib.jar"))); fileManager.setLocation(StandardLocation.CLASS_OUTPUT, Arrays.asList(getTmpDir())); diagnostics = new DiagnosticCollector<JavaFileObject>(); return compiler.getTask(null, fileManager, diagnostics, Arrays.asList("-Xlint:deprecation"), // used for deprecation tests null, fileManager.getJavaFileObjects(files)); } /** * Test that the ijar tool preserves private nested classes as they * may be exposed through public API. This test relies on an * interface jar provided through the build rule * :interface_ijar_testlib and the Java source file * PrivateNestedClass.java. */ @Test public void testPrivateNestedClass() throws IOException { if (!makeCompilationTask("third_party/ijar/test/PrivateNestedClass.java").call()) { fail(getFailedCompilationMessage()); } } /** * Test that the ijar tool preserves annotations, especially @Target * meta-annotation. */ @Test public void testRestrictedAnnotations() throws IOException { assertFalse(makeCompilationTask("third_party/ijar/test/UseRestrictedAnnotation.java").call()); } /** * Test that the ijar tool preserves private nested classes as they * may be exposed through public API. 
This test relies on an * interface jar provided through the build rule * :interface_ijar_testlib and the Java source file * PrivateNestedClass.java. */ @Test public void testDeprecatedParts() throws IOException { if (!makeCompilationTask("third_party/ijar/test/UseDeprecatedParts.java").call()) { fail(getFailedCompilationMessage()); } int deprecatedWarningCount = 0; for (Diagnostic<? extends JavaFileObject> diagnostic : diagnostics.getDiagnostics()) { if ((diagnostic.getKind() == Diagnostic.Kind.MANDATORY_WARNING) && // Java 6: (diagnostic.getMessage(Locale.ENGLISH).startsWith("[deprecation]") || // Java 7: diagnostic.getMessage(Locale.ENGLISH).contains("has been deprecated"))) { deprecatedWarningCount++; } } assertEquals(16, deprecatedWarningCount); } /** * Test that the ijar tool preserves EnclosingMethod attributes and doesn't * prevent annotation processors from accessing all the elements in a package. */ @Test public void testEnclosingMethod() throws IOException { JavaCompiler.CompilationTask task = makeCompilationTask("third_party/ijar/test/package-info.java"); task.setProcessors(Arrays.asList(new AbstractProcessor() { @Override public Set<String> getSupportedAnnotationTypes() { return Collections.singleton("*"); } @Override public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) { roundEnv.getElementsAnnotatedWith(java.lang.Override.class); return true; } })); if (!task.call()) { fail(getFailedCompilationMessage()); } } @Test public void testVerifyStripping() throws Exception { ZipFile zip = new ZipFile("third_party/ijar/test/interface_ijar_testlib.jar"); Enumeration<? 
extends ZipEntry> entries = zip.entries(); while (entries.hasMoreElements()) { ZipEntry entry = entries.nextElement(); ClassReader reader = new ClassReader(zip.getInputStream(entry)); StripVerifyingVisitor verifier = new StripVerifyingVisitor(); reader.accept(verifier, 0); if (verifier.errors.size() > 0) { StringBuilder builder = new StringBuilder(); builder.append("Verification of "); builder.append(entry.getName()); builder.append(" failed: "); for (String msg : verifier.errors) { builder.append(msg); builder.append("\t"); } fail(builder.toString()); } } } private String getFailedCompilationMessage() { StringBuilder builder = new StringBuilder(); builder.append("Build failed unexpectedly"); for (Diagnostic<? extends JavaFileObject> diagnostic : diagnostics.getDiagnostics()) { builder.append(String.format( "\t%s line %d column %d: %s", diagnostic.getKind().toString(), diagnostic.getLineNumber(), diagnostic.getColumnNumber(), diagnostic.getMessage(Locale.ENGLISH))); } return builder.toString(); } @Test public void localAndAnonymous() throws Exception { Map<String, byte[]> lib = readJar("third_party/ijar/test/liblocal_and_anonymous_lib.jar"); Map<String, byte[]> ijar = readJar("third_party/ijar/test/local_and_anonymous-interface.jar"); assertThat(lib.keySet()) .containsExactly( "LocalAndAnonymous$1.class", "LocalAndAnonymous$2.class", "LocalAndAnonymous$1LocalClass.class", "LocalAndAnonymous.class", "LocalAndAnonymous$NestedClass.class", "LocalAndAnonymous$InnerClass.class", "LocalAndAnonymous$PrivateInnerClass.class"); assertThat(ijar.keySet()) .containsExactly( "LocalAndAnonymous.class", "LocalAndAnonymous$NestedClass.class", "LocalAndAnonymous$InnerClass.class", "LocalAndAnonymous$PrivateInnerClass.class"); assertThat(innerClasses(lib.get("LocalAndAnonymous.class"))) .isEqualTo( ImmutableMap.<String, String>builder() .put("LocalAndAnonymous$1", "null") .put("LocalAndAnonymous$2", "null") .put("LocalAndAnonymous$1LocalClass", "null") 
.put("LocalAndAnonymous$InnerClass", "LocalAndAnonymous") .put("LocalAndAnonymous$NestedClass", "LocalAndAnonymous") .put("LocalAndAnonymous$PrivateInnerClass", "LocalAndAnonymous") .build()); assertThat(innerClasses(ijar.get("LocalAndAnonymous.class"))) .containsExactly( "LocalAndAnonymous$InnerClass", "LocalAndAnonymous", "LocalAndAnonymous$NestedClass", "LocalAndAnonymous", "LocalAndAnonymous$PrivateInnerClass", "LocalAndAnonymous"); } static Map<String, byte[]> readJar(String path) throws IOException { Map<String, byte[]> classes = new HashMap<>(); try (JarFile jf = new JarFile(path)) { Enumeration<JarEntry> entries = jf.entries(); while (entries.hasMoreElements()) { JarEntry je = entries.nextElement(); if (!je.getName().endsWith(".class")) { continue; } classes.put(je.getName(), ByteStreams.toByteArray(jf.getInputStream(je))); } } return classes; } static Map<String, String> innerClasses(byte[] bytes) { final Map<String, String> innerClasses = new HashMap<>(); new ClassReader(bytes) .accept( new ClassVisitor(Opcodes.ASM5) { @Override public void visitInnerClass( String name, String outerName, String innerName, int access) { innerClasses.put(name, String.valueOf(outerName)); } }, /*flags=*/ 0); return innerClasses; } }
/**
 * SignalAttrSet.java
 *
 * Author  : Christopher K. Allen
 * Since   : Mar 17, 2014
 */
package xal.smf.impl.profile;

import java.util.LinkedList;
import java.util.List;

import xal.ca.ConnectionException;
import xal.ca.GetException;
import xal.tools.data.DataAdaptor;
import xal.tools.data.DataListener;
import xal.smf.AcceleratorNode;
import xal.smf.scada.ScadaFieldDescriptor;

/**
 * Data structure containing the signal properties of the
 * the profile data sets acquired from a wire profile device.
 *
 * <p>
 * <b>Ported from XAL on Jul 15, 2014.</b><br>
 * &middot; Jonathan M. Freed
 * </p>
 *
 * @since  Feb 23, 2010
 * @author Christopher K. Allen
 */
public abstract class SignalAttrSet implements DataListener {

    /*
     * Global Methods
     */

    // NOTE(review): two large blocks of commented-out, annotation-based factory
    // methods (createSignalAttrs variants using the old ADaqWire annotations)
    // were deleted here as dead code; recover them from version control if needed.

    /*
     * Instance Attributes
     */

    /** Horizontal wire signal properties */
    public SignalAttrs          hor;

    /** Vertical wire signal properties (doc fixed: previously said "Horizontal") */
    public SignalAttrs          ver;

    /** Diagonal wire signal properties (doc fixed: previously said "Horizontal") */
    public SignalAttrs          dia;

    /*
     * Operations
     */

    /**
     * Returns the set of all SCADA field descriptors describing the
     * data acquisition channels.  Since this is an active data structure
     * these channels are used internally to populate the data fields,
     * which is profile data taken from the hardware.
     * <br>
     * <br>
     * This <b>will work</b>.  If the child class has been annotated by
     * <code>ASignal.ASet</code> the <code>Signal</code> attributes
     * will be connected in the zero-argument constructor.
     *
     * @return  set of all channel field descriptors used by this data structure
     *
     * @author Christopher K. Allen
     * @since  Mar 15, 2011
     */
    public List<ScadaFieldDescriptor> getFieldDescriptors() {
        // Concatenate the descriptors of all three measurement planes
        List<ScadaFieldDescriptor> lstFdHor = this.hor.getFieldDescriptors();
        List<ScadaFieldDescriptor> lstFdVer = this.ver.getFieldDescriptors();
        List<ScadaFieldDescriptor> lstFdDia = this.dia.getFieldDescriptors();

        List<ScadaFieldDescriptor> lstFds = new LinkedList<ScadaFieldDescriptor>();
        lstFds.addAll(lstFdHor);
        lstFds.addAll(lstFdVer);
        lstFds.addAll(lstFdDia);

        return lstFds;
    }

    /**
     * Sets the signal attribute for the given measurement angle to the given signal
     * attribute object.
     *
     * @param angle         measurement angle of the modified signal attributes
     * @param attrSignal    new signal attributes for the given measurement angle
     *
     * @author Christopher K. Allen
     * @since  Apr 23, 2014
     */
    public void setSignalAttrs(ProfileDevice.ANGLE angle, SignalAttrs attrSignal) {
        switch (angle) {
        case HOR:
            this.hor = attrSignal;
            break;
        case VER:
            this.ver = attrSignal;
            break;
        case DIA:
            this.dia = attrSignal;
            break;
        default:
            // unreachable for the known ANGLE constants
            ;
        }
    }

    /**
     * Returns the signal attributes of this set corresponding to the give
     * profile angle.
     *
     * @param angle     measurement angle of the desired signal properties
     *
     * @return          signal properties corresponding to the given angle
     *
     * @author Christopher K. Allen
     * @since  Apr 23, 2014
     */
    public SignalAttrs getSignalAttrs(ProfileDevice.ANGLE angle) {
        switch (angle) {
        case HOR:
            return this.hor;
        case VER:
            return this.ver;
        case DIA:
            return this.dia;
        default:
            // unreachable for the known ANGLE constants
            return null;
        }
    }

    /**
     * Averages all the signal attributes in the signal attributes set using the given weighting
     * factor.  See <code>Signal{@link SignalAttrs#average(Signal, double)}</code> for additional
     * information.
     *
     * @param setAvg        signal set to average into this one
     * @param dblWtFac      averaging magnitude &lambda; &isin; [0,1]
     *
     * @throws IllegalArgumentException the provided signal is not the same size as this signal
     *
     * @author Christopher K. Allen
     * @since  May 1, 2014
     */
    public void average(SignalAttrSet setAvg, double dblWtFac) throws IllegalArgumentException {
        for ( ProfileDevice.ANGLE angle : ProfileDevice.ANGLE.values() ) {
            SignalAttrs     attrsAvg = setAvg.getSignalAttrs(angle);

            getSignalAttrs(angle).average(attrsAvg, dblWtFac);
        }
    }

    /*
     * DataListener Interface
     */

    /**
     * Label used for parameter set identification.
     *
     * @return  string label (identifier) for parameter set
     *
     * @since   Mar 4, 2010
     * @author  Christopher K. Allen
     *
     * @see xal.tools.data.DataListener#dataLabel()
     */
    @Override
    public String dataLabel() {
        return this.getClass().getCanonicalName();
    }

    /**
     * Load the contents of this signal traits set
     * from the persistent store behind the
     * <code>DataListener</code> interface.  Handles three historical
     * persistence formats: the original XAL format ("gov.sns." prefix),
     * an intermediate Open XAL format (index-ordered children), and the
     * current per-angle-label format.
     *
     * @param daptSrc       data source
     *
     * @since   Mar 4, 2010
     * @author  Christopher K. Allen
     *
     * @see xal.tools.data.DataListener#update(xal.tools.data.DataAdaptor)
     */
    @Override
    public void update(DataAdaptor daptSrc) {

        // Get the node containing this data from the given parent node
        String      strLabel = this.dataLabel();
        DataAdaptor daptSgnl = daptSrc.childAdaptor( strLabel );

        // Check the format is from the XAL version
        if (daptSgnl == null) {
            strLabel = "gov.sns." + strLabel;
            daptSgnl = daptSrc.childAdaptor(strLabel);
        }

        // Were we given the current data node, not the parent node?
        if (daptSgnl == null)
            daptSgnl = daptSrc;

        // Look for the middle version format - Open XAL before the format correction
        //  was made.  This one is problematic, we must guess at the order.
        String            strLblOld  = SignalAttrs.class.getCanonicalName();
        List<DataAdaptor> lstDaptOld = daptSgnl.childAdaptors(strLblOld);

        // If we are in the middle format, we load sequentially according to index and return.
        if (lstDaptOld.size() > 0) {
            for (ProfileDevice.ANGLE angle : ProfileDevice.ANGLE.values()) {
                int         index = angle.getIndex();
                DataAdaptor dapt  = lstDaptOld.get(index);
                SignalAttrs attr  = this.getSignalAttrs(angle);

                attr.update(dapt);
            }

            return;
        }

        // Assume that we have the XAL format or the current format
        //  Read in each signal using the current data format
        for (ProfileDevice.ANGLE angle : ProfileDevice.ANGLE.values()) {
            DataAdaptor dapt = daptSgnl.childAdaptor( angle.getLabel() );
            SignalAttrs attr = this.getSignalAttrs(angle);

            attr.update(dapt);
        }
    }

    /**
     * Write out the contents of this signal traits
     * set to the given data store.  One child node is created per
     * measurement angle, keyed by the angle's label.
     *
     * @param adaptor       data store exposing <code>DataListener</code> interface
     *
     * @since   Mar 4, 2010
     * @author  Christopher K. Allen
     *
     * @see xal.tools.data.DataListener#write(xal.tools.data.DataAdaptor)
     */
    @Override
    public void write(DataAdaptor adaptor) {
        DataAdaptor daptSgnls = adaptor.createChild( this.dataLabel() );

        for (ProfileDevice.ANGLE angle : ProfileDevice.ANGLE.values()) {
            DataAdaptor dapt = daptSgnls.createChild( angle.getLabel() );
            SignalAttrs attr = this.getSignalAttrs(angle);

            attr.write(dapt);
        }
    }

    /*
     * Initialization
     */

    /**
     * Create a new <code>ProfileAttrSet</code> object from the annotation
     * information in the class type.
     *
     * @since  Feb 25, 2010
     * @author Christopher K. Allen
     */
    protected SignalAttrSet() {
        super();

        // If there is not annotations for the channel bindings this is just an
        //  empty data structure
        if (! this.getClass().isAnnotationPresent(ASignalAttrs.ASet.class)) {
            this.hor = new SignalAttrs();
            this.ver = new SignalAttrs();
            this.dia = new SignalAttrs();

            return;
        }

        // Get the bindings annotation then create and connect the channels
        ASignalAttrs.ASet annAttrSet = this.getClass().getAnnotation(ASignalAttrs.ASet.class);

        this.hor = SignalAttrs.createConnectedSignal(annAttrSet.attrHor());
        this.ver = SignalAttrs.createConnectedSignal(annAttrSet.attrVer());
        this.dia = SignalAttrs.createConnectedSignal(annAttrSet.attrDia());
    }

    /**
     * Create a new, initialized <code>ProfileAttrSet</code> object,
     * loading each plane's current values from the given hardware node.
     *
     * @param ws    hardware device containing initialization data.
     *
     * @throws ConnectionException  unable to connect to a parameter read back channel
     * @throws GetException         general CA GET exception while fetch field value
     *
     * @since  Feb 25, 2010
     * @author Christopher K. Allen
     */
    protected SignalAttrSet(AcceleratorNode ws) throws ConnectionException, GetException {
        this();

        this.hor.loadHardwareValues(ws);
        this.ver.loadHardwareValues(ws);
        this.dia.loadHardwareValues(ws);
    }
}
/*
 * Copyright 2016 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.keycloak.events.mongo;

import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;

import org.keycloak.events.admin.AdminEvent;
import org.keycloak.events.admin.AdminEventQuery;
import org.keycloak.events.admin.AuthDetails;
import org.keycloak.events.admin.OperationType;
import org.keycloak.events.Event;
import org.keycloak.events.EventQuery;
import org.keycloak.events.EventStoreProvider;
import org.keycloak.events.EventType;

import java.util.HashMap;
import java.util.Map;

/**
 * {@link EventStoreProvider} backed by two Mongo collections: one for login
 * {@link Event}s and one for {@link AdminEvent}s.
 *
 * @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
 */
public class MongoEventStoreProvider implements EventStoreProvider {

    // Collections are supplied by the factory; this provider never closes them.
    private final DBCollection events;
    private final DBCollection adminEvents;

    public MongoEventStoreProvider(DBCollection events, DBCollection adminEvents) {
        this.events = events;
        this.adminEvents = adminEvents;
    }

    @Override
    public EventQuery createQuery() {
        return new MongoEventQuery(events);
    }

    /** Removes all login events across all realms. */
    @Override
    public void clear() {
        events.remove(new BasicDBObject());
    }

    /** Removes all login events for the given realm. */
    @Override
    public void clear(String realmId) {
        events.remove(new BasicDBObject("realmId", realmId));
    }

    /** Removes login events for the realm with a timestamp strictly less than {@code olderThan}. */
    @Override
    public void clear(String realmId, long olderThan) {
        BasicDBObject q = new BasicDBObject();
        q.put("realmId", realmId);
        q.put("time", new BasicDBObject("$lt", olderThan));
        events.remove(q);
    }

    @Override
    public void onEvent(Event event) {
        events.insert(convertEvent(event));
    }

    @Override
    public AdminEventQuery createAdminQuery() {
        return new MongoAdminEventQuery(adminEvents);
    }

    /** Removes all admin events across all realms. */
    @Override
    public void clearAdmin() {
        adminEvents.remove(new BasicDBObject());
    }

    /** Removes all admin events for the given realm. */
    @Override
    public void clearAdmin(String realmId) {
        adminEvents.remove(new BasicDBObject("realmId", realmId));
    }

    /** Removes admin events for the realm with a timestamp strictly less than {@code olderThan}. */
    @Override
    public void clearAdmin(String realmId, long olderThan) {
        BasicDBObject q = new BasicDBObject();
        q.put("realmId", realmId);
        q.put("time", new BasicDBObject("$lt", olderThan));
        adminEvents.remove(q);
    }

    @Override
    public void onEvent(AdminEvent adminEvent, boolean includeRepresentation) {
        adminEvents.insert(convertAdminEvent(adminEvent, includeRepresentation));
    }

    @Override
    public void close() {
        // No-op: the underlying collections are managed outside this provider.
    }

    /** Converts a login {@link Event} to its Mongo document form. */
    static DBObject convertEvent(Event event) {
        BasicDBObject e = new BasicDBObject();
        e.put("time", event.getTime());
        e.put("type", event.getType().toString());
        e.put("realmId", event.getRealmId());
        e.put("clientId", event.getClientId());
        e.put("userId", event.getUserId());
        e.put("sessionId", event.getSessionId());
        e.put("ipAddress", event.getIpAddress());
        e.put("error", event.getError());

        BasicDBObject details = new BasicDBObject();
        if (event.getDetails() != null) {
            for (Map.Entry<String, String> entry : event.getDetails().entrySet()) {
                details.put(entry.getKey(), entry.getValue());
            }
        }
        e.put("details", details);

        return e;
    }

    /** Converts a Mongo document back into a login {@link Event}. */
    static Event convertEvent(BasicDBObject o) {
        Event event = new Event();
        event.setTime(o.getLong("time"));
        event.setType(EventType.valueOf(o.getString("type")));
        event.setRealmId(o.getString("realmId"));
        event.setClientId(o.getString("clientId"));
        event.setUserId(o.getString("userId"));
        event.setSessionId(o.getString("sessionId"));
        event.setIpAddress(o.getString("ipAddress"));
        event.setError(o.getString("error"));

        BasicDBObject d = (BasicDBObject) o.get("details");
        if (d != null) {
            Map<String, String> details = new HashMap<String, String>();
            for (Object k : d.keySet()) {
                details.put((String) k, d.getString((String) k));
            }
            event.setDetails(details);
        }

        return event;
    }

    /** Converts an {@link AdminEvent} to its Mongo document form. */
    private static DBObject convertAdminEvent(AdminEvent adminEvent, boolean includeRepresentation) {
        BasicDBObject e = new BasicDBObject();
        e.put("time", adminEvent.getTime());
        e.put("realmId", adminEvent.getRealmId());
        e.put("operationType", adminEvent.getOperationType().toString());
        setAuthDetails(e, adminEvent.getAuthDetails());
        e.put("resourcePath", adminEvent.getResourcePath());
        e.put("error", adminEvent.getError());

        if (includeRepresentation) {
            e.put("representation", adminEvent.getRepresentation());
        }

        return e;
    }

    /** Converts a Mongo document back into an {@link AdminEvent}. */
    static AdminEvent convertAdminEvent(BasicDBObject o) {
        AdminEvent adminEvent = new AdminEvent();
        adminEvent.setTime(o.getLong("time"));
        adminEvent.setRealmId(o.getString("realmId"));
        adminEvent.setOperationType(OperationType.valueOf(o.getString("operationType")));
        setAuthDetails(adminEvent, o);
        adminEvent.setResourcePath(o.getString("resourcePath"));
        adminEvent.setError(o.getString("error"));

        if (o.getString("representation") != null) {
            adminEvent.setRepresentation(o.getString("representation"));
        }

        return adminEvent;
    }

    /**
     * Copies auth details onto the Mongo document. Now guarded against a null
     * {@code authDetails}: previously this threw a NullPointerException when an
     * admin event carried no auth details.
     */
    private static void setAuthDetails(BasicDBObject e, AuthDetails authDetails) {
        if (authDetails == null) {
            return;
        }
        e.put("authRealmId", authDetails.getRealmId());
        e.put("authClientId", authDetails.getClientId());
        e.put("authUserId", authDetails.getUserId());
        e.put("authIpAddress", authDetails.getIpAddress());
    }

    /** Reads auth details from the Mongo document into the given {@link AdminEvent}. */
    private static void setAuthDetails(AdminEvent adminEvent, BasicDBObject o) {
        AuthDetails authDetails = new AuthDetails();
        authDetails.setRealmId(o.getString("authRealmId"));
        authDetails.setClientId(o.getString("authClientId"));
        authDetails.setUserId(o.getString("authUserId"));
        authDetails.setIpAddress(o.getString("authIpAddress"));
        adminEvent.setAuthDetails(authDetails);
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.discovery.zen; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.internal.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.DiscoveryStats; import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.discovery.zen.fd.MasterFaultDetection; import org.elasticsearch.discovery.zen.fd.NodesFaultDetection; import org.elasticsearch.discovery.zen.membership.MembershipAction; import org.elasticsearch.discovery.zen.ping.PingContextProvider; import org.elasticsearch.discovery.zen.ping.ZenPing; import org.elasticsearch.discovery.zen.ping.ZenPingService; import org.elasticsearch.discovery.zen.publish.PendingClusterStateStats; import org.elasticsearch.discovery.zen.publish.PublishClusterStateAction; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.EmptyTransportResponseHandler; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestHandler; import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import static 
org.elasticsearch.common.unit.TimeValue.timeValueSeconds; /** * */ public class ZenDiscovery extends AbstractLifecycleComponent<Discovery> implements Discovery, PingContextProvider { public final static Setting<TimeValue> PING_TIMEOUT_SETTING = Setting.positiveTimeSetting("discovery.zen.ping_timeout", timeValueSeconds(3), Property.NodeScope); public final static Setting<TimeValue> JOIN_TIMEOUT_SETTING = Setting.timeSetting("discovery.zen.join_timeout", settings -> TimeValue.timeValueMillis(PING_TIMEOUT_SETTING.get(settings).millis() * 20).toString(), TimeValue.timeValueMillis(0), Property.NodeScope); public final static Setting<Integer> JOIN_RETRY_ATTEMPTS_SETTING = Setting.intSetting("discovery.zen.join_retry_attempts", 3, 1, Property.NodeScope); public final static Setting<TimeValue> JOIN_RETRY_DELAY_SETTING = Setting.positiveTimeSetting("discovery.zen.join_retry_delay", TimeValue.timeValueMillis(100), Property.NodeScope); public final static Setting<Integer> MAX_PINGS_FROM_ANOTHER_MASTER_SETTING = Setting.intSetting("discovery.zen.max_pings_from_another_master", 3, 1, Property.NodeScope); public final static Setting<Boolean> SEND_LEAVE_REQUEST_SETTING = Setting.boolSetting("discovery.zen.send_leave_request", true, Property.NodeScope); public final static Setting<TimeValue> MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT_SETTING = Setting.timeSetting("discovery.zen.master_election.wait_for_joins_timeout", settings -> TimeValue.timeValueMillis(JOIN_TIMEOUT_SETTING.get(settings).millis() / 2).toString(), TimeValue.timeValueMillis(0), Property.NodeScope); public final static Setting<Boolean> MASTER_ELECTION_IGNORE_NON_MASTER_PINGS_SETTING = Setting.boolSetting("discovery.zen.master_election.ignore_non_master_pings", false, Property.NodeScope); public static final String DISCOVERY_REJOIN_ACTION_NAME = "internal:discovery/zen/rejoin"; private final TransportService transportService; private final ClusterService clusterService; private RoutingService routingService; private 
final ClusterName clusterName; private final DiscoverySettings discoverySettings; private final ZenPingService pingService; private final MasterFaultDetection masterFD; private final NodesFaultDetection nodesFD; private final PublishClusterStateAction publishClusterState; private final MembershipAction membership; private final TimeValue pingTimeout; private final TimeValue joinTimeout; /** how many retry attempts to perform if join request failed with an retriable error */ private final int joinRetryAttempts; /** how long to wait before performing another join attempt after a join request failed with an retriable error */ private final TimeValue joinRetryDelay; /** how many pings from *another* master to tolerate before forcing a rejoin on other or local master */ private final int maxPingsFromAnotherMaster; // a flag that should be used only for testing private final boolean sendLeaveRequest; private final ElectMasterService electMaster; private final boolean masterElectionIgnoreNonMasters; private final TimeValue masterElectionWaitForJoinsTimeout; private final JoinThreadControl joinThreadControl; /** counts the time this node has joined the cluster or have elected it self as master */ private final AtomicLong clusterJoinsCounter = new AtomicLong(); // must initialized in doStart(), when we have the routingService set private volatile NodeJoinController nodeJoinController; @Inject public ZenDiscovery(Settings settings, ClusterName clusterName, ThreadPool threadPool, TransportService transportService, final ClusterService clusterService, ClusterSettings clusterSettings, ZenPingService pingService, ElectMasterService electMasterService) { super(settings); this.clusterName = clusterName; this.clusterService = clusterService; this.transportService = transportService; this.discoverySettings = new DiscoverySettings(settings, clusterSettings); this.pingService = pingService; this.electMaster = electMasterService; this.pingTimeout = PING_TIMEOUT_SETTING.get(settings); 
this.joinTimeout = JOIN_TIMEOUT_SETTING.get(settings); this.joinRetryAttempts = JOIN_RETRY_ATTEMPTS_SETTING.get(settings); this.joinRetryDelay = JOIN_RETRY_DELAY_SETTING.get(settings); this.maxPingsFromAnotherMaster = MAX_PINGS_FROM_ANOTHER_MASTER_SETTING.get(settings); this.sendLeaveRequest = SEND_LEAVE_REQUEST_SETTING.get(settings); this.masterElectionIgnoreNonMasters = MASTER_ELECTION_IGNORE_NON_MASTER_PINGS_SETTING.get(settings); this.masterElectionWaitForJoinsTimeout = MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT_SETTING.get(settings); logger.debug("using ping_timeout [{}], join.timeout [{}], master_election.ignore_non_master [{}]", this.pingTimeout, joinTimeout, masterElectionIgnoreNonMasters); clusterSettings.addSettingsUpdateConsumer(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING, this::handleMinimumMasterNodesChanged, (value) -> { final ClusterState clusterState = clusterService.state(); int masterNodes = clusterState.nodes().getMasterNodes().size(); if (value > masterNodes) { throw new IllegalArgumentException("cannot set " + ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey() + " to more than the current master nodes count [" + masterNodes + "]"); } }); this.masterFD = new MasterFaultDetection(settings, threadPool, transportService, clusterName, clusterService); this.masterFD.addListener(new MasterNodeFailureListener()); this.nodesFD = new NodesFaultDetection(settings, threadPool, transportService, clusterName); this.nodesFD.addListener(new NodeFaultDetectionListener()); this.publishClusterState = new PublishClusterStateAction( settings, transportService, clusterService::state, new NewPendingClusterStateListener(), discoverySettings, clusterName); this.pingService.setPingContextProvider(this); this.membership = new MembershipAction(settings, clusterService, transportService, this, new MembershipListener()); this.joinThreadControl = new JoinThreadControl(threadPool); transportService.registerRequestHandler( 
DISCOVERY_REJOIN_ACTION_NAME, RejoinClusterRequest::new, ThreadPool.Names.SAME, new RejoinClusterRequestHandler()); } @Override public void setRoutingService(RoutingService routingService) { this.routingService = routingService; } @Override protected void doStart() { nodesFD.setLocalNode(clusterService.localNode()); joinThreadControl.start(); pingService.start(); this.nodeJoinController = new NodeJoinController(clusterService, routingService, electMaster, discoverySettings, settings); } @Override public void startInitialJoin() { // start the join thread from a cluster state update. See {@link JoinThreadControl} for details. clusterService.submitStateUpdateTask("initial_join", new ClusterStateUpdateTask() { @Override public boolean runOnlyOnMaster() { return false; } @Override public ClusterState execute(ClusterState currentState) throws Exception { // do the join on a different thread, the DiscoveryService waits for 30s anyhow till it is discovered joinThreadControl.startNewThreadIfNotRunning(); return currentState; } @Override public void onFailure(String source, @org.elasticsearch.common.Nullable Throwable t) { logger.warn("failed to start initial join process", t); } }); } @Override protected void doStop() { joinThreadControl.stop(); pingService.stop(); masterFD.stop("zen disco stop"); nodesFD.stop(); DiscoveryNodes nodes = nodes(); if (sendLeaveRequest) { if (nodes.getMasterNode() == null) { // if we don't know who the master is, nothing to do here } else if (!nodes.isLocalNodeElectedMaster()) { try { membership.sendLeaveRequestBlocking(nodes.getMasterNode(), nodes.getLocalNode(), TimeValue.timeValueSeconds(1)); } catch (Exception e) { logger.debug("failed to send leave request to master [{}]", e, nodes.getMasterNode()); } } else { // we're master -> let other potential master we left and start a master election now rather then wait for masterFD DiscoveryNode[] possibleMasters = electMaster.nextPossibleMasters(nodes.getNodes().values(), 5); for (DiscoveryNode 
possibleMaster : possibleMasters) { if (nodes.getLocalNode().equals(possibleMaster)) { continue; } try { membership.sendLeaveRequest(nodes.getLocalNode(), possibleMaster); } catch (Exception e) { logger.debug("failed to send leave request from master [{}] to possible master [{}]", e, nodes.getMasterNode(), possibleMaster); } } } } } @Override protected void doClose() { masterFD.close(); nodesFD.close(); publishClusterState.close(); membership.close(); pingService.close(); } @Override public DiscoveryNode localNode() { return clusterService.localNode(); } @Override public String nodeDescription() { return clusterName.value() + "/" + clusterService.localNode().getId(); } /** start of {@link org.elasticsearch.discovery.zen.ping.PingContextProvider } implementation */ @Override public DiscoveryNodes nodes() { return clusterService.state().nodes(); } @Override public boolean nodeHasJoinedClusterOnce() { return clusterJoinsCounter.get() > 0; } /** end of {@link org.elasticsearch.discovery.zen.ping.PingContextProvider } implementation */ @Override public void publish(ClusterChangedEvent clusterChangedEvent, AckListener ackListener) { if (!clusterChangedEvent.state().getNodes().isLocalNodeElectedMaster()) { throw new IllegalStateException("Shouldn't publish state when not master"); } nodesFD.updateNodesAndPing(clusterChangedEvent.state()); try { publishClusterState.publish(clusterChangedEvent, electMaster.minimumMasterNodes(), ackListener); } catch (FailedToCommitClusterStateException t) { // cluster service logs a WARN message logger.debug("failed to publish cluster state version [{}] (not enough nodes acknowledged, min master nodes [{}])", clusterChangedEvent.state().version(), electMaster.minimumMasterNodes()); clusterService.submitStateUpdateTask("zen-disco-failed-to-publish", new ClusterStateUpdateTask(Priority.IMMEDIATE) { @Override public ClusterState execute(ClusterState currentState) { return rejoin(currentState, "failed to publish to min_master_nodes"); } 
@Override public void onFailure(String source, Throwable t) { logger.error("unexpected failure during [{}]", t, source); } }); throw t; } } @Override public DiscoveryStats stats() { PendingClusterStateStats queueStats = publishClusterState.pendingStatesQueue().stats(); return new DiscoveryStats(queueStats); } @Override public DiscoverySettings getDiscoverySettings() { return discoverySettings; } @Override public int getMinimumMasterNodes() { return electMaster.minimumMasterNodes(); } /** * returns true if zen discovery is started and there is a currently a background thread active for (re)joining * the cluster used for testing. */ public boolean joiningCluster() { return joinThreadControl.joinThreadActive(); } // used for testing public ClusterState[] pendingClusterStates() { return publishClusterState.pendingStatesQueue().pendingClusterStates(); } /** * the main function of a join thread. This function is guaranteed to join the cluster * or spawn a new join thread upon failure to do so. */ private void innerJoinCluster() { DiscoveryNode masterNode = null; final Thread currentThread = Thread.currentThread(); nodeJoinController.startAccumulatingJoins(); while (masterNode == null && joinThreadControl.joinThreadActive(currentThread)) { masterNode = findMaster(); } if (!joinThreadControl.joinThreadActive(currentThread)) { logger.trace("thread is no longer in currentJoinThread. 
Stopping."); return; } if (clusterService.localNode().equals(masterNode)) { final int requiredJoins = Math.max(0, electMaster.minimumMasterNodes() - 1); // we count as one logger.debug("elected as master, waiting for incoming joins ([{}] needed)", requiredJoins); nodeJoinController.waitToBeElectedAsMaster(requiredJoins, masterElectionWaitForJoinsTimeout, new NodeJoinController.ElectionCallback() { @Override public void onElectedAsMaster(ClusterState state) { joinThreadControl.markThreadAsDone(currentThread); // we only starts nodesFD if we are master (it may be that we received a cluster state while pinging) nodesFD.updateNodesAndPing(state); // start the nodes FD long count = clusterJoinsCounter.incrementAndGet(); logger.trace("cluster joins counter set to [{}] (elected as master)", count); } @Override public void onFailure(Throwable t) { logger.trace("failed while waiting for nodes to join, rejoining", t); joinThreadControl.markThreadAsDoneAndStartNew(currentThread); } } ); } else { // process any incoming joins (they will fail because we are not the master) nodeJoinController.stopAccumulatingJoins("not master"); // send join request final boolean success = joinElectedMaster(masterNode); // finalize join through the cluster state update thread final DiscoveryNode finalMasterNode = masterNode; clusterService.submitStateUpdateTask("finalize_join (" + masterNode + ")", new ClusterStateUpdateTask() { @Override public boolean runOnlyOnMaster() { return false; } @Override public ClusterState execute(ClusterState currentState) throws Exception { if (!success) { // failed to join. Try again... joinThreadControl.markThreadAsDoneAndStartNew(currentThread); return currentState; } if (currentState.getNodes().getMasterNode() == null) { // Post 1.3.0, the master should publish a new cluster state before acking our join request. we now should have // a valid master. logger.debug("no master node is set, despite of join request completing. 
retrying pings."); joinThreadControl.markThreadAsDoneAndStartNew(currentThread); return currentState; } if (!currentState.getNodes().getMasterNode().equals(finalMasterNode)) { return joinThreadControl.stopRunningThreadAndRejoin(currentState, "master_switched_while_finalizing_join"); } // Note: we do not have to start master fault detection here because it's set at {@link #processNextPendingClusterState } // when the first cluster state arrives. joinThreadControl.markThreadAsDone(currentThread); return currentState; } @Override public void onFailure(String source, @Nullable Throwable t) { logger.error("unexpected error while trying to finalize cluster join", t); joinThreadControl.markThreadAsDoneAndStartNew(currentThread); } }); } } /** * Join a newly elected master. * * @return true if successful */ private boolean joinElectedMaster(DiscoveryNode masterNode) { try { // first, make sure we can connect to the master transportService.connectToNode(masterNode); } catch (Exception e) { logger.warn("failed to connect to master [{}], retrying...", e, masterNode); return false; } int joinAttempt = 0; // we retry on illegal state if the master is not yet ready while (true) { try { logger.trace("joining master {}", masterNode); membership.sendJoinRequestBlocking(masterNode, clusterService.localNode(), joinTimeout); return true; } catch (Throwable t) { Throwable unwrap = ExceptionsHelper.unwrapCause(t); if (unwrap instanceof NotMasterException) { if (++joinAttempt == this.joinRetryAttempts) { logger.info("failed to send join request to master [{}], reason [{}], tried [{}] times", masterNode, ExceptionsHelper.detailedMessage(t), joinAttempt); return false; } else { logger.trace("master {} failed with [{}]. retrying... 
(attempts done: [{}])", masterNode, ExceptionsHelper.detailedMessage(t), joinAttempt); } } else { if (logger.isTraceEnabled()) { logger.trace("failed to send join request to master [{}]", t, masterNode); } else { logger.info("failed to send join request to master [{}], reason [{}]", masterNode, ExceptionsHelper.detailedMessage(t)); } return false; } } try { Thread.sleep(this.joinRetryDelay.millis()); } catch (InterruptedException e) { Thread.currentThread().interrupt(); } } } private void handleLeaveRequest(final DiscoveryNode node) { if (lifecycleState() != Lifecycle.State.STARTED) { // not started, ignore a node failure return; } if (localNodeMaster()) { clusterService.submitStateUpdateTask("zen-disco-node_left(" + node + ")", new ClusterStateUpdateTask(Priority.IMMEDIATE) { @Override public ClusterState execute(ClusterState currentState) { DiscoveryNodes.Builder builder = DiscoveryNodes.builder(currentState.nodes()).remove(node.getId()); currentState = ClusterState.builder(currentState).nodes(builder).build(); // check if we have enough master nodes, if not, we need to move into joining the cluster again if (!electMaster.hasEnoughMasterNodes(currentState.nodes())) { return rejoin(currentState, "not enough master nodes"); } // eagerly run reroute to remove dead nodes from routing table RoutingAllocation.Result routingResult = routingService.getAllocationService().reroute( ClusterState.builder(currentState).build(), "[" + node + "] left"); return ClusterState.builder(currentState).routingResult(routingResult).build(); } @Override public void onNoLongerMaster(String source) { // ignoring (already logged) } @Override public void onFailure(String source, Throwable t) { logger.error("unexpected failure during [{}]", t, source); } }); } else if (node.equals(nodes().getMasterNode())) { handleMasterGone(node, null, "shut_down"); } } private void handleNodeFailure(final DiscoveryNode node, String reason) { if (lifecycleState() != Lifecycle.State.STARTED) { // not started, 
ignore a node failure return; } if (!localNodeMaster()) { // nothing to do here... return; } clusterService.submitStateUpdateTask("zen-disco-node_failed(" + node + "), reason " + reason, new ClusterStateUpdateTask(Priority.IMMEDIATE) { @Override public ClusterState execute(ClusterState currentState) { if (currentState.nodes().get(node.getId()) == null) { logger.debug("node [{}] already removed from cluster state. ignoring.", node); return currentState; } DiscoveryNodes.Builder builder = DiscoveryNodes.builder(currentState.nodes()) .remove(node.getId()); currentState = ClusterState.builder(currentState).nodes(builder).build(); // check if we have enough master nodes, if not, we need to move into joining the cluster again if (!electMaster.hasEnoughMasterNodes(currentState.nodes())) { return rejoin(currentState, "not enough master nodes"); } // eagerly run reroute to remove dead nodes from routing table RoutingAllocation.Result routingResult = routingService.getAllocationService().reroute( ClusterState.builder(currentState).build(), "[" + node + "] failed"); return ClusterState.builder(currentState).routingResult(routingResult).build(); } @Override public void onNoLongerMaster(String source) { // already logged } @Override public void onFailure(String source, Throwable t) { logger.error("unexpected failure during [{}]", t, source); } @Override public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { } }); } private void handleMinimumMasterNodesChanged(final int minimumMasterNodes) { if (lifecycleState() != Lifecycle.State.STARTED) { // not started, ignore a node failure return; } final int prevMinimumMasterNode = ZenDiscovery.this.electMaster.minimumMasterNodes(); ZenDiscovery.this.electMaster.minimumMasterNodes(minimumMasterNodes); if (!localNodeMaster()) { // We only set the new value. If the master doesn't see enough nodes it will revoke it's mastership. 
return; } clusterService.submitStateUpdateTask("zen-disco-minimum_master_nodes_changed", new ClusterStateUpdateTask(Priority.IMMEDIATE) { @Override public ClusterState execute(ClusterState currentState) { // check if we have enough master nodes, if not, we need to move into joining the cluster again if (!electMaster.hasEnoughMasterNodes(currentState.nodes())) { return rejoin(currentState, "not enough master nodes on change of minimum_master_nodes from [" + prevMinimumMasterNode + "] to [" + minimumMasterNodes + "]"); } return currentState; } @Override public void onNoLongerMaster(String source) { // ignoring (already logged) } @Override public void onFailure(String source, Throwable t) { logger.error("unexpected failure during [{}]", t, source); } @Override public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { electMaster.logMinimumMasterNodesWarningIfNecessary(oldState, newState); } }); } private void handleMasterGone(final DiscoveryNode masterNode, final Throwable cause, final String reason) { if (lifecycleState() != Lifecycle.State.STARTED) { // not started, ignore a master failure return; } if (localNodeMaster()) { // we might get this on both a master telling us shutting down, and then the disconnect failure return; } logger.info("master_left [{}], reason [{}]", cause, masterNode, reason); clusterService.submitStateUpdateTask("zen-disco-master_failed (" + masterNode + ")", new ClusterStateUpdateTask(Priority.IMMEDIATE) { @Override public boolean runOnlyOnMaster() { return false; } @Override public ClusterState execute(ClusterState currentState) { if (!masterNode.getId().equals(currentState.nodes().getMasterNodeId())) { // master got switched on us, no need to send anything return currentState; } DiscoveryNodes discoveryNodes = DiscoveryNodes.builder(currentState.nodes()) // make sure the old master node, which has failed, is not part of the nodes we publish .remove(masterNode.getId()) .masterNodeId(null).build(); // 
                // (tail of the master-left cluster-state update task; the task begins earlier in this file)
                // flush any pending cluster states from old master, so it will not be set as master again
                publishClusterState.pendingStatesQueue().failAllStatesAndClear(new ElasticsearchException("master left [{}]", reason));
                // drop to a no-master state and kick off a fresh join round
                return rejoin(ClusterState.builder(currentState).nodes(discoveryNodes).build(), "master left (reason = " + reason + ")");
            }

            @Override
            public void onFailure(String source, Throwable t) {
                logger.error("unexpected failure during [{}]", t, source);
            }

            @Override
            public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
            }

        });
    }

    /**
     * Submits an URGENT cluster-state task that pulls the next pending cluster state published by the
     * master off the queue, validates it (correct master, newer version), and applies it — reusing
     * unchanged routing-table/metadata objects from the current state where versions match.
     */
    void processNextPendingClusterState(String reason) {
        clusterService.submitStateUpdateTask("zen-disco-receive(from master [" + reason + "])", new ClusterStateUpdateTask(Priority.URGENT) {
            @Override
            public boolean runOnlyOnMaster() {
                return false;
            }

            // the state taken from the pending queue; remembered so the completion callbacks
            // below can mark it processed/failed
            ClusterState newClusterState = null;

            @Override
            public ClusterState execute(ClusterState currentState) {
                newClusterState = publishClusterState.pendingStatesQueue().getNextClusterStateToProcess();

                // all pending states have been processed
                if (newClusterState == null) {
                    return currentState;
                }

                assert newClusterState.nodes().getMasterNode() != null : "received a cluster state without a master";
                assert !newClusterState.blocks().hasGlobalBlock(discoverySettings.getNoMasterBlock()) : "received a cluster state with a master block";

                // if we think we are the master, resolve the conflict instead of applying the state
                if (currentState.nodes().isLocalNodeElectedMaster()) {
                    return handleAnotherMaster(currentState, newClusterState.nodes().getMasterNode(), newClusterState.version(), "via a new cluster state");
                }

                if (shouldIgnoreOrRejectNewClusterState(logger, currentState, newClusterState)) {
                    return currentState;
                }

                // check to see that we monitor the correct master of the cluster
                if (masterFD.masterNode() == null || !masterFD.masterNode().equals(newClusterState.nodes().getMasterNode())) {
                    masterFD.restart(newClusterState.nodes().getMasterNode(), "new cluster state received and we are monitoring the wrong master [" + masterFD.masterNode() + "]");
                }

                if (currentState.blocks().hasGlobalBlock(discoverySettings.getNoMasterBlock())) {
                    // its a fresh update from the master as we transition from a start of not having a master to having one
                    logger.debug("got first state from fresh master [{}]", newClusterState.nodes().getMasterNodeId());
                    long count = clusterJoinsCounter.incrementAndGet();
                    logger.trace("updated cluster join cluster to [{}]", count);

                    return newClusterState;
                }

                // some optimizations to make sure we keep old objects where possible
                ClusterState.Builder builder = ClusterState.builder(newClusterState);

                // if the routing table did not change, use the original one
                if (newClusterState.routingTable().version() == currentState.routingTable().version()) {
                    builder.routingTable(currentState.routingTable());
                }
                // same for metadata
                if (newClusterState.metaData().version() == currentState.metaData().version()) {
                    builder.metaData(currentState.metaData());
                } else {
                    // if its not the same version, only copy over new indices or ones that changed the version
                    MetaData.Builder metaDataBuilder = MetaData.builder(newClusterState.metaData()).removeAllIndices();
                    for (IndexMetaData indexMetaData : newClusterState.metaData()) {
                        IndexMetaData currentIndexMetaData = currentState.metaData().index(indexMetaData.getIndex());
                        if (currentIndexMetaData != null && currentIndexMetaData.isSameUUID(indexMetaData.getIndexUUID()) &&
                                currentIndexMetaData.getVersion() == indexMetaData.getVersion()) {
                            // safe to reuse
                            metaDataBuilder.put(currentIndexMetaData, false);
                        } else {
                            metaDataBuilder.put(indexMetaData, false);
                        }
                    }
                    builder.metaData(metaDataBuilder);
                }

                return builder.build();
            }

            @Override
            public void onFailure(String source, Throwable t) {
                logger.error("unexpected failure during [{}]", t, source);
                if (newClusterState != null) {
                    try {
                        // tell the pending-states queue (and thus the publishing master) this state failed
                        publishClusterState.pendingStatesQueue().markAsFailed(newClusterState, t);
                    } catch (Throwable unexpected) {
                        logger.error("unexpected exception while failing [{}]", unexpected, source);
                    }
                }
            }

            @Override
            public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
                try {
                    if (newClusterState != null) {
                        publishClusterState.pendingStatesQueue().markAsProcessed(newClusterState);
                    }
                } catch (Throwable t) {
                    onFailure(source, t);
                }
            }
        });
    }

    /**
     * In the case we follow an elected master the new cluster state needs to have the same elected master and
     * the new cluster state version needs to be equal or higher than our cluster state version.
     * If the first condition fails we reject the cluster state and throw an error.
     * If the second condition fails we ignore the cluster state.
     *
     * @return true when the incoming state should be ignored (not applied)
     */
    public static boolean shouldIgnoreOrRejectNewClusterState(ESLogger logger, ClusterState currentState, ClusterState newClusterState) {
        // throws IllegalStateException if the state comes from a different master than the one we follow
        validateStateIsFromCurrentMaster(logger, currentState.nodes(), newClusterState);

        // reject cluster states that are not new from the same master
        if (currentState.supersedes(newClusterState) ||
                (newClusterState.nodes().getMasterNodeId().equals(currentState.nodes().getMasterNodeId()) && currentState.version() == newClusterState.version())) {
            // if the new state has a smaller version, and it has the same master node, then no need to process it
            logger.debug("received a cluster state that is not newer than the current one, ignoring (received {}, current {})", newClusterState.version(), currentState.version());
            return true;
        }

        // reject older cluster states if we are following a master
        if (currentState.nodes().getMasterNodeId() != null && newClusterState.version() < currentState.version()) {
            logger.debug("received a cluster state that has a lower version than the current one, ignoring (received {}, current {})", newClusterState.version(), currentState.version());
            return true;
        }
        return false;
    }

    /**
     * In the case we follow an elected master the new cluster state needs to have the same elected master
     * This method checks for this and throws an exception if needed
     *
     * @throws IllegalStateException if the new state names a different master than the one currently followed
     */
    public static void validateStateIsFromCurrentMaster(ESLogger logger, DiscoveryNodes currentNodes, ClusterState newClusterState) {
        // no master followed yet -> nothing to validate against
        if (currentNodes.getMasterNodeId() == null) {
            return;
        }
        if (!currentNodes.getMasterNodeId().equals(newClusterState.nodes().getMasterNodeId())) {
            logger.warn("received a cluster state from a different master than the current one, rejecting (received {}, current {})", newClusterState.nodes().getMasterNode(), currentNodes.getMasterNode());
            throw new IllegalStateException("cluster state from a different master than the current one, rejecting (received " + newClusterState.nodes().getMasterNode() + ", current " + currentNodes.getMasterNode() + ")");
        }
    }

    /**
     * Handles a join request from {@code node}: checks address support, version compatibility and
     * connectivity, sends a blocking validate-join round trip, then hands off to the join controller.
     * Failures are reported to {@code callback} rather than thrown (except when discovery is not started).
     */
    void handleJoinRequest(final DiscoveryNode node, final ClusterState state, final MembershipAction.JoinCallback callback) {
        if (!transportService.addressSupported(node.getAddress().getClass())) {
            // TODO, what should we do now? Maybe inform that node that its crap?
            logger.warn("received a wrong address type from [{}], ignoring...", node);
        } else if (nodeJoinController == null) {
            throw new IllegalStateException("discovery module is not yet started");
        } else {
            // The minimum supported version for a node joining a master:
            Version minimumNodeJoinVersion = localNode().getVersion().minimumCompatibilityVersion();
            // Sanity check: maybe we don't end up here, because serialization may have failed.
            if (node.getVersion().before(minimumNodeJoinVersion)) {
                callback.onFailure(
                        new IllegalStateException("Can't handle join request from a node with a version [" + node.getVersion() + "] that is lower than the minimum compatible version [" + minimumNodeJoinVersion.minimumCompatibilityVersion() + "]")
                );
                return;
            }

            // try and connect to the node, if it fails, we can raise an exception back to the client...
            transportService.connectToNode(node);

            // validate the join request, will throw a failure if it fails, which will get back to the
            // node calling the join request
            try {
                membership.sendValidateJoinRequestBlocking(node, state, joinTimeout);
            } catch (Throwable e) {
                logger.warn("failed to validate incoming join request from node [{}]", e, node);
                callback.onFailure(new IllegalStateException("failure when sending a validation request to node", e));
                return;
            }
            nodeJoinController.handleJoinRequest(node, callback);
        }
    }

    /**
     * Pings the cluster and decides which node should be master: an already-active master reported by
     * peers wins; otherwise, if enough master-eligible nodes responded, elects one (preferring nodes
     * that have previously joined the cluster). Returns null when no master can be determined yet.
     */
    private DiscoveryNode findMaster() {
        logger.trace("starting to ping");
        ZenPing.PingResponse[] fullPingResponses = pingService.pingAndWait(pingTimeout);
        if (fullPingResponses == null) {
            logger.trace("No full ping responses");
            return null;
        }
        if (logger.isTraceEnabled()) {
            StringBuilder sb = new StringBuilder();
            if (fullPingResponses.length == 0) {
                sb.append(" {none}");
            } else {
                for (ZenPing.PingResponse pingResponse : fullPingResponses) {
                    sb.append("\n\t--> ").append(pingResponse);
                }
            }
            logger.trace("full ping responses:{}", sb);
        }

        // filter responses
        final List<ZenPing.PingResponse> pingResponses = filterPingResponses(fullPingResponses, masterElectionIgnoreNonMasters, logger);

        final DiscoveryNode localNode = clusterService.localNode();
        List<DiscoveryNode> pingMasters = new ArrayList<>();
        for (ZenPing.PingResponse pingResponse : pingResponses) {
            if (pingResponse.master() != null) {
                // We can't include the local node in pingMasters list, otherwise we may up electing ourselves without
                // any check / verifications from other nodes in ZenDiscover#innerJoinCluster()
                if (!localNode.equals(pingResponse.master())) {
                    pingMasters.add(pingResponse.master());
                }
            }
        }

        // nodes discovered during pinging
        Set<DiscoveryNode> activeNodes = new HashSet<>();
        // nodes discovered who has previously been part of the cluster and do not ping for the very first time
        Set<DiscoveryNode> joinedOnceActiveNodes = new HashSet<>();
        if (localNode.isMasterNode()) {
            activeNodes.add(localNode);
            long joinsCounter = clusterJoinsCounter.get();
            if (joinsCounter > 0) {
                logger.trace("adding local node to the list of active nodes that have previously joined the cluster (joins counter is [{}])", joinsCounter);
                joinedOnceActiveNodes.add(localNode);
            }
        }
        for (ZenPing.PingResponse pingResponse : pingResponses) {
            activeNodes.add(pingResponse.node());
            if (pingResponse.hasJoinedOnce()) {
                joinedOnceActiveNodes.add(pingResponse.node());
            }
        }

        if (pingMasters.isEmpty()) {
            if (electMaster.hasEnoughMasterNodes(activeNodes)) {
                // we give preference to nodes who have previously already joined the cluster. Those will
                // have a cluster state in memory, including an up to date routing table (which is not persistent to disk
                // by the gateway)
                DiscoveryNode master = electMaster.electMaster(joinedOnceActiveNodes);
                if (master != null) {
                    return master;
                }
                return electMaster.electMaster(activeNodes);
            } else {
                // if we don't have enough master nodes, we bail, because there are not enough master to elect from
                logger.trace("not enough master nodes [{}]", activeNodes);
                return null;
            }
        } else {
            assert !pingMasters.contains(localNode) : "local node should never be elected as master when other nodes indicate an active master";
            // lets tie break between discovered nodes
            return electMaster.electMaster(pingMasters);
        }
    }

    /**
     * Optionally drops ping responses from non-master-eligible nodes (when
     * {@code masterElectionIgnoreNonMasters} is set) and debug-logs the surviving responses.
     */
    static List<ZenPing.PingResponse> filterPingResponses(ZenPing.PingResponse[] fullPingResponses, boolean masterElectionIgnoreNonMasters, ESLogger logger) {
        List<ZenPing.PingResponse> pingResponses;
        if (masterElectionIgnoreNonMasters) {
            pingResponses = Arrays.stream(fullPingResponses).filter(ping -> ping.node().isMasterNode()).collect(Collectors.toList());
        } else {
            pingResponses = Arrays.asList(fullPingResponses);
        }

        if (logger.isDebugEnabled()) {
            StringBuilder sb = new StringBuilder();
            if (pingResponses.isEmpty()) {
                sb.append(" {none}");
            } else {
                for (ZenPing.PingResponse pingResponse : pingResponses) {
                    sb.append("\n\t--> ").append(pingResponse);
                }
            }
            logger.debug("filtered ping responses: (ignore_non_masters [{}]){}", masterElectionIgnoreNonMasters, sb);
        }
        return pingResponses;
    }

    /**
     * Moves this node back to a no-master state: stops fault detection, adds the no-master block,
     * clears the master node id, and starts a new join thread. Must run on the cluster state update
     * thread (asserted below).
     */
    protected ClusterState rejoin(ClusterState clusterState, String reason) {

        // *** called from within an cluster state update task *** //
        assert Thread.currentThread().getName().contains(ClusterService.UPDATE_THREAD_NAME);

        logger.warn("{}, current nodes: {}", reason, clusterState.nodes());
        nodesFD.stop();
        masterFD.stop(reason);

        ClusterBlocks clusterBlocks = ClusterBlocks.builder().blocks(clusterState.blocks())
                .addGlobalBlock(discoverySettings.getNoMasterBlock())
                .build();

        // clean the nodes, we are now not connected to anybody, since we try and reform the cluster
        DiscoveryNodes discoveryNodes = new DiscoveryNodes.Builder(clusterState.nodes()).masterNodeId(null).build();

        // TODO: do we want to force a new thread if we actively removed the master? this is to give a full pinging cycle
        // before a decision is made.
        joinThreadControl.startNewThreadIfNotRunning();

        return ClusterState.builder(clusterState)
                .blocks(clusterBlocks)
                .nodes(discoveryNodes)
                .build();
    }

    // true when this node is the currently elected master
    private boolean localNodeMaster() {
        return nodes().isLocalNodeElectedMaster();
    }

    /**
     * Resolves a split-brain situation where another master is discovered while we are master:
     * if the other master's state is newer we rejoin; otherwise we ask the other master to rejoin us.
     */
    private ClusterState handleAnotherMaster(ClusterState localClusterState, final DiscoveryNode otherMaster, long otherClusterStateVersion, String reason) {
        assert localClusterState.nodes().isLocalNodeElectedMaster() : "handleAnotherMaster called but current node is not a master";
        assert Thread.currentThread().getName().contains(ClusterService.UPDATE_THREAD_NAME) : "not called from the cluster state update thread";

        if (otherClusterStateVersion > localClusterState.version()) {
            return rejoin(localClusterState, "zen-disco-discovered another master with a new cluster_state [" + otherMaster + "][" + reason + "]");
        } else {
            logger.warn("discovered [{}] which is also master but with an older cluster_state, telling [{}] to rejoin the cluster ([{}])", otherMaster, otherMaster, reason);
            try {
                // make sure we're connected to this node (connect to node does nothing if we're already connected)
                // since the network connections are asymmetric, it may be that we received a state but have disconnected from the node
                // in the past (after a master failure, for example)
                transportService.connectToNode(otherMaster);
                transportService.sendRequest(otherMaster, DISCOVERY_REJOIN_ACTION_NAME, new RejoinClusterRequest(localClusterState.nodes().getLocalNodeId()), new EmptyTransportResponseHandler(ThreadPool.Names.SAME) {

                    @Override
                    public void handleException(TransportException exp) {
                        logger.warn("failed to send rejoin request to [{}]", exp, otherMaster);
                    }
                });
            } catch (Exception e) {
                logger.warn("failed to send rejoin request to [{}]", e, otherMaster);
            }
            return localClusterState;
        }
    }

    /** Forwards "a new pending cluster state arrived" notifications to {@link #processNextPendingClusterState}. */
    private class NewPendingClusterStateListener implements PublishClusterStateAction.NewPendingClusterStateListener {

        @Override
        public void onNewClusterState(String reason) {
            processNextPendingClusterState(reason);
        }
    }

    /** Routes membership (join/leave) transport callbacks to the discovery handlers above. */
    private class MembershipListener implements MembershipAction.MembershipListener {
        @Override
        public void onJoin(DiscoveryNode node, MembershipAction.JoinCallback callback) {
            handleJoinRequest(node, clusterService.state(), callback);
        }

        @Override
        public void onLeave(DiscoveryNode node) {
            handleLeaveRequest(node);
        }
    }

    /** Reacts to node fault detection events; counts master-style pings received while we are master. */
    private class NodeFaultDetectionListener extends NodesFaultDetection.Listener {

        // number of fault-detection pings received from another master while we are master
        private final AtomicInteger pingsWhileMaster = new AtomicInteger(0);

        @Override
        public void onNodeFailure(DiscoveryNode node, String reason) {
            handleNodeFailure(node, reason);
        }

        @Override
        public void onPingReceived(final NodesFaultDetection.PingRequest pingRequest) {
            // if we are master, we don't expect any fault detection from another node. If we get it
            // means we potentially have two masters in the cluster.
            if (!localNodeMaster()) {
                pingsWhileMaster.set(0);
                return;
            }
            if (pingsWhileMaster.incrementAndGet() < maxPingsFromAnotherMaster) {
                logger.trace("got a ping from another master {}. current ping count: [{}]", pingRequest.masterNode(), pingsWhileMaster.get());
                return;
            }
            logger.debug("got a ping from another master {}. resolving who should rejoin. current ping count: [{}]", pingRequest.masterNode(), pingsWhileMaster.get());
            clusterService.submitStateUpdateTask("ping from another master", new ClusterStateUpdateTask(Priority.IMMEDIATE) {

                @Override
                public ClusterState execute(ClusterState currentState) throws Exception {
                    pingsWhileMaster.set(0);
                    return handleAnotherMaster(currentState, pingRequest.masterNode(), pingRequest.clusterStateVersion(), "node fd ping");
                }

                @Override
                public void onFailure(String source, Throwable t) {
                    logger.debug("unexpected error during cluster state update task after pings from another master", t);
                }
            });
        }
    }

    /** Forwards master fault detection failures to {@code handleMasterGone}. */
    private class MasterNodeFailureListener implements MasterFaultDetection.Listener {

        @Override
        public void onMasterFailure(DiscoveryNode masterNode, Throwable cause, String reason) {
            handleMasterGone(masterNode, cause, reason);
        }
    }

    /** Transport request asking another master to rejoin the cluster; carries the sender's node id. */
    public static class RejoinClusterRequest extends TransportRequest {

        private String fromNodeId;

        RejoinClusterRequest(String fromNodeId) {
            this.fromNodeId = fromNodeId;
        }

        public RejoinClusterRequest() {
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            fromNodeId = in.readOptionalString();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeOptionalString(fromNodeId);
        }
    }

    /** Handles incoming rejoin requests: acks the channel, then rejoins via a cluster-state task. */
    class RejoinClusterRequestHandler implements TransportRequestHandler<RejoinClusterRequest> {
        @Override
        public void messageReceived(final RejoinClusterRequest request, final TransportChannel channel) throws Exception {
            clusterService.submitStateUpdateTask("received a request to rejoin the cluster from [" + request.fromNodeId + "]", new ClusterStateUpdateTask(Priority.IMMEDIATE) {

                @Override
                public boolean runOnlyOnMaster() {
                    return false;
                }

                @Override
                public ClusterState execute(ClusterState currentState) {
                    try {
                        // respond before rejoining; best effort, failure is only logged
                        channel.sendResponse(TransportResponse.Empty.INSTANCE);
                    } catch (Exception e) {
                        logger.warn("failed to send response on rejoin cluster request handling", e);
                    }
                    return rejoin(currentState, "received a request to rejoin the cluster from [" + request.fromNodeId + "]");
                }

                @Override
                public void onFailure(String source, Throwable t) {
                    logger.error("unexpected failure during [{}]", t, source);
                }
            });
        }
    }

    /**
     * All control of the join thread should happen under the cluster state update task thread.
     * This is important to make sure that the background joining process is always in sync with any cluster state updates
     * like master loss, failure to join, received cluster state while joining etc.
     */
    private class JoinThreadControl {

        private final ThreadPool threadPool;
        private final AtomicBoolean running = new AtomicBoolean(false);
        private final AtomicReference<Thread> currentJoinThread = new AtomicReference<>();

        public JoinThreadControl(ThreadPool threadPool) {
            this.threadPool = threadPool;
        }

        /** returns true if join thread control is started and there is currently an active join thread */
        public boolean joinThreadActive() {
            Thread currentThread = currentJoinThread.get();
            return running.get() && currentThread != null && currentThread.isAlive();
        }

        /** returns true if join thread control is started and the supplied thread is the currently active joinThread */
        public boolean joinThreadActive(Thread joinThread) {
            return running.get() && joinThread.equals(currentJoinThread.get());
        }

        /** cleans any running joining thread and calls {@link #rejoin} */
        public ClusterState stopRunningThreadAndRejoin(ClusterState clusterState, String reason) {
            ClusterService.assertClusterStateThread();
            currentJoinThread.set(null);
            return rejoin(clusterState, reason);
        }

        /** starts a new joining thread if there is no currently active one and join thread controlling is started */
        public void startNewThreadIfNotRunning() {
            ClusterService.assertClusterStateThread();
            if (joinThreadActive()) {
                return;
            }
            threadPool.generic().execute(new Runnable() {
                @Override
                public void run() {
                    Thread currentThread = Thread.currentThread();
                    if (!currentJoinThread.compareAndSet(null, currentThread)) {
                        return;
                    }
                    while (running.get() && joinThreadActive(currentThread)) {
                        try {
                            innerJoinCluster();
                            return;
                        } catch (Exception e) {
                            logger.error("unexpected error while joining cluster, trying again", e);
                            // Because we catch any exception here, we want to know in
                            // tests if an uncaught exception got to this point and the test infra uncaught exception
                            // leak detection can catch this. In practise no uncaught exception should leak
                            assert ExceptionsHelper.reThrowIfNotNull(e);
                        }
                    }
                    // cleaning the current thread from currentJoinThread is done by explicit calls.
                }
            });
        }

        /**
         * marks the given joinThread as completed and makes sure another thread is running (starting one if needed)
         * If the given thread is not the currently running join thread, the command is ignored.
         */
        public void markThreadAsDoneAndStartNew(Thread joinThread) {
            ClusterService.assertClusterStateThread();
            if (!markThreadAsDone(joinThread)) {
                return;
            }
            startNewThreadIfNotRunning();
        }

        /** marks the given joinThread as completed. Returns false if the supplied thread is not the currently active join thread */
        public boolean markThreadAsDone(Thread joinThread) {
            ClusterService.assertClusterStateThread();
            return currentJoinThread.compareAndSet(joinThread, null);
        }

        public void stop() {
            running.set(false);
            Thread joinThread = currentJoinThread.getAndSet(null);
            if (joinThread != null) {
                joinThread.interrupt();
            }
        }

        public void start() {
            running.set(true);
        }

    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.carbondata.integration.spark.load;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.carbondata.core.carbon.datastore.block.Distributable;
import org.apache.carbondata.core.carbon.datastore.block.TableBlockInfo;
import org.apache.carbondata.spark.load.CarbonLoaderUtil;

import org.junit.Assert;
import org.junit.Test;

/**
 * Test class to test block distribution functionality
 */
public class CarbonLoaderUtilTest {

  // blocks to distribute in the current scenario (set up by initSet1/2/3)
  List<Distributable> blockInfos = null;
  // -1 lets CarbonLoaderUtil derive the node count itself
  int noOfNodesInput = -1;
  // executor nodes participating in the current scenario
  List<String> activeNode = null;
  // expected node -> blocks mapping for the current scenario
  Map<String, List<Distributable>> expected = null;
  Map<String, List<Distributable>> mapOfNodes = null;

  /**
   * Verifies node/block allocation for three data-locality scenarios:
   * 3 nodes + 3 executors, 2 nodes + 3 executors, 3 nodes + 2 executors.
   */
  @Test public void nodeBlockMapping() throws Exception {
    // scenario when the 3 nodes and 3 executors
    initSet1();
    Map<String, List<Distributable>> mapOfNodes =
        CarbonLoaderUtil.nodeBlockMapping(blockInfos, noOfNodesInput, activeNode);
    // node allocation
    Assert.assertTrue("Node Allocation", expected.size() == mapOfNodes.size());
    // block allocation
    boolean isEqual = compareResult(expected, mapOfNodes);
    Assert.assertTrue("Block Allocation", isEqual);

    // 2 node and 3 executors
    initSet2();
    mapOfNodes = CarbonLoaderUtil.nodeBlockMapping(blockInfos, noOfNodesInput, activeNode);
    // node allocation
    Assert.assertTrue("Node Allocation", expected.size() == mapOfNodes.size());
    // block allocation
    isEqual = compareResult(expected, mapOfNodes);
    Assert.assertTrue("Block Allocation", isEqual);

    // 3 data node and 2 executors
    initSet3();
    mapOfNodes = CarbonLoaderUtil.nodeBlockMapping(blockInfos, noOfNodesInput, activeNode);
    // node allocation
    Assert.assertTrue("Node Allocation", expected.size() == mapOfNodes.size());
    // block allocation
    isEqual = compareResult(expected, mapOfNodes);
    Assert.assertTrue("Block Allocation", isEqual);
  }

  /**
   * Compares the blocks allocation. Both maps are first ordered by descending
   * list size so nodes are matched by allocation size, not by name.
   *
   * @param expectedResult expected node -> blocks mapping
   * @param actualResult actual node -> blocks mapping
   * @return true when every matched node holds the same number of blocks
   */
  private boolean compareResult(Map<String, List<Distributable>> expectedResult,
      Map<String, List<Distributable>> actualResult) {
    expectedResult = sortByListSize(expectedResult);
    actualResult = sortByListSize(actualResult);
    // proper entry typing instead of the previous raw List<List<...>> misuse
    List<Map.Entry<String, List<Distributable>>> expectedList =
        new LinkedList<>(expectedResult.entrySet());
    List<Map.Entry<String, List<Distributable>>> mapOfNodesList =
        new LinkedList<>(actualResult.entrySet());
    boolean isEqual = expectedList.size() == mapOfNodesList.size();
    if (isEqual) {
      for (int i = 0; i < expectedList.size(); i++) {
        int size1 = expectedList.get(i).getValue().size();
        int size2 = mapOfNodesList.get(i).getValue().size();
        isEqual = size1 == size2;
        if (!isEqual) {
          break;
        }
      }
    }
    return isEqual;
  }

  /**
   * sort by list size (descending); null entries sort last.
   *
   * @param map node -> blocks mapping
   * @return a LinkedHashMap whose iteration order is by descending list size
   */
  private static Map<String, List<Distributable>> sortByListSize(
      Map<String, List<Distributable>> map) {
    List<Map.Entry<String, List<Distributable>>> list = new LinkedList<>(map.entrySet());
    Collections.sort(list, new Comparator<Map.Entry<String, List<Distributable>>>() {
      @Override public int compare(Map.Entry<String, List<Distributable>> entry1,
          Map.Entry<String, List<Distributable>> entry2) {
        if (entry1 == null && entry2 == null) {
          return 0;
        } else if (entry1 == null) {
          return 1;
        } else if (entry2 == null) {
          return -1;
        }
        // Integer.compare avoids subtraction overflow; reversed args -> descending
        return Integer.compare(entry2.getValue().size(), entry1.getValue().size());
      }
    });
    Map<String, List<Distributable>> res = new LinkedHashMap<>();
    for (Map.Entry<String, List<Distributable>> entry : list) {
      res.put(entry.getKey(), entry.getValue());
    }
    return res;
  }

  /** 6 blocks local to all of node-7/9/11; expect 2 blocks per node. */
  void initSet1() {
    blockInfos = new ArrayList<>();
    activeNode = new ArrayList<>();
    activeNode.add("node-7");
    activeNode.add("node-9");
    activeNode.add("node-11");
    String[] location = { "node-7", "node-9", "node-11" };
    blockInfos.add(new TableBlockInfo("node", 1, "1", location, 0));
    blockInfos.add(new TableBlockInfo("node", 2, "1", location, 0));
    blockInfos.add(new TableBlockInfo("node", 3, "1", location, 0));
    blockInfos.add(new TableBlockInfo("node", 4, "1", location, 0));
    blockInfos.add(new TableBlockInfo("node", 5, "1", location, 0));
    blockInfos.add(new TableBlockInfo("node", 6, "1", location, 0));
    expected = new HashMap<>();
    expected.put("node-7", blockInfos.subList(0, 2));
    expected.put("node-9", blockInfos.subList(2, 4));
    expected.put("node-11", blockInfos.subList(4, 6));
  }

  /** 6 blocks local to node-7/11 only, but 3 active executors; still 2 blocks per executor. */
  void initSet2() {
    blockInfos = new ArrayList<>();
    activeNode = new ArrayList<>();
    activeNode.add("node-7");
    activeNode.add("node-9");
    activeNode.add("node-11");
    String[] location = { "node-7", "node-11" };
    blockInfos.add(new TableBlockInfo("node", 1, "1", location, 0));
    blockInfos.add(new TableBlockInfo("node", 2, "1", location, 0));
    blockInfos.add(new TableBlockInfo("node", 3, "1", location, 0));
    blockInfos.add(new TableBlockInfo("node", 4, "1", location, 0));
    blockInfos.add(new TableBlockInfo("node", 5, "1", location, 0));
    blockInfos.add(new TableBlockInfo("node", 6, "1", location, 0));
    expected = new HashMap<>();
    expected.put("node-7", blockInfos.subList(0, 2));
    expected.put("node-9", blockInfos.subList(2, 4));
    expected.put("node-11", blockInfos.subList(4, 6));
  }

  /** 6 blocks on 3 data nodes but only 2 active executors; expect 3 blocks per executor. */
  void initSet3() {
    blockInfos = new ArrayList<>();
    activeNode = new ArrayList<>();
    activeNode.add("node-7");
    activeNode.add("node-11");
    String[] location = { "node-7", "node-9", "node-11" };
    blockInfos.add(new TableBlockInfo("node", 1, "1", location, 0));
    blockInfos.add(new TableBlockInfo("node", 2, "1", location, 0));
    blockInfos.add(new TableBlockInfo("node", 3, "1", location, 0));
    blockInfos.add(new TableBlockInfo("node", 4, "1", location, 0));
    blockInfos.add(new TableBlockInfo("node", 5, "1", location, 0));
    blockInfos.add(new TableBlockInfo("node", 6, "1", location, 0));
    expected = new HashMap<>();
    expected.put("node-7", blockInfos.subList(0, 3));
    expected.put("node-11", blockInfos.subList(3, 6));
  }

  /**
   * Test case with 4 blocks and 4 nodes with 3 replication.
   *
   * NOTE: renamed from nodeBlockMapping() — the original name duplicated the
   * zero-argument test method above, which is a compile error in Java.
   *
   * @throws Exception
   */
  @Test public void nodeBlockMappingTestWith4Blocks4Nodes() throws Exception {

    Map<TableBlockInfo, List<String>> inputMap = new HashMap<TableBlockInfo, List<String>>(5);

    TableBlockInfo block1 =
        new TableBlockInfo("path1", 123, "1", new String[] { "1", "2", "3" }, 111);
    TableBlockInfo block2 =
        new TableBlockInfo("path2", 123, "2", new String[] { "2", "3", "4" }, 111);
    TableBlockInfo block3 =
        new TableBlockInfo("path3", 123, "3", new String[] { "3", "4", "1" }, 111);
    TableBlockInfo block4 =
        new TableBlockInfo("path4", 123, "4", new String[] { "1", "2", "4" }, 111);

    inputMap.put(block1, Arrays.asList(new String[]{"1","2","3"}));
    inputMap.put(block2, Arrays.asList(new String[]{"2","3","4"}));
    inputMap.put(block3, Arrays.asList(new String[]{"3","4","1"}));
    inputMap.put(block4, Arrays.asList(new String[]{"1","2","4"}));

    List<TableBlockInfo> inputBlocks = new ArrayList<>(6);
    inputBlocks.add(block1);
    inputBlocks.add(block2);
    inputBlocks.add(block3);
    inputBlocks.add(block4);

    Map<String, List<TableBlockInfo>> outputMap =
        CarbonLoaderUtil.nodeBlockMapping(inputBlocks, 4);

    Assert.assertTrue(calculateBlockDistribution(inputMap, outputMap, 4, 4));

    Assert.assertTrue(calculateBlockLocality(inputMap, outputMap, 4, 4));
  }

  /**
   * Computes the percentage of blocks assigned to a node that is not in the
   * block's locality list; fails (returns false) when the mismatch exceeds 30%.
   */
  private boolean calculateBlockLocality(Map<TableBlockInfo, List<String>> inputMap,
      Map<String, List<TableBlockInfo>> outputMap, int numberOfBlocks, int numberOfNodes) {

    double notInNodeLocality = 0;
    for (Map.Entry<String, List<TableBlockInfo>> entry : outputMap.entrySet()) {

      List<TableBlockInfo> blockListOfANode = entry.getValue();

      for (TableBlockInfo eachBlock : blockListOfANode) {

        // for each block check the node locality
        List<String> blockLocality = inputMap.get(eachBlock);
        if (!blockLocality.contains(entry.getKey())) {
          notInNodeLocality++;
        }
      }
    }

    System.out.println(
        ((notInNodeLocality / numberOfBlocks) * 100) + " " + "is the node locality mismatch");
    if ((notInNodeLocality / numberOfBlocks) * 100 > 30) {
      return false;
    }
    return true;
  }

  /**
   * Checks that every node received at least floor(numberOfBlocks / numberOfNodes) blocks.
   */
  private boolean calculateBlockDistribution(Map<TableBlockInfo, List<String>> inputMap,
      Map<String, List<TableBlockInfo>> outputMap, int numberOfBlocks, int numberOfNodes) {

    int nodesPerBlock = numberOfBlocks / numberOfNodes;

    for (Map.Entry<String, List<TableBlockInfo>> entry : outputMap.entrySet()) {
      if (entry.getValue().size() < nodesPerBlock) {
        return false;
      }
    }
    return true;
  }

  /**
   * Test case with 5 blocks and 3 nodes
   *
   * @throws Exception
   */
  @Test public void nodeBlockMappingTestWith5blocks3nodes() throws Exception {

    Map<TableBlockInfo, List<String>> inputMap = new HashMap<TableBlockInfo, List<String>>(5);

    TableBlockInfo block1 =
        new TableBlockInfo("part-0-0-1462341987000", 123, "1", new String[] { "1", "2", "3" }, 111);
    TableBlockInfo block2 =
        new TableBlockInfo("part-1-0-1462341987000", 123, "2", new String[] { "1", "2", "3" }, 111);
    TableBlockInfo block3 =
        new TableBlockInfo("part-2-0-1462341987000", 123, "3", new String[] { "1", "2", "3" }, 111);
    TableBlockInfo block4 =
        new TableBlockInfo("part-3-0-1462341987000", 123, "4", new String[] { "1", "2", "3" }, 111);
    TableBlockInfo block5 =
        new TableBlockInfo("part-4-0-1462341987000", 123, "5", new String[] { "1", "2", "3" }, 111);

    inputMap.put(block1, Arrays.asList(new String[]{"1","2","3"}));
    inputMap.put(block2, Arrays.asList(new String[]{"1","2","3"}));
    inputMap.put(block3, Arrays.asList(new String[]{"1","2","3"}));
    inputMap.put(block4, Arrays.asList(new String[]{"1","2","3"}));
    inputMap.put(block5, Arrays.asList(new String[]{"1","2","3"}));

    List<TableBlockInfo> inputBlocks = new ArrayList<>(6);
    inputBlocks.add(block1);
    inputBlocks.add(block2);
    inputBlocks.add(block3);
    inputBlocks.add(block4);
    inputBlocks.add(block5);

    Map<String, List<TableBlockInfo>> outputMap =
        CarbonLoaderUtil.nodeBlockMapping(inputBlocks, 3);

    Assert.assertTrue(calculateBlockDistribution(inputMap, outputMap, 5, 3));

    Assert.assertTrue(calculateBlockLocality(inputMap, outputMap, 5, 3));
  }

  /**
   * Test case with 6 blocks and 4 nodes where 4 th node doesnt have any local data.
   *
   * @throws Exception
   */
  @Test public void nodeBlockMappingTestWith6Blocks4nodes() throws Exception {

    Map<TableBlockInfo, List<String>> inputMap = new HashMap<TableBlockInfo, List<String>>(5);

    TableBlockInfo block1 =
        new TableBlockInfo("part-0-0-1462341987000", 123, "1", new String[] { "1", "2", "3" }, 111);
    TableBlockInfo block2 =
        new TableBlockInfo("part-1-0-1462341987000", 123, "2", new String[] { "1", "2", "3" }, 111);
    TableBlockInfo block3 =
        new TableBlockInfo("part-2-0-1462341987000", 123, "3", new String[] { "1", "2", "3" }, 111);
    TableBlockInfo block4 =
        new TableBlockInfo("part-3-0-1462341987000", 123, "4", new String[] { "1", "2", "3" }, 111);
    TableBlockInfo block5 =
        new TableBlockInfo("part-4-0-1462341987000", 123, "5", new String[] { "1", "2", "3" }, 111);
    TableBlockInfo block6 =
        new TableBlockInfo("part-5-0-1462341987000", 123, "6", new String[] { "1", "2", "3" }, 111);

    inputMap.put(block1, Arrays.asList(new String[]{"1","2","3"}));
    inputMap.put(block2, Arrays.asList(new String[]{"1","2","3"}));
    inputMap.put(block3, Arrays.asList(new String[]{"1","2","3"}));
    inputMap.put(block4, Arrays.asList(new String[]{"1","2","3"}));
    inputMap.put(block5, Arrays.asList(new String[]{"1","2","3"}));
    inputMap.put(block6, Arrays.asList(new String[]{"1","2","3"}));

    List<TableBlockInfo> inputBlocks = new ArrayList<>(6);
    inputBlocks.add(block1);
    inputBlocks.add(block2);
    inputBlocks.add(block3);
    inputBlocks.add(block4);
    inputBlocks.add(block5);
    inputBlocks.add(block6);

    Map<String, List<TableBlockInfo>> outputMap =
        CarbonLoaderUtil.nodeBlockMapping(inputBlocks, 4);

    Assert.assertTrue(calculateBlockDistribution(inputMap, outputMap, 6, 4));

    Assert.assertTrue(calculateBlockLocality(inputMap, outputMap, 6, 4));
  }

  /**
   * Test case with 10 blocks and 4 nodes with 10,60,30 % distribution
   *
   * @throws Exception
   */
  @Test public void nodeBlockMappingTestWith10Blocks4nodes() throws Exception {

    Map<TableBlockInfo, List<String>> inputMap = new HashMap<TableBlockInfo, List<String>>(5);

    TableBlockInfo block1 =
        new TableBlockInfo("part-1-0-1462341987000", 123, "1", new String[] { "2", "4" }, 111);
    TableBlockInfo block2 =
        new TableBlockInfo("part-2-0-1462341987000", 123, "2", new String[] { "2", "4" }, 111);
    TableBlockInfo block3 =
        new TableBlockInfo("part-3-0-1462341987000", 123, "3", new String[] { "2", "4" }, 111);
    TableBlockInfo block4 =
        new TableBlockInfo("part-4-0-1462341987000", 123, "4", new String[] { "2", "4" }, 111);
    TableBlockInfo block5 =
        new TableBlockInfo("part-5-0-1462341987000", 123, "5", new String[] { "2", "4" }, 111);
    TableBlockInfo block6 =
        new TableBlockInfo("part-6-0-1462341987000", 123, "6", new String[] { "2", "4" }, 111);
    TableBlockInfo block7 =
        new TableBlockInfo("part-7-0-1462341987000", 123, "7", new String[] { "3", "4" }, 111);
    TableBlockInfo block8 =
        new TableBlockInfo("part-8-0-1462341987000", 123, "8", new String[] { "3", "4" }, 111);
    TableBlockInfo block9 =
        new TableBlockInfo("part-9-0-1462341987000", 123, "9", new String[] { "3", "4" }, 111);
    TableBlockInfo block10 =
        new TableBlockInfo("part-10-0-1462341987000", 123, "9", new String[] { "1", "4" }, 111);

    inputMap.put(block1, Arrays.asList(new String[]{"2","4"}));
    inputMap.put(block2, Arrays.asList(new String[]{"2","4"}));
    inputMap.put(block3, Arrays.asList(new String[]{"2","4"}));
    inputMap.put(block4, Arrays.asList(new String[]{"2","4"}));
    inputMap.put(block5, Arrays.asList(new String[]{"2","4"}));
    inputMap.put(block6, Arrays.asList(new String[]{"2","4"}));
    inputMap.put(block7, Arrays.asList(new String[]{"3","4"}));
    inputMap.put(block8, Arrays.asList(new String[]{"3","4"}));
    inputMap.put(block9, Arrays.asList(new String[]{"3","4"}));
    inputMap.put(block10, Arrays.asList(new String[]{"1","4"}));

    List<TableBlockInfo> inputBlocks = new ArrayList<>(6);
    inputBlocks.add(block1);
    inputBlocks.add(block2);
    inputBlocks.add(block3);
    inputBlocks.add(block4);
    inputBlocks.add(block5);
    inputBlocks.add(block6);
    inputBlocks.add(block7);
    inputBlocks.add(block8);
    inputBlocks.add(block9);
    inputBlocks.add(block10);

    Map<String, List<TableBlockInfo>> outputMap =
        CarbonLoaderUtil.nodeBlockMapping(inputBlocks, 4);

    Assert.assertTrue(calculateBlockDistribution(inputMap, outputMap, 10, 4));

    Assert.assertTrue(calculateBlockLocality(inputMap, outputMap, 10, 4));
  }
}
/** */ package CIM15.IEC61970.Informative.InfTypeAsset; import CIM15.CIM15Package; import CIM15.IEC61968.AssetModels.AssetModelsPackage; import CIM15.IEC61968.Assets.AssetsPackage; import CIM15.IEC61968.Common.CommonPackage; import CIM15.IEC61968.Customers.CustomersPackage; import CIM15.IEC61968.IEC61968Package; import CIM15.IEC61968.LoadControl.LoadControlPackage; import CIM15.IEC61968.Metering.MeteringPackage; import CIM15.IEC61968.PaymentMetering.PaymentMeteringPackage; import CIM15.IEC61968.Work.WorkPackage; import CIM15.IEC61970.AuxiliaryEquipment.AuxiliaryEquipmentPackage; import CIM15.IEC61970.Contingency.ContingencyPackage; import CIM15.IEC61970.ControlArea.ControlAreaPackage; import CIM15.IEC61970.Core.CorePackage; import CIM15.IEC61970.CutsJumpers.CutsJumpersPackage; import CIM15.IEC61970.Domain.DomainPackage; import CIM15.IEC61970.Equivalents.EquivalentsPackage; import CIM15.IEC61970.Generation.GenerationDynamics.GenerationDynamicsPackage; import CIM15.IEC61970.Generation.Production.ProductionPackage; import CIM15.IEC61970.Graphics.GraphicsPackage; import CIM15.IEC61970.IEC61970Package; import CIM15.IEC61970.Informative.InfAssetModels.InfAssetModelsPackage; import CIM15.IEC61970.Informative.InfAssets.InfAssetsPackage; import CIM15.IEC61970.Informative.InfCommon.InfCommonPackage; import CIM15.IEC61970.Informative.InfCore.InfCorePackage; import CIM15.IEC61970.Informative.InfCustomers.InfCustomersPackage; import CIM15.IEC61970.Informative.InfERPSupport.InfERPSupportPackage; import CIM15.IEC61970.Informative.InfGMLSupport.InfGMLSupportPackage; import CIM15.IEC61970.Informative.InfLoadControl.InfLoadControlPackage; import CIM15.IEC61970.Informative.InfLocations.InfLocationsPackage; import CIM15.IEC61970.Informative.InfMetering.InfMeteringPackage; import CIM15.IEC61970.Informative.InfOperations.InfOperationsPackage; import CIM15.IEC61970.Informative.InfWork.InfWorkPackage; import CIM15.IEC61970.LoadModel.LoadModelPackage; import 
CIM15.IEC61970.Meas.MeasPackage;
import CIM15.IEC61970.OperationalLimits.OperationalLimitsPackage;
import CIM15.IEC61970.Outage.OutagePackage;
import CIM15.IEC61970.Protection.ProtectionPackage;
import CIM15.IEC61970.SCADA.SCADAPackage;
import CIM15.IEC61970.StateVariables.StateVariablesPackage;
import CIM15.IEC61970.Topology.TopologyPackage;
import CIM15.IEC61970.Wires.WiresPackage;
import CIM15.IEC61970.WiresPhaseModel.WiresPhaseModelPackage;
import CIM15.IEC62325.IEC62325Package;
import CIM15.PackageDependencies.PackageDependenciesPackage;

import org.eclipse.emf.ecore.EAttribute;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EClassifier;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.EReference;
import org.eclipse.emf.ecore.impl.EPackageImpl;

/**
 * <!-- begin-user-doc -->
 * The <b>Package</b> for the model.
 * It contains accessors for the meta objects to represent
 * <ul>
 *   <li>each class,</li>
 *   <li>each feature of each class,</li>
 *   <li>each operation of each class,</li>
 *   <li>each enum,</li>
 *   <li>and each data type</li>
 * </ul>
 * <!-- end-user-doc -->
 * @see CIM15.IEC61970.Informative.InfTypeAsset.InfTypeAssetFactory
 * @generated
 */
public class InfTypeAssetPackage extends EPackageImpl {

	/** The package name. @generated */
	public static final String eNAME = "InfTypeAsset";

	/** The package namespace URI. @generated */
	public static final String eNS_URI = "http://iec.ch/TC57/2010/CIM-schema-cim15#InfTypeAsset";

	/** The package namespace name. @generated */
	public static final String eNS_PREFIX = "cimInfTypeAsset";

	/** The singleton instance of the package. @generated */
	public static final InfTypeAssetPackage eINSTANCE = CIM15.IEC61970.Informative.InfTypeAsset.InfTypeAssetPackage.init();

	/** The meta object id for the 'Type Asset Catalogue' class. @generated */
	public static final int TYPE_ASSET_CATALOGUE = 0;

	// Feature ids for 'Type Asset Catalogue'. The first seven are inherited
	// from Core IdentifiedObject; TypeAssets and Status are declared locally.

	/** Feature id for the 'UUID' attribute. @generated @ordered */
	public static final int TYPE_ASSET_CATALOGUE__UUID = CorePackage.IDENTIFIED_OBJECT__UUID;

	/** Feature id for the 'Names' reference list. @generated @ordered */
	public static final int TYPE_ASSET_CATALOGUE__NAMES = CorePackage.IDENTIFIED_OBJECT__NAMES;

	/** Feature id for the 'Diagram Objects' reference list. @generated @ordered */
	public static final int TYPE_ASSET_CATALOGUE__DIAGRAM_OBJECTS = CorePackage.IDENTIFIED_OBJECT__DIAGRAM_OBJECTS;

	/** Feature id for the 'MRID' attribute. @generated @ordered */
	public static final int TYPE_ASSET_CATALOGUE__MRID = CorePackage.IDENTIFIED_OBJECT__MRID;

	/** Feature id for the 'Alias Name' attribute. @generated @ordered */
	public static final int TYPE_ASSET_CATALOGUE__ALIAS_NAME = CorePackage.IDENTIFIED_OBJECT__ALIAS_NAME;

	/** Feature id for the 'Name' attribute. @generated @ordered */
	public static final int TYPE_ASSET_CATALOGUE__NAME = CorePackage.IDENTIFIED_OBJECT__NAME;

	/** Feature id for the 'Modeling Authority Set' reference. @generated @ordered */
	public static final int TYPE_ASSET_CATALOGUE__MODELING_AUTHORITY_SET = CorePackage.IDENTIFIED_OBJECT__MODELING_AUTHORITY_SET;

	/** Feature id for the 'Type Assets' reference list. @generated @ordered */
	public static final int TYPE_ASSET_CATALOGUE__TYPE_ASSETS = CorePackage.IDENTIFIED_OBJECT_FEATURE_COUNT + 0;

	/** Feature id for the 'Status' containment reference. @generated @ordered */
	public static final int TYPE_ASSET_CATALOGUE__STATUS = CorePackage.IDENTIFIED_OBJECT_FEATURE_COUNT + 1;

	/** Number of structural features of the 'Type Asset Catalogue' class. @generated @ordered */
	public static final int TYPE_ASSET_CATALOGUE_FEATURE_COUNT = CorePackage.IDENTIFIED_OBJECT_FEATURE_COUNT + 2;

	/** Number of operations of the 'Type Asset Catalogue' class. @generated @ordered */
	public static final int TYPE_ASSET_CATALOGUE_OPERATION_COUNT = CorePackage.IDENTIFIED_OBJECT_OPERATION_COUNT + 0;

	/** The meta object id for the 'Generator Type Asset' class. @generated */
	public static final int GENERATOR_TYPE_ASSET = 1;

	// Feature ids for 'Generator Type Asset'. Ids up to QUANTITY are inherited
	// from InfAssets GenericAssetModelOrMaterial; the electrical parameters
	// (reactances/resistances and P/Q limits) are declared locally.

	/** Feature id for the 'UUID' attribute. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__UUID = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL__UUID;

	/** Feature id for the 'Names' reference list. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__NAMES = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL__NAMES;

	/** Feature id for the 'Diagram Objects' reference list. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__DIAGRAM_OBJECTS = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL__DIAGRAM_OBJECTS;

	/** Feature id for the 'MRID' attribute. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__MRID = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL__MRID;

	/** Feature id for the 'Alias Name' attribute. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__ALIAS_NAME = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL__ALIAS_NAME;

	/** Feature id for the 'Name' attribute. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__NAME = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL__NAME;

	/** Feature id for the 'Modeling Authority Set' reference. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__MODELING_AUTHORITY_SET = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL__MODELING_AUTHORITY_SET;

	/** Feature id for the 'Asset Info' reference. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__ASSET_INFO = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL__ASSET_INFO;

	/** Feature id for the 'Erp Inventory Counts' reference list. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__ERP_INVENTORY_COUNTS = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL__ERP_INVENTORY_COUNTS;

	/** Feature id for the 'Erp Req Line Items' reference list. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__ERP_REQ_LINE_ITEMS = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL__ERP_REQ_LINE_ITEMS;

	/** Feature id for the 'Product Asset Models' reference list. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__PRODUCT_ASSET_MODELS = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL__PRODUCT_ASSET_MODELS;

	/** Feature id for the 'Erp Inventory Issues' reference list. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__ERP_INVENTORY_ISSUES = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL__ERP_INVENTORY_ISSUES;

	/** Feature id for the 'CU Work Equipment Asset' reference. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__CU_WORK_EQUIPMENT_ASSET = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL__CU_WORK_EQUIPMENT_ASSET;

	/** Feature id for the 'Estimated Unit Cost' attribute. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__ESTIMATED_UNIT_COST = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL__ESTIMATED_UNIT_COST;

	/** Feature id for the 'Stock Item' attribute. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__STOCK_ITEM = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL__STOCK_ITEM;

	/** Feature id for the 'Type Asset Catalogue' reference. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__TYPE_ASSET_CATALOGUE = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL__TYPE_ASSET_CATALOGUE;

	/** Feature id for the 'CU Asset' reference. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__CU_ASSET = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL__CU_ASSET;

	/** Feature id for the 'Erp Bom Item Datas' reference list. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__ERP_BOM_ITEM_DATAS = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL__ERP_BOM_ITEM_DATAS;

	/** Feature id for the 'Quantity' attribute. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__QUANTITY = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL__QUANTITY;

	/** Feature id for the 'XDirect Sync' attribute. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__XDIRECT_SYNC = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL_FEATURE_COUNT + 0;

	/** Feature id for the 'RQuad Subtrans' attribute. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__RQUAD_SUBTRANS = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL_FEATURE_COUNT + 1;

	/** Feature id for the 'RDirect Sync' attribute. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__RDIRECT_SYNC = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL_FEATURE_COUNT + 2;

	/** Feature id for the 'RDirect Subtrans' attribute. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__RDIRECT_SUBTRANS = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL_FEATURE_COUNT + 3;

	/** Feature id for the 'Max Q' attribute. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__MAX_Q = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL_FEATURE_COUNT + 4;

	/** Feature id for the 'XQuad Sync' attribute. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__XQUAD_SYNC = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL_FEATURE_COUNT + 5;

	/** Feature id for the 'Min Q' attribute. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__MIN_Q = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL_FEATURE_COUNT + 6;

	/** Feature id for the 'Min P' attribute. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__MIN_P = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL_FEATURE_COUNT + 7;

	/** Feature id for the 'Max P' attribute. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__MAX_P = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL_FEATURE_COUNT + 8;

	/** Feature id for the 'RQuad Sync' attribute. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__RQUAD_SYNC = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL_FEATURE_COUNT + 9;

	/** Feature id for the 'XQuad Subtrans' attribute. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__XQUAD_SUBTRANS = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL_FEATURE_COUNT + 10;

	/** Feature id for the 'RDirect Trans' attribute. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__RDIRECT_TRANS = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL_FEATURE_COUNT + 11;

	/** Feature id for the 'RQuad Trans' attribute. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__RQUAD_TRANS = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL_FEATURE_COUNT + 12;

	/** Feature id for the 'XDirect Subtrans' attribute. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__XDIRECT_SUBTRANS = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL_FEATURE_COUNT + 13;

	/** Feature id for the 'XDirect Trans' attribute. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__XDIRECT_TRANS = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL_FEATURE_COUNT + 14;

	/** Feature id for the 'XQuad Trans' attribute. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET__XQUAD_TRANS = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL_FEATURE_COUNT + 15;

	/** Number of structural features of the 'Generator Type Asset' class. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET_FEATURE_COUNT = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL_FEATURE_COUNT + 16;

	/** Number of operations of the 'Generator Type Asset' class. @generated @ordered */
	public static final int GENERATOR_TYPE_ASSET_OPERATION_COUNT = InfAssetsPackage.GENERIC_ASSET_MODEL_OR_MATERIAL_OPERATION_COUNT + 0;

	// Lazily-resolved EClass caches; populated on first accessor call. @generated
	private EClass typeAssetCatalogueEClass = null;

	/** @generated */
	private EClass generatorTypeAssetEClass = null;

	/**
	 * Creates an instance of the model <b>Package</b>, registered with
	 * {@link org.eclipse.emf.ecore.EPackage.Registry EPackage.Registry} by the package URI value.
	 * <p>Note: the correct way to create the package is via the static factory method
	 * {@link #init init()}, which also performs initialization of the package, or returns
	 * the registered package, if one already exists.
	 * @see org.eclipse.emf.ecore.EPackage.Registry
	 * @see CIM15.IEC61970.Informative.InfTypeAsset.InfTypeAssetPackage#eNS_URI
	 * @see #init()
	 * @generated
	 */
	private InfTypeAssetPackage() {
		super(eNS_URI, InfTypeAssetFactory.INSTANCE);
	}

	/** Guards {@link #init()} against repeated initialization. @generated */
	private static boolean isInited = false;

	/**
	 * Creates, registers, and initializes the <b>Package</b> for this model, and for any others upon which it depends.
* * <p>This method is used to initialize {@link InfTypeAssetPackage#eINSTANCE} when that field is accessed. * Clients should not invoke it directly. Instead, they should simply access that field to obtain the package. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #eNS_URI * @generated */ public static InfTypeAssetPackage init() { if (isInited) return (InfTypeAssetPackage)EPackage.Registry.INSTANCE.getEPackage(InfTypeAssetPackage.eNS_URI); // Obtain or create and register package InfTypeAssetPackage theInfTypeAssetPackage = (InfTypeAssetPackage)(EPackage.Registry.INSTANCE.get(eNS_URI) instanceof InfTypeAssetPackage ? EPackage.Registry.INSTANCE.get(eNS_URI) : new InfTypeAssetPackage()); isInited = true; // Obtain or create and register interdependencies CIM15Package theCIM15Package = (CIM15Package)(EPackage.Registry.INSTANCE.getEPackage(CIM15Package.eNS_URI) instanceof CIM15Package ? EPackage.Registry.INSTANCE.getEPackage(CIM15Package.eNS_URI) : CIM15Package.eINSTANCE); IEC61970Package theIEC61970Package = (IEC61970Package)(EPackage.Registry.INSTANCE.getEPackage(IEC61970Package.eNS_URI) instanceof IEC61970Package ? EPackage.Registry.INSTANCE.getEPackage(IEC61970Package.eNS_URI) : IEC61970Package.eINSTANCE); SCADAPackage theSCADAPackage = (SCADAPackage)(EPackage.Registry.INSTANCE.getEPackage(SCADAPackage.eNS_URI) instanceof SCADAPackage ? EPackage.Registry.INSTANCE.getEPackage(SCADAPackage.eNS_URI) : SCADAPackage.eINSTANCE); ProductionPackage theProductionPackage = (ProductionPackage)(EPackage.Registry.INSTANCE.getEPackage(ProductionPackage.eNS_URI) instanceof ProductionPackage ? EPackage.Registry.INSTANCE.getEPackage(ProductionPackage.eNS_URI) : ProductionPackage.eINSTANCE); GenerationDynamicsPackage theGenerationDynamicsPackage = (GenerationDynamicsPackage)(EPackage.Registry.INSTANCE.getEPackage(GenerationDynamicsPackage.eNS_URI) instanceof GenerationDynamicsPackage ? 
EPackage.Registry.INSTANCE.getEPackage(GenerationDynamicsPackage.eNS_URI) : GenerationDynamicsPackage.eINSTANCE); InfWorkPackage theInfWorkPackage = (InfWorkPackage)(EPackage.Registry.INSTANCE.getEPackage(InfWorkPackage.eNS_URI) instanceof InfWorkPackage ? EPackage.Registry.INSTANCE.getEPackage(InfWorkPackage.eNS_URI) : InfWorkPackage.eINSTANCE); InfERPSupportPackage theInfERPSupportPackage = (InfERPSupportPackage)(EPackage.Registry.INSTANCE.getEPackage(InfERPSupportPackage.eNS_URI) instanceof InfERPSupportPackage ? EPackage.Registry.INSTANCE.getEPackage(InfERPSupportPackage.eNS_URI) : InfERPSupportPackage.eINSTANCE); InfCommonPackage theInfCommonPackage = (InfCommonPackage)(EPackage.Registry.INSTANCE.getEPackage(InfCommonPackage.eNS_URI) instanceof InfCommonPackage ? EPackage.Registry.INSTANCE.getEPackage(InfCommonPackage.eNS_URI) : InfCommonPackage.eINSTANCE); InfAssetsPackage theInfAssetsPackage = (InfAssetsPackage)(EPackage.Registry.INSTANCE.getEPackage(InfAssetsPackage.eNS_URI) instanceof InfAssetsPackage ? EPackage.Registry.INSTANCE.getEPackage(InfAssetsPackage.eNS_URI) : InfAssetsPackage.eINSTANCE); InfCustomersPackage theInfCustomersPackage = (InfCustomersPackage)(EPackage.Registry.INSTANCE.getEPackage(InfCustomersPackage.eNS_URI) instanceof InfCustomersPackage ? EPackage.Registry.INSTANCE.getEPackage(InfCustomersPackage.eNS_URI) : InfCustomersPackage.eINSTANCE); InfOperationsPackage theInfOperationsPackage = (InfOperationsPackage)(EPackage.Registry.INSTANCE.getEPackage(InfOperationsPackage.eNS_URI) instanceof InfOperationsPackage ? EPackage.Registry.INSTANCE.getEPackage(InfOperationsPackage.eNS_URI) : InfOperationsPackage.eINSTANCE); InfLocationsPackage theInfLocationsPackage = (InfLocationsPackage)(EPackage.Registry.INSTANCE.getEPackage(InfLocationsPackage.eNS_URI) instanceof InfLocationsPackage ? 
EPackage.Registry.INSTANCE.getEPackage(InfLocationsPackage.eNS_URI) : InfLocationsPackage.eINSTANCE); InfGMLSupportPackage theInfGMLSupportPackage = (InfGMLSupportPackage)(EPackage.Registry.INSTANCE.getEPackage(InfGMLSupportPackage.eNS_URI) instanceof InfGMLSupportPackage ? EPackage.Registry.INSTANCE.getEPackage(InfGMLSupportPackage.eNS_URI) : InfGMLSupportPackage.eINSTANCE); InfCorePackage theInfCorePackage = (InfCorePackage)(EPackage.Registry.INSTANCE.getEPackage(InfCorePackage.eNS_URI) instanceof InfCorePackage ? EPackage.Registry.INSTANCE.getEPackage(InfCorePackage.eNS_URI) : InfCorePackage.eINSTANCE); InfLoadControlPackage theInfLoadControlPackage = (InfLoadControlPackage)(EPackage.Registry.INSTANCE.getEPackage(InfLoadControlPackage.eNS_URI) instanceof InfLoadControlPackage ? EPackage.Registry.INSTANCE.getEPackage(InfLoadControlPackage.eNS_URI) : InfLoadControlPackage.eINSTANCE); InfMeteringPackage theInfMeteringPackage = (InfMeteringPackage)(EPackage.Registry.INSTANCE.getEPackage(InfMeteringPackage.eNS_URI) instanceof InfMeteringPackage ? EPackage.Registry.INSTANCE.getEPackage(InfMeteringPackage.eNS_URI) : InfMeteringPackage.eINSTANCE); InfAssetModelsPackage theInfAssetModelsPackage = (InfAssetModelsPackage)(EPackage.Registry.INSTANCE.getEPackage(InfAssetModelsPackage.eNS_URI) instanceof InfAssetModelsPackage ? EPackage.Registry.INSTANCE.getEPackage(InfAssetModelsPackage.eNS_URI) : InfAssetModelsPackage.eINSTANCE); StateVariablesPackage theStateVariablesPackage = (StateVariablesPackage)(EPackage.Registry.INSTANCE.getEPackage(StateVariablesPackage.eNS_URI) instanceof StateVariablesPackage ? EPackage.Registry.INSTANCE.getEPackage(StateVariablesPackage.eNS_URI) : StateVariablesPackage.eINSTANCE); WiresPackage theWiresPackage = (WiresPackage)(EPackage.Registry.INSTANCE.getEPackage(WiresPackage.eNS_URI) instanceof WiresPackage ? 
EPackage.Registry.INSTANCE.getEPackage(WiresPackage.eNS_URI) : WiresPackage.eINSTANCE); MeasPackage theMeasPackage = (MeasPackage)(EPackage.Registry.INSTANCE.getEPackage(MeasPackage.eNS_URI) instanceof MeasPackage ? EPackage.Registry.INSTANCE.getEPackage(MeasPackage.eNS_URI) : MeasPackage.eINSTANCE); LoadModelPackage theLoadModelPackage = (LoadModelPackage)(EPackage.Registry.INSTANCE.getEPackage(LoadModelPackage.eNS_URI) instanceof LoadModelPackage ? EPackage.Registry.INSTANCE.getEPackage(LoadModelPackage.eNS_URI) : LoadModelPackage.eINSTANCE); WiresPhaseModelPackage theWiresPhaseModelPackage = (WiresPhaseModelPackage)(EPackage.Registry.INSTANCE.getEPackage(WiresPhaseModelPackage.eNS_URI) instanceof WiresPhaseModelPackage ? EPackage.Registry.INSTANCE.getEPackage(WiresPhaseModelPackage.eNS_URI) : WiresPhaseModelPackage.eINSTANCE); ControlAreaPackage theControlAreaPackage = (ControlAreaPackage)(EPackage.Registry.INSTANCE.getEPackage(ControlAreaPackage.eNS_URI) instanceof ControlAreaPackage ? EPackage.Registry.INSTANCE.getEPackage(ControlAreaPackage.eNS_URI) : ControlAreaPackage.eINSTANCE); AuxiliaryEquipmentPackage theAuxiliaryEquipmentPackage = (AuxiliaryEquipmentPackage)(EPackage.Registry.INSTANCE.getEPackage(AuxiliaryEquipmentPackage.eNS_URI) instanceof AuxiliaryEquipmentPackage ? EPackage.Registry.INSTANCE.getEPackage(AuxiliaryEquipmentPackage.eNS_URI) : AuxiliaryEquipmentPackage.eINSTANCE); DomainPackage theDomainPackage = (DomainPackage)(EPackage.Registry.INSTANCE.getEPackage(DomainPackage.eNS_URI) instanceof DomainPackage ? EPackage.Registry.INSTANCE.getEPackage(DomainPackage.eNS_URI) : DomainPackage.eINSTANCE); CorePackage theCorePackage = (CorePackage)(EPackage.Registry.INSTANCE.getEPackage(CorePackage.eNS_URI) instanceof CorePackage ? 
EPackage.Registry.INSTANCE.getEPackage(CorePackage.eNS_URI) : CorePackage.eINSTANCE); GraphicsPackage theGraphicsPackage = (GraphicsPackage)(EPackage.Registry.INSTANCE.getEPackage(GraphicsPackage.eNS_URI) instanceof GraphicsPackage ? EPackage.Registry.INSTANCE.getEPackage(GraphicsPackage.eNS_URI) : GraphicsPackage.eINSTANCE); OperationalLimitsPackage theOperationalLimitsPackage = (OperationalLimitsPackage)(EPackage.Registry.INSTANCE.getEPackage(OperationalLimitsPackage.eNS_URI) instanceof OperationalLimitsPackage ? EPackage.Registry.INSTANCE.getEPackage(OperationalLimitsPackage.eNS_URI) : OperationalLimitsPackage.eINSTANCE); OutagePackage theOutagePackage = (OutagePackage)(EPackage.Registry.INSTANCE.getEPackage(OutagePackage.eNS_URI) instanceof OutagePackage ? EPackage.Registry.INSTANCE.getEPackage(OutagePackage.eNS_URI) : OutagePackage.eINSTANCE); CutsJumpersPackage theCutsJumpersPackage = (CutsJumpersPackage)(EPackage.Registry.INSTANCE.getEPackage(CutsJumpersPackage.eNS_URI) instanceof CutsJumpersPackage ? EPackage.Registry.INSTANCE.getEPackage(CutsJumpersPackage.eNS_URI) : CutsJumpersPackage.eINSTANCE); ProtectionPackage theProtectionPackage = (ProtectionPackage)(EPackage.Registry.INSTANCE.getEPackage(ProtectionPackage.eNS_URI) instanceof ProtectionPackage ? EPackage.Registry.INSTANCE.getEPackage(ProtectionPackage.eNS_URI) : ProtectionPackage.eINSTANCE); EquivalentsPackage theEquivalentsPackage = (EquivalentsPackage)(EPackage.Registry.INSTANCE.getEPackage(EquivalentsPackage.eNS_URI) instanceof EquivalentsPackage ? EPackage.Registry.INSTANCE.getEPackage(EquivalentsPackage.eNS_URI) : EquivalentsPackage.eINSTANCE); ContingencyPackage theContingencyPackage = (ContingencyPackage)(EPackage.Registry.INSTANCE.getEPackage(ContingencyPackage.eNS_URI) instanceof ContingencyPackage ? 
EPackage.Registry.INSTANCE.getEPackage(ContingencyPackage.eNS_URI) : ContingencyPackage.eINSTANCE); TopologyPackage theTopologyPackage = (TopologyPackage)(EPackage.Registry.INSTANCE.getEPackage(TopologyPackage.eNS_URI) instanceof TopologyPackage ? EPackage.Registry.INSTANCE.getEPackage(TopologyPackage.eNS_URI) : TopologyPackage.eINSTANCE); IEC61968Package theIEC61968Package = (IEC61968Package)(EPackage.Registry.INSTANCE.getEPackage(IEC61968Package.eNS_URI) instanceof IEC61968Package ? EPackage.Registry.INSTANCE.getEPackage(IEC61968Package.eNS_URI) : IEC61968Package.eINSTANCE); CommonPackage theCommonPackage = (CommonPackage)(EPackage.Registry.INSTANCE.getEPackage(CommonPackage.eNS_URI) instanceof CommonPackage ? EPackage.Registry.INSTANCE.getEPackage(CommonPackage.eNS_URI) : CommonPackage.eINSTANCE); AssetModelsPackage theAssetModelsPackage = (AssetModelsPackage)(EPackage.Registry.INSTANCE.getEPackage(AssetModelsPackage.eNS_URI) instanceof AssetModelsPackage ? EPackage.Registry.INSTANCE.getEPackage(AssetModelsPackage.eNS_URI) : AssetModelsPackage.eINSTANCE); MeteringPackage theMeteringPackage = (MeteringPackage)(EPackage.Registry.INSTANCE.getEPackage(MeteringPackage.eNS_URI) instanceof MeteringPackage ? EPackage.Registry.INSTANCE.getEPackage(MeteringPackage.eNS_URI) : MeteringPackage.eINSTANCE); PaymentMeteringPackage thePaymentMeteringPackage = (PaymentMeteringPackage)(EPackage.Registry.INSTANCE.getEPackage(PaymentMeteringPackage.eNS_URI) instanceof PaymentMeteringPackage ? EPackage.Registry.INSTANCE.getEPackage(PaymentMeteringPackage.eNS_URI) : PaymentMeteringPackage.eINSTANCE); AssetsPackage theAssetsPackage = (AssetsPackage)(EPackage.Registry.INSTANCE.getEPackage(AssetsPackage.eNS_URI) instanceof AssetsPackage ? EPackage.Registry.INSTANCE.getEPackage(AssetsPackage.eNS_URI) : AssetsPackage.eINSTANCE); WorkPackage theWorkPackage = (WorkPackage)(EPackage.Registry.INSTANCE.getEPackage(WorkPackage.eNS_URI) instanceof WorkPackage ? 
EPackage.Registry.INSTANCE.getEPackage(WorkPackage.eNS_URI) : WorkPackage.eINSTANCE); CustomersPackage theCustomersPackage = (CustomersPackage)(EPackage.Registry.INSTANCE.getEPackage(CustomersPackage.eNS_URI) instanceof CustomersPackage ? EPackage.Registry.INSTANCE.getEPackage(CustomersPackage.eNS_URI) : CustomersPackage.eINSTANCE); LoadControlPackage theLoadControlPackage = (LoadControlPackage)(EPackage.Registry.INSTANCE.getEPackage(LoadControlPackage.eNS_URI) instanceof LoadControlPackage ? EPackage.Registry.INSTANCE.getEPackage(LoadControlPackage.eNS_URI) : LoadControlPackage.eINSTANCE); IEC62325Package theIEC62325Package = (IEC62325Package)(EPackage.Registry.INSTANCE.getEPackage(IEC62325Package.eNS_URI) instanceof IEC62325Package ? EPackage.Registry.INSTANCE.getEPackage(IEC62325Package.eNS_URI) : IEC62325Package.eINSTANCE); PackageDependenciesPackage thePackageDependenciesPackage = (PackageDependenciesPackage)(EPackage.Registry.INSTANCE.getEPackage(PackageDependenciesPackage.eNS_URI) instanceof PackageDependenciesPackage ? 
EPackage.Registry.INSTANCE.getEPackage(PackageDependenciesPackage.eNS_URI) : PackageDependenciesPackage.eINSTANCE); // Load packages theCIM15Package.loadPackage(); // Fix loaded packages theInfTypeAssetPackage.fixPackageContents(); theCIM15Package.fixPackageContents(); theIEC61970Package.fixPackageContents(); theSCADAPackage.fixPackageContents(); theProductionPackage.fixPackageContents(); theGenerationDynamicsPackage.fixPackageContents(); theInfWorkPackage.fixPackageContents(); theInfERPSupportPackage.fixPackageContents(); theInfCommonPackage.fixPackageContents(); theInfAssetsPackage.fixPackageContents(); theInfCustomersPackage.fixPackageContents(); theInfOperationsPackage.fixPackageContents(); theInfLocationsPackage.fixPackageContents(); theInfGMLSupportPackage.fixPackageContents(); theInfCorePackage.fixPackageContents(); theInfLoadControlPackage.fixPackageContents(); theInfMeteringPackage.fixPackageContents(); theInfAssetModelsPackage.fixPackageContents(); theStateVariablesPackage.fixPackageContents(); theWiresPackage.fixPackageContents(); theMeasPackage.fixPackageContents(); theLoadModelPackage.fixPackageContents(); theWiresPhaseModelPackage.fixPackageContents(); theControlAreaPackage.fixPackageContents(); theAuxiliaryEquipmentPackage.fixPackageContents(); theDomainPackage.fixPackageContents(); theCorePackage.fixPackageContents(); theGraphicsPackage.fixPackageContents(); theOperationalLimitsPackage.fixPackageContents(); theOutagePackage.fixPackageContents(); theCutsJumpersPackage.fixPackageContents(); theProtectionPackage.fixPackageContents(); theEquivalentsPackage.fixPackageContents(); theContingencyPackage.fixPackageContents(); theTopologyPackage.fixPackageContents(); theIEC61968Package.fixPackageContents(); theCommonPackage.fixPackageContents(); theAssetModelsPackage.fixPackageContents(); theMeteringPackage.fixPackageContents(); thePaymentMeteringPackage.fixPackageContents(); theAssetsPackage.fixPackageContents(); theWorkPackage.fixPackageContents(); 
theCustomersPackage.fixPackageContents(); theLoadControlPackage.fixPackageContents(); theIEC62325Package.fixPackageContents(); thePackageDependenciesPackage.fixPackageContents(); // Mark meta-data to indicate it can't be changed theInfTypeAssetPackage.freeze(); // Update the registry and return the package EPackage.Registry.INSTANCE.put(InfTypeAssetPackage.eNS_URI, theInfTypeAssetPackage); return theInfTypeAssetPackage; } /** * Returns the meta object for class '{@link CIM15.IEC61970.Informative.InfTypeAsset.TypeAssetCatalogue <em>Type Asset Catalogue</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for class '<em>Type Asset Catalogue</em>'. * @see CIM15.IEC61970.Informative.InfTypeAsset.TypeAssetCatalogue * @generated */ public EClass getTypeAssetCatalogue() { if (typeAssetCatalogueEClass == null) { typeAssetCatalogueEClass = (EClass)EPackage.Registry.INSTANCE.getEPackage(InfTypeAssetPackage.eNS_URI).getEClassifiers().get(0); } return typeAssetCatalogueEClass; } /** * Returns the meta object for the reference list '{@link CIM15.IEC61970.Informative.InfTypeAsset.TypeAssetCatalogue#getTypeAssets <em>Type Assets</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for the reference list '<em>Type Assets</em>'. * @see CIM15.IEC61970.Informative.InfTypeAsset.TypeAssetCatalogue#getTypeAssets() * @see #getTypeAssetCatalogue() * @generated */ public EReference getTypeAssetCatalogue_TypeAssets() { return (EReference)getTypeAssetCatalogue().getEStructuralFeatures().get(0); } /** * Returns the meta object for the containment reference '{@link CIM15.IEC61970.Informative.InfTypeAsset.TypeAssetCatalogue#getStatus <em>Status</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for the containment reference '<em>Status</em>'. 
 * @see CIM15.IEC61970.Informative.InfTypeAsset.TypeAssetCatalogue#getStatus()
 * @see #getTypeAssetCatalogue()
 * @generated
 */
public EReference getTypeAssetCatalogue_Status() {
	// Feature index 1 of TypeAssetCatalogue: the 'Status' containment reference.
	return (EReference)getTypeAssetCatalogue().getEStructuralFeatures().get(1);
}

/**
 * Returns the meta object for class '{@link CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset <em>Generator Type Asset</em>}'.
 * @return the meta object for class '<em>Generator Type Asset</em>'.
 * @see CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset
 * @generated
 */
public EClass getGeneratorTypeAsset() {
	// Lazily resolved from the global package registry; classifier index 1 is GeneratorTypeAsset.
	// NOTE(review): lazy init is unsynchronized — presumably metamodel access is single-threaded; confirm.
	if (generatorTypeAssetEClass == null) {
		generatorTypeAssetEClass = (EClass)EPackage.Registry.INSTANCE.getEPackage(InfTypeAssetPackage.eNS_URI).getEClassifiers().get(1);
	}
	return generatorTypeAssetEClass;
}

/**
 * Returns the meta object for the attribute '{@link CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getXDirectSync <em>XDirect Sync</em>}'.
 * @return the meta object for the attribute '<em>XDirect Sync</em>'.
 * @see CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getXDirectSync()
 * @see #getGeneratorTypeAsset()
 * @generated
 */
public EAttribute getGeneratorTypeAsset_XDirectSync() {
	return (EAttribute)getGeneratorTypeAsset().getEStructuralFeatures().get(0);
}

/**
 * Returns the meta object for the attribute '{@link CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getRQuadSubtrans <em>RQuad Subtrans</em>}'.
 * @return the meta object for the attribute '<em>RQuad Subtrans</em>'.
 * @see CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getRQuadSubtrans()
 * @see #getGeneratorTypeAsset()
 * @generated
 */
public EAttribute getGeneratorTypeAsset_RQuadSubtrans() {
	return (EAttribute)getGeneratorTypeAsset().getEStructuralFeatures().get(1);
}

/**
 * Returns the meta object for the attribute '{@link CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getRDirectSync <em>RDirect Sync</em>}'.
 * @return the meta object for the attribute '<em>RDirect Sync</em>'.
 * @see CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getRDirectSync()
 * @see #getGeneratorTypeAsset()
 * @generated
 */
public EAttribute getGeneratorTypeAsset_RDirectSync() {
	return (EAttribute)getGeneratorTypeAsset().getEStructuralFeatures().get(2);
}

/**
 * Returns the meta object for the attribute '{@link CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getRDirectSubtrans <em>RDirect Subtrans</em>}'.
 * @return the meta object for the attribute '<em>RDirect Subtrans</em>'.
 * @see CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getRDirectSubtrans()
 * @see #getGeneratorTypeAsset()
 * @generated
 */
public EAttribute getGeneratorTypeAsset_RDirectSubtrans() {
	return (EAttribute)getGeneratorTypeAsset().getEStructuralFeatures().get(3);
}

/**
 * Returns the meta object for the attribute '{@link CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getMaxQ <em>Max Q</em>}'.
 * @return the meta object for the attribute '<em>Max Q</em>'.
 * @see CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getMaxQ()
 * @see #getGeneratorTypeAsset()
 * @generated
 */
public EAttribute getGeneratorTypeAsset_MaxQ() {
	return (EAttribute)getGeneratorTypeAsset().getEStructuralFeatures().get(4);
}

/**
 * Returns the meta object for the attribute '{@link CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getXQuadSync <em>XQuad Sync</em>}'.
 * @return the meta object for the attribute '<em>XQuad Sync</em>'.
 * @see CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getXQuadSync()
 * @see #getGeneratorTypeAsset()
 * @generated
 */
public EAttribute getGeneratorTypeAsset_XQuadSync() {
	return (EAttribute)getGeneratorTypeAsset().getEStructuralFeatures().get(5);
}

/**
 * Returns the meta object for the attribute '{@link CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getMinQ <em>Min Q</em>}'.
 * @return the meta object for the attribute '<em>Min Q</em>'.
 * @see CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getMinQ()
 * @see #getGeneratorTypeAsset()
 * @generated
 */
public EAttribute getGeneratorTypeAsset_MinQ() {
	return (EAttribute)getGeneratorTypeAsset().getEStructuralFeatures().get(6);
}

/**
 * Returns the meta object for the attribute '{@link CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getMinP <em>Min P</em>}'.
 * @return the meta object for the attribute '<em>Min P</em>'.
 * @see CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getMinP()
 * @see #getGeneratorTypeAsset()
 * @generated
 */
public EAttribute getGeneratorTypeAsset_MinP() {
	return (EAttribute)getGeneratorTypeAsset().getEStructuralFeatures().get(7);
}

/**
 * Returns the meta object for the attribute '{@link CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getMaxP <em>Max P</em>}'.
 * @return the meta object for the attribute '<em>Max P</em>'.
 * @see CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getMaxP()
 * @see #getGeneratorTypeAsset()
 * @generated
 */
public EAttribute getGeneratorTypeAsset_MaxP() {
	return (EAttribute)getGeneratorTypeAsset().getEStructuralFeatures().get(8);
}

/**
 * Returns the meta object for the attribute '{@link CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getRQuadSync <em>RQuad Sync</em>}'.
 * @return the meta object for the attribute '<em>RQuad Sync</em>'.
 * @see CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getRQuadSync()
 * @see #getGeneratorTypeAsset()
 * @generated
 */
public EAttribute getGeneratorTypeAsset_RQuadSync() {
	return (EAttribute)getGeneratorTypeAsset().getEStructuralFeatures().get(9);
}

/**
 * Returns the meta object for the attribute '{@link CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getXQuadSubtrans <em>XQuad Subtrans</em>}'.
 * @return the meta object for the attribute '<em>XQuad Subtrans</em>'.
 * @see CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getXQuadSubtrans()
 * @see #getGeneratorTypeAsset()
 * @generated
 */
public EAttribute getGeneratorTypeAsset_XQuadSubtrans() {
	return (EAttribute)getGeneratorTypeAsset().getEStructuralFeatures().get(10);
}

/**
 * Returns the meta object for the attribute '{@link CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getRDirectTrans <em>RDirect Trans</em>}'.
 * @return the meta object for the attribute '<em>RDirect Trans</em>'.
 * @see CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getRDirectTrans()
 * @see #getGeneratorTypeAsset()
 * @generated
 */
public EAttribute getGeneratorTypeAsset_RDirectTrans() {
	return (EAttribute)getGeneratorTypeAsset().getEStructuralFeatures().get(11);
}

/**
 * Returns the meta object for the attribute '{@link CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getRQuadTrans <em>RQuad Trans</em>}'.
 * @return the meta object for the attribute '<em>RQuad Trans</em>'.
 * @see CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getRQuadTrans()
 * @see #getGeneratorTypeAsset()
 * @generated
 */
public EAttribute getGeneratorTypeAsset_RQuadTrans() {
	return (EAttribute)getGeneratorTypeAsset().getEStructuralFeatures().get(12);
}

/**
 * Returns the meta object for the attribute '{@link CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getXDirectSubtrans <em>XDirect Subtrans</em>}'.
 * @return the meta object for the attribute '<em>XDirect Subtrans</em>'.
 * @see CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getXDirectSubtrans()
 * @see #getGeneratorTypeAsset()
 * @generated
 */
public EAttribute getGeneratorTypeAsset_XDirectSubtrans() {
	return (EAttribute)getGeneratorTypeAsset().getEStructuralFeatures().get(13);
}

/**
 * Returns the meta object for the attribute '{@link CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getXDirectTrans <em>XDirect Trans</em>}'.
 * @return the meta object for the attribute '<em>XDirect Trans</em>'.
 * @see CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getXDirectTrans()
 * @see #getGeneratorTypeAsset()
 * @generated
 */
public EAttribute getGeneratorTypeAsset_XDirectTrans() {
	return (EAttribute)getGeneratorTypeAsset().getEStructuralFeatures().get(14);
}

/**
 * Returns the meta object for the attribute '{@link CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getXQuadTrans <em>XQuad Trans</em>}'.
 * @return the meta object for the attribute '<em>XQuad Trans</em>'.
 * @see CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset#getXQuadTrans()
 * @see #getGeneratorTypeAsset()
 * @generated
 */
public EAttribute getGeneratorTypeAsset_XQuadTrans() {
	return (EAttribute)getGeneratorTypeAsset().getEStructuralFeatures().get(15);
}

/**
 * Returns the factory that creates the instances of the model.
 * @return the factory that creates the instances of the model.
 * @generated
 */
public InfTypeAssetFactory getInfTypeAssetFactory() {
	return (InfTypeAssetFactory)getEFactoryInstance();
}

/**
 * Guard so that {@link #fixPackageContents()} runs its fix-up at most once.
 * @generated
 */
private boolean isFixed = false;

/**
 * Fixes up the loaded package, to make it appear as if it had been programmatically built.
 * @generated
 */
public void fixPackageContents() {
	if (isFixed) return;
	isFixed = true;
	fixEClassifiers();
}

/**
 * Sets the instance class on the given classifier.
 * @generated
 */
@Override
protected void fixInstanceClass(EClassifier eClassifier) {
	if (eClassifier.getInstanceClassName() == null) {
		// Instance classes live in the CIM15 InfTypeAsset Java package; derive the FQN from the classifier name.
		eClassifier.setInstanceClassName("CIM15.IEC61970.Informative.InfTypeAsset." + eClassifier.getName());
		setGeneratedClassName(eClassifier);
	}
}

/**
 * Defines literals for the meta objects that represent
 * <ul>
 * <li>each class,</li>
 * <li>each feature of each class,</li>
 * <li>each operation of each class,</li>
 * <li>each enum,</li>
 * <li>and each data type</li>
 * </ul>
 * @generated
 */
public interface Literals {
	/**
	 * The meta object literal for the '{@link CIM15.IEC61970.Informative.InfTypeAsset.TypeAssetCatalogue <em>Type Asset Catalogue</em>}' class.
	 * @generated
	 */
	public static final EClass TYPE_ASSET_CATALOGUE = eINSTANCE.getTypeAssetCatalogue();

	/**
	 * The meta object literal for the '<em><b>Type Assets</b></em>' reference list feature.
	 * @generated
	 */
	public static final EReference TYPE_ASSET_CATALOGUE__TYPE_ASSETS = eINSTANCE.getTypeAssetCatalogue_TypeAssets();

	/**
	 * The meta object literal for the '<em><b>Status</b></em>' containment reference feature.
	 * @generated
	 */
	public static final EReference TYPE_ASSET_CATALOGUE__STATUS = eINSTANCE.getTypeAssetCatalogue_Status();

	/**
	 * The meta object literal for the '{@link CIM15.IEC61970.Informative.InfTypeAsset.GeneratorTypeAsset <em>Generator Type Asset</em>}' class.
	 * @generated
	 */
	public static final EClass GENERATOR_TYPE_ASSET = eINSTANCE.getGeneratorTypeAsset();

	/**
	 * The meta object literal for the '<em><b>XDirect Sync</b></em>' attribute feature.
	 * @generated
	 */
	public static final EAttribute GENERATOR_TYPE_ASSET__XDIRECT_SYNC = eINSTANCE.getGeneratorTypeAsset_XDirectSync();

	/**
	 * The meta object literal for the '<em><b>RQuad Subtrans</b></em>' attribute feature.
	 * @generated
	 */
	public static final EAttribute GENERATOR_TYPE_ASSET__RQUAD_SUBTRANS = eINSTANCE.getGeneratorTypeAsset_RQuadSubtrans();

	/**
	 * The meta object literal for the '<em><b>RDirect Sync</b></em>' attribute feature.
	 * @generated
	 */
	public static final EAttribute GENERATOR_TYPE_ASSET__RDIRECT_SYNC = eINSTANCE.getGeneratorTypeAsset_RDirectSync();

	/**
	 * The meta object literal for the '<em><b>RDirect Subtrans</b></em>' attribute feature.
	 * @generated
	 */
	public static final EAttribute GENERATOR_TYPE_ASSET__RDIRECT_SUBTRANS = eINSTANCE.getGeneratorTypeAsset_RDirectSubtrans();

	/**
	 * The meta object literal for the '<em><b>Max Q</b></em>' attribute feature.
	 * @generated
	 */
	public static final EAttribute GENERATOR_TYPE_ASSET__MAX_Q = eINSTANCE.getGeneratorTypeAsset_MaxQ();

	/**
	 * The meta object literal for the '<em><b>XQuad Sync</b></em>' attribute feature.
	 * @generated
	 */
	public static final EAttribute GENERATOR_TYPE_ASSET__XQUAD_SYNC = eINSTANCE.getGeneratorTypeAsset_XQuadSync();

	/**
	 * The meta object literal for the '<em><b>Min Q</b></em>' attribute feature.
	 * @generated
	 */
	public static final EAttribute GENERATOR_TYPE_ASSET__MIN_Q = eINSTANCE.getGeneratorTypeAsset_MinQ();

	/**
	 * The meta object literal for the '<em><b>Min P</b></em>' attribute feature.
	 * @generated
	 */
	public static final EAttribute GENERATOR_TYPE_ASSET__MIN_P = eINSTANCE.getGeneratorTypeAsset_MinP();

	/**
	 * The meta object literal for the '<em><b>Max P</b></em>' attribute feature.
	 * @generated
	 */
	public static final EAttribute GENERATOR_TYPE_ASSET__MAX_P = eINSTANCE.getGeneratorTypeAsset_MaxP();

	/**
	 * The meta object literal for the '<em><b>RQuad Sync</b></em>' attribute feature.
	 * @generated
	 */
	public static final EAttribute GENERATOR_TYPE_ASSET__RQUAD_SYNC = eINSTANCE.getGeneratorTypeAsset_RQuadSync();

	/**
	 * The meta object literal for the '<em><b>XQuad Subtrans</b></em>' attribute feature.
	 * @generated
	 */
	public static final EAttribute GENERATOR_TYPE_ASSET__XQUAD_SUBTRANS = eINSTANCE.getGeneratorTypeAsset_XQuadSubtrans();

	/**
	 * The meta object literal for the '<em><b>RDirect Trans</b></em>' attribute feature.
	 * @generated
	 */
	public static final EAttribute GENERATOR_TYPE_ASSET__RDIRECT_TRANS = eINSTANCE.getGeneratorTypeAsset_RDirectTrans();

	/**
	 * The meta object literal for the '<em><b>RQuad Trans</b></em>' attribute feature.
	 * @generated
	 */
	public static final EAttribute GENERATOR_TYPE_ASSET__RQUAD_TRANS = eINSTANCE.getGeneratorTypeAsset_RQuadTrans();

	/**
	 * The meta object literal for the '<em><b>XDirect Subtrans</b></em>' attribute feature.
	 * @generated
	 */
	public static final EAttribute GENERATOR_TYPE_ASSET__XDIRECT_SUBTRANS = eINSTANCE.getGeneratorTypeAsset_XDirectSubtrans();

	/**
	 * The meta object literal for the '<em><b>XDirect Trans</b></em>' attribute feature.
	 * @generated
	 */
	public static final EAttribute GENERATOR_TYPE_ASSET__XDIRECT_TRANS = eINSTANCE.getGeneratorTypeAsset_XDirectTrans();

	/**
	 * The meta object literal for the '<em><b>XQuad Trans</b></em>' attribute feature.
	 * @generated
	 */
	public static final EAttribute GENERATOR_TYPE_ASSET__XQUAD_TRANS = eINSTANCE.getGeneratorTypeAsset_XQuadTrans();

}

} //InfTypeAssetPackage
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.exec.physical.impl.union; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import org.apache.calcite.util.Pair; import org.apache.drill.common.exceptions.DrillRuntimeException; import org.apache.drill.common.expression.ErrorCollector; import org.apache.drill.common.expression.ErrorCollectorImpl; import org.apache.drill.common.expression.LogicalExpression; import org.apache.drill.common.expression.SchemaPath; import org.apache.drill.common.types.TypeProtos; import org.apache.drill.common.types.Types; import org.apache.drill.exec.ExecConstants; import org.apache.drill.exec.exception.ClassTransformationException; import org.apache.drill.exec.exception.OutOfMemoryException; import org.apache.drill.exec.exception.SchemaChangeException; import org.apache.drill.exec.expr.ClassGenerator; import org.apache.drill.exec.expr.CodeGenerator; import org.apache.drill.exec.expr.ExpressionTreeMaterializer; import org.apache.drill.exec.expr.ValueVectorWriteExpression; import org.apache.drill.exec.ops.FragmentContext; import org.apache.drill.exec.physical.config.UnionAll; import org.apache.drill.exec.record.AbstractBinaryRecordBatch; import 
org.apache.drill.exec.record.BatchSchema; import org.apache.drill.exec.record.JoinBatchMemoryManager; import org.apache.drill.exec.record.MaterializedField; import org.apache.drill.exec.record.RecordBatch; import org.apache.drill.exec.record.RecordBatchMemoryManager; import org.apache.drill.exec.record.RecordBatchSizer; import org.apache.drill.exec.record.TransferPair; import org.apache.drill.exec.record.TypedFieldId; import org.apache.drill.exec.record.VectorAccessibleUtilities; import org.apache.drill.exec.record.VectorWrapper; import org.apache.drill.exec.resolver.TypeCastRules; import org.apache.drill.exec.util.VectorUtil; import org.apache.drill.exec.vector.FixedWidthVector; import org.apache.drill.exec.vector.SchemaChangeCallBack; import org.apache.drill.exec.vector.ValueVector; import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; import java.util.Stack; public class UnionAllRecordBatch extends AbstractBinaryRecordBatch<UnionAll> { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(UnionAllRecordBatch.class); private SchemaChangeCallBack callBack = new SchemaChangeCallBack(); private UnionAller unionall; private final List<TransferPair> transfers = Lists.newArrayList(); private List<ValueVector> allocationVectors = Lists.newArrayList(); private int recordCount = 0; private UnionInputIterator unionInputIterator; public UnionAllRecordBatch(UnionAll config, List<RecordBatch> children, FragmentContext context) throws OutOfMemoryException { super(config, context, true, children.get(0), children.get(1)); // get the output batch size from config. 
int configuredBatchSize = (int) context.getOptions().getOption(ExecConstants.OUTPUT_BATCH_SIZE_VALIDATOR); batchMemoryManager = new RecordBatchMemoryManager(numInputs, configuredBatchSize); logger.debug("BATCH_STATS, configured output batch size: {}", configuredBatchSize); } @Override protected void killIncoming(boolean sendUpstream) { left.kill(sendUpstream); right.kill(sendUpstream); } protected void buildSchema() throws SchemaChangeException { if (! prefetchFirstBatchFromBothSides()) { state = BatchState.DONE; return; } unionInputIterator = new UnionInputIterator(leftUpstream, left, rightUpstream, right); if (leftUpstream == IterOutcome.NONE && rightUpstream == IterOutcome.OK_NEW_SCHEMA) { inferOutputFieldsOneSide(right.getSchema()); } else if (rightUpstream == IterOutcome.NONE && leftUpstream == IterOutcome.OK_NEW_SCHEMA) { inferOutputFieldsOneSide((left.getSchema())); } else if (leftUpstream == IterOutcome.OK_NEW_SCHEMA && rightUpstream == IterOutcome.OK_NEW_SCHEMA) { inferOutputFieldsBothSide(left.getSchema(), right.getSchema()); } container.buildSchema(BatchSchema.SelectionVectorMode.NONE); VectorAccessibleUtilities.allocateVectors(container, 0); VectorAccessibleUtilities.setValueCount(container,0); } @Override public IterOutcome innerNext() { try { while (true) { if (!unionInputIterator.hasNext()) { return IterOutcome.NONE; } Pair<IterOutcome, BatchStatusWrappper> nextBatch = unionInputIterator.next(); IterOutcome upstream = nextBatch.left; BatchStatusWrappper batchStatus = nextBatch.right; switch (upstream) { case NONE: case OUT_OF_MEMORY: case STOP: return upstream; case OK_NEW_SCHEMA: return doWork(batchStatus, true); case OK: // skip batches with same schema as the previous one yet having 0 row. 
if (batchStatus.batch.getRecordCount() == 0) { VectorAccessibleUtilities.clear(batchStatus.batch); continue; } return doWork(batchStatus, false); default: throw new IllegalStateException(String.format("Unknown state %s.", upstream)); } } } catch (ClassTransformationException | IOException | SchemaChangeException ex) { context.getExecutorState().fail(ex); killIncoming(false); return IterOutcome.STOP; } } @Override public int getRecordCount() { return recordCount; } @SuppressWarnings("resource") private IterOutcome doWork(BatchStatusWrappper batchStatus, boolean newSchema) throws ClassTransformationException, IOException, SchemaChangeException { Preconditions.checkArgument(batchStatus.batch.getSchema().getFieldCount() == container.getSchema().getFieldCount(), "Input batch and output batch have different field counthas!"); if (newSchema) { createUnionAller(batchStatus.batch); } // Get number of records to include in the batch. final int recordsToProcess = Math.min(batchMemoryManager.getOutputRowCount(), batchStatus.getRemainingRecords()); container.zeroVectors(); batchMemoryManager.allocateVectors(allocationVectors, recordsToProcess); recordCount = unionall.unionRecords(batchStatus.recordsProcessed, recordsToProcess, 0); VectorUtil.setValueCount(allocationVectors, recordCount); // save number of records processed so far in batch status. 
batchStatus.recordsProcessed += recordCount; batchMemoryManager.updateOutgoingStats(recordCount); if (logger.isDebugEnabled()) { logger.debug("BATCH_STATS, outgoing: {}", new RecordBatchSizer(this)); } if (callBack.getSchemaChangedAndReset()) { return IterOutcome.OK_NEW_SCHEMA; } else { return IterOutcome.OK; } } private void createUnionAller(RecordBatch inputBatch) throws ClassTransformationException, IOException, SchemaChangeException { transfers.clear(); allocationVectors.clear(); final ClassGenerator<UnionAller> cg = CodeGenerator.getRoot(UnionAller.TEMPLATE_DEFINITION, context.getOptions()); cg.getCodeGenerator().plainJavaCapable(true); // cg.getCodeGenerator().saveCodeForDebugging(true); int index = 0; for(VectorWrapper<?> vw : inputBatch) { ValueVector vvIn = vw.getValueVector(); ValueVector vvOut = container.getValueVector(index).getValueVector(); final ErrorCollector collector = new ErrorCollectorImpl(); // According to input data names, Minortypes, Datamodes, choose to // transfer directly, // rename columns or // cast data types (Minortype or DataMode) if (container.getSchema().getColumn(index).hasSameTypeAndMode(vvIn.getField()) && vvIn.getField().getType().getMinorType() != TypeProtos.MinorType.MAP // Per DRILL-5521, existing bug for map transfer ) { // Transfer column TransferPair tp = vvIn.makeTransferPair(vvOut); transfers.add(tp); } else if (vvIn.getField().getType().getMinorType() == TypeProtos.MinorType.NULL) { continue; } else { // Copy data in order to rename the column SchemaPath inputPath = SchemaPath.getSimplePath(vvIn.getField().getName()); MaterializedField inField = vvIn.getField(); MaterializedField outputField = vvOut.getField(); LogicalExpression expr = ExpressionTreeMaterializer.materialize(inputPath, inputBatch, collector, context.getFunctionRegistry()); if (collector.hasErrors()) { throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema. 
Errors:\n %s.", collector.toErrorString())); } // If the inputs' DataMode is required and the outputs' DataMode is not required // cast to the one with the least restriction if(inField.getType().getMode() == TypeProtos.DataMode.REQUIRED && outputField.getType().getMode() != TypeProtos.DataMode.REQUIRED) { expr = ExpressionTreeMaterializer.convertToNullableType(expr, inField.getType().getMinorType(), context.getFunctionRegistry(), collector); if (collector.hasErrors()) { throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema. Errors:\n %s.", collector.toErrorString())); } } // If two inputs' MinorTypes are different, // Insert a cast before the Union operation if(inField.getType().getMinorType() != outputField.getType().getMinorType()) { expr = ExpressionTreeMaterializer.addCastExpression(expr, outputField.getType(), context.getFunctionRegistry(), collector); if (collector.hasErrors()) { throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema. 
Errors:\n %s.", collector.toErrorString())); } } TypedFieldId fid = container.getValueVectorId(SchemaPath.getSimplePath(outputField.getName())); boolean useSetSafe = !(vvOut instanceof FixedWidthVector); ValueVectorWriteExpression write = new ValueVectorWriteExpression(fid, expr, useSetSafe); cg.addExpr(write); allocationVectors.add(vvOut); } ++index; } unionall = context.getImplementationClass(cg.getCodeGenerator()); unionall.setup(context, inputBatch, this, transfers); } // The output table's column names always follow the left table, // where the output type is chosen based on DRILL's implicit casting rules private void inferOutputFieldsBothSide(final BatchSchema leftSchema, final BatchSchema rightSchema) { // outputFields = Lists.newArrayList(); final Iterator<MaterializedField> leftIter = leftSchema.iterator(); final Iterator<MaterializedField> rightIter = rightSchema.iterator(); int index = 1; while (leftIter.hasNext() && rightIter.hasNext()) { MaterializedField leftField = leftIter.next(); MaterializedField rightField = rightIter.next(); if (leftField.hasSameTypeAndMode(rightField)) { TypeProtos.MajorType.Builder builder = TypeProtos.MajorType.newBuilder().setMinorType(leftField.getType().getMinorType()).setMode(leftField.getDataMode()); builder = Types.calculateTypePrecisionAndScale(leftField.getType(), rightField.getType(), builder); container.addOrGet(MaterializedField.create(leftField.getName(), builder.build()), callBack); } else if (Types.isUntypedNull(rightField.getType())) { container.addOrGet(leftField, callBack); } else if (Types.isUntypedNull(leftField.getType())) { container.addOrGet(MaterializedField.create(leftField.getName(), rightField.getType()), callBack); } else { // If the output type is not the same, // cast the column of one of the table to a data type which is the Least Restrictive TypeProtos.MajorType.Builder builder = TypeProtos.MajorType.newBuilder(); if (leftField.getType().getMinorType() == rightField.getType().getMinorType()) { 
builder.setMinorType(leftField.getType().getMinorType()); builder = Types.calculateTypePrecisionAndScale(leftField.getType(), rightField.getType(), builder); } else { List<TypeProtos.MinorType> types = Lists.newLinkedList(); types.add(leftField.getType().getMinorType()); types.add(rightField.getType().getMinorType()); TypeProtos.MinorType outputMinorType = TypeCastRules.getLeastRestrictiveType(types); if (outputMinorType == null) { throw new DrillRuntimeException("Type mismatch between " + leftField.getType().getMinorType().toString() + " on the left side and " + rightField.getType().getMinorType().toString() + " on the right side in column " + index + " of UNION ALL"); } builder.setMinorType(outputMinorType); } // The output data mode should be as flexible as the more flexible one from the two input tables List<TypeProtos.DataMode> dataModes = Lists.newLinkedList(); dataModes.add(leftField.getType().getMode()); dataModes.add(rightField.getType().getMode()); builder.setMode(TypeCastRules.getLeastRestrictiveDataMode(dataModes)); container.addOrGet(MaterializedField.create(leftField.getName(), builder.build()), callBack); } ++index; } assert !leftIter.hasNext() && ! 
        rightIter.hasNext() : "Mis-match of column count should have been detected when validating sqlNode at planning";
  }

  /**
   * Populates the output container directly from the one available input schema
   * (used when only one side of the UNION ALL produced a schema).
   *
   * @param schema schema of the single contributing input batch
   */
  private void inferOutputFieldsOneSide(final BatchSchema schema) {
    for (MaterializedField field : schema) {
      container.addOrGet(field, callBack);
    }
  }

  /**
   * @return true iff both fields share the same minor type AND the same data mode
   *         (nullability); precision/scale are deliberately not compared here.
   */
  private static boolean hasSameTypeAndMode(MaterializedField leftField, MaterializedField rightField) {
    return (leftField.getType().getMinorType() == rightField.getType().getMinorType())
        && (leftField.getType().getMode() == rightField.getType().getMode());
  }

  /**
   * Book-keeping for one input side of the UNION ALL: the batch itself, which
   * input it came from (0 = left, 1 = right), and how many of its records have
   * been consumed so far.
   *
   * NOTE(review): class name carries a typo ("Wrappper"); renaming would touch
   * all uses, so it is only documented here.
   */
  private class BatchStatusWrappper {
    // True until the already-fetched (prefetched) batch has been handed out once.
    boolean prefetched;
    final RecordBatch batch;
    // 0 for the left input, 1 for the right input.
    final int inputIndex;
    // Outcome observed when this batch was first fetched.
    final IterOutcome outcome;
    int recordsProcessed;
    int totalRecordsToProcess;

    BatchStatusWrappper(boolean prefetched, IterOutcome outcome, RecordBatch batch, int inputIndex) {
      this.prefetched = prefetched;
      this.outcome = outcome;
      this.batch = batch;
      this.inputIndex = inputIndex;
      this.totalRecordsToProcess = batch.getRecordCount();
      this.recordsProcessed = 0;
    }

    /** @return number of records in the current batch not yet copied to the output. */
    public int getRemainingRecords() {
      return (totalRecordsToProcess - recordsProcessed);
    }
  }

  /**
   * Iterates over the two inputs of the UNION ALL, draining the left input
   * first (it is pushed onto the stack last), then the right. Each call to
   * {@link #next()} yields the outcome plus the batch status to process.
   */
  private class UnionInputIterator implements Iterator<Pair<IterOutcome, BatchStatusWrappper>> {
    // Top of stack = input currently being drained; left is pushed last so it drains first.
    private Stack<BatchStatusWrappper> batchStatusStack = new Stack<>();

    UnionInputIterator(IterOutcome leftOutCome, RecordBatch left, IterOutcome rightOutCome, RecordBatch right) {
      // Only inputs that actually delivered a schema participate.
      if (rightOutCome == IterOutcome.OK_NEW_SCHEMA) {
        batchStatusStack.push(new BatchStatusWrappper(true, IterOutcome.OK_NEW_SCHEMA, right, 1));
      }
      if (leftOutCome == IterOutcome.OK_NEW_SCHEMA) {
        batchStatusStack.push(new BatchStatusWrappper(true, IterOutcome.OK_NEW_SCHEMA, left, 0));
      }
    }

    @Override
    public boolean hasNext() {
      return !batchStatusStack.isEmpty();
    }

    /**
     * Returns the next (outcome, batch-status) pair. A prefetched batch is
     * returned once with its original outcome; afterwards batches are pulled
     * from the upstream operator until it reports NONE, at which point the
     * input is popped and the next input (if any) takes over.
     */
    @Override
    public Pair<IterOutcome, BatchStatusWrappper> next() {
      while (!batchStatusStack.isEmpty()) {
        BatchStatusWrappper topStatus = batchStatusStack.peek();

        if (topStatus.prefetched) {
          // First hand-out of a batch fetched during schema negotiation.
          topStatus.prefetched = false;
          batchMemoryManager.update(topStatus.batch, topStatus.inputIndex);
          logger.debug("BATCH_STATS, incoming {}: {}", topStatus.inputIndex == 0 ? "left" : "right",
              batchMemoryManager.getRecordBatchSizer(topStatus.inputIndex));
          return Pair.of(topStatus.outcome, topStatus);
        } else {
          // If we have more records to process, just return the top batch.
          if (topStatus.getRemainingRecords() > 0) {
            return Pair.of(IterOutcome.OK, topStatus);
          }

          // Current batch fully consumed: pull the next one from upstream.
          IterOutcome outcome = UnionAllRecordBatch.this.next(topStatus.inputIndex, topStatus.batch);

          switch (outcome) {
          case OK:
          case OK_NEW_SCHEMA:
            // since we just read a new batch, update memory manager and initialize batch stats.
            topStatus.recordsProcessed = 0;
            topStatus.totalRecordsToProcess = topStatus.batch.getRecordCount();
            batchMemoryManager.update(topStatus.batch, topStatus.inputIndex);
            logger.debug("BATCH_STATS, incoming {}: {}", topStatus.inputIndex == 0 ? "left" : "right",
                batchMemoryManager.getRecordBatchSizer(topStatus.inputIndex));
            return Pair.of(outcome, topStatus);
          case OUT_OF_MEMORY:
          case STOP:
            // Fatal for this input: surface the outcome and stop iterating it.
            batchStatusStack.pop();
            return Pair.of(outcome, topStatus);
          case NONE:
            // Input exhausted; fall through to the next input, or report NONE overall.
            batchStatusStack.pop();
            if (batchStatusStack.isEmpty()) {
              return Pair.of(IterOutcome.NONE, null);
            }
            break;
          default:
            throw new IllegalStateException(String.format("Unexpected state %s", outcome));
          }
        }
      }
      throw new NoSuchElementException();
    }

    @Override
    public void remove() {
      throw new UnsupportedOperationException();
    }
  }

  /**
   * Releases operator resources and, when debug logging is enabled, dumps the
   * aggregated incoming/outgoing batch statistics gathered by the memory manager.
   */
  @Override
  public void close() {
    super.close();
    updateBatchMemoryManagerStats();

    if (logger.isDebugEnabled()) {
      logger.debug("BATCH_STATS, incoming aggregate left: batch count : {}, avg bytes : {}, avg row bytes : {}, record count : {}",
        batchMemoryManager.getNumIncomingBatches(JoinBatchMemoryManager.LEFT_INDEX),
        batchMemoryManager.getAvgInputBatchSize(JoinBatchMemoryManager.LEFT_INDEX),
        batchMemoryManager.getAvgInputRowWidth(JoinBatchMemoryManager.LEFT_INDEX),
        batchMemoryManager.getTotalInputRecords(JoinBatchMemoryManager.LEFT_INDEX));

      logger.debug("BATCH_STATS, incoming aggregate right: batch count : {}, avg bytes : {}, avg row bytes : {}, record count : {}",
        batchMemoryManager.getNumIncomingBatches(JoinBatchMemoryManager.RIGHT_INDEX),
        batchMemoryManager.getAvgInputBatchSize(JoinBatchMemoryManager.RIGHT_INDEX),
        batchMemoryManager.getAvgInputRowWidth(JoinBatchMemoryManager.RIGHT_INDEX),
        batchMemoryManager.getTotalInputRecords(JoinBatchMemoryManager.RIGHT_INDEX));

      logger.debug("BATCH_STATS, outgoing aggregate: batch count : {}, avg bytes : {}, avg row bytes : {}, record count : {}",
        batchMemoryManager.getNumOutgoingBatches(),
        batchMemoryManager.getAvgOutputBatchSize(),
        batchMemoryManager.getAvgOutputRowWidth(),
        batchMemoryManager.getTotalOutputRecords());
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.math3.stat; import java.io.Serializable; import java.text.NumberFormat; import java.util.Collection; import java.util.Iterator; import java.util.Comparator; import java.util.Map; import java.util.TreeMap; import org.apache.commons.math3.exception.MathIllegalArgumentException; import org.apache.commons.math3.exception.NullArgumentException; import org.apache.commons.math3.exception.util.LocalizedFormats; import org.apache.commons.math3.util.MathUtils; /** * Maintains a frequency distribution. * <p> * Accepts int, long, char or Comparable values. New values added must be * comparable to those that have been added, otherwise the add method will * throw an IllegalArgumentException.</p> * <p> * Integer values (int, long, Integer, Long) are not distinguished by type -- * i.e. <code>addValue(Long.valueOf(2)), addValue(2), addValue(2l)</code> all have * the same effect (similarly for arguments to <code>getCount,</code> etc.).</p> * <p> * char values are converted by <code>addValue</code> to Character instances. * As such, these values are not comparable to integral values, so attempts * to combine integral types with chars in a frequency distribution will fail. 
* </p> * <p> * The values are ordered using the default (natural order), unless a * <code>Comparator</code> is supplied in the constructor.</p> * * @version $Id: Frequency.java 1455703 2013-03-12 20:46:23Z tn $ */ public class Frequency implements Serializable { /** Serializable version identifier */ private static final long serialVersionUID = -3845586908418844111L; /** underlying collection */ private final TreeMap<Comparable<?>, Long> freqTable; /** * Default constructor. */ public Frequency() { freqTable = new TreeMap<Comparable<?>, Long>(); } /** * Constructor allowing values Comparator to be specified. * * @param comparator Comparator used to order values */ @SuppressWarnings("unchecked") // TODO is the cast OK? public Frequency(Comparator<?> comparator) { freqTable = new TreeMap<Comparable<?>, Long>((Comparator<? super Comparable<?>>) comparator); } /** * Return a string representation of this frequency * distribution. * * @return a string representation. */ @Override public String toString() { NumberFormat nf = NumberFormat.getPercentInstance(); StringBuilder outBuffer = new StringBuilder(); outBuffer.append("Value \t Freq. \t Pct. \t Cum Pct. \n"); Iterator<Comparable<?>> iter = freqTable.keySet().iterator(); while (iter.hasNext()) { Comparable<?> value = iter.next(); outBuffer.append(value); outBuffer.append('\t'); outBuffer.append(getCount(value)); outBuffer.append('\t'); outBuffer.append(nf.format(getPct(value))); outBuffer.append('\t'); outBuffer.append(nf.format(getCumPct(value))); outBuffer.append('\n'); } return outBuffer.toString(); } /** * Adds 1 to the frequency count for v. * <p> * If other objects have already been added to this Frequency, v must * be comparable to those that have already been added. * </p> * * @param v the value to add. 
* @throws MathIllegalArgumentException if <code>v</code> is not comparable with previous entries */ public void addValue(Comparable<?> v) throws MathIllegalArgumentException { incrementValue(v, 1); } /** * Increments the frequency count for v. * <p> * If other objects have already been added to this Frequency, v must * be comparable to those that have already been added. * </p> * * @param v the value to add. * @param increment the amount by which the value should be incremented * @throws IllegalArgumentException if <code>v</code> is not comparable with previous entries * @since 3.1 */ public void incrementValue(Comparable<?> v, long increment){ Comparable<?> obj = v; if (v instanceof Integer) { obj = Long.valueOf(((Integer) v).longValue()); } try { Long count = freqTable.get(obj); if (count == null) { freqTable.put(obj, Long.valueOf(increment)); } else { freqTable.put(obj, Long.valueOf(count.longValue() + increment)); } } catch (ClassCastException ex) { //TreeMap will throw ClassCastException if v is not comparable throw new MathIllegalArgumentException( LocalizedFormats.INSTANCES_NOT_COMPARABLE_TO_EXISTING_VALUES, v.getClass().getName()); } } /** * Adds 1 to the frequency count for v. * * @param v the value to add. * @throws MathIllegalArgumentException if the table contains entries not * comparable to Integer */ public void addValue(int v) throws MathIllegalArgumentException { addValue(Long.valueOf(v)); } /** * Adds 1 to the frequency count for v. * * @param v the value to add. * @throws MathIllegalArgumentException if the table contains entries not * comparable to Long */ public void addValue(long v) throws MathIllegalArgumentException { addValue(Long.valueOf(v)); } /** * Adds 1 to the frequency count for v. * * @param v the value to add. 
* @throws MathIllegalArgumentException if the table contains entries not * comparable to Char */ public void addValue(char v) throws MathIllegalArgumentException { addValue(Character.valueOf(v)); } /** Clears the frequency table */ public void clear() { freqTable.clear(); } /** * Returns an Iterator over the set of values that have been added. * <p> * If added values are integral (i.e., integers, longs, Integers, or Longs), * they are converted to Longs when they are added, so the objects returned * by the Iterator will in this case be Longs.</p> * * @return values Iterator */ public Iterator<Comparable<?>> valuesIterator() { return freqTable.keySet().iterator(); } /** * Return an Iterator over the set of keys and values that have been added. * Using the entry set to iterate is more efficient in the case where you * need to access respective counts as well as values, since it doesn't * require a "get" for every key...the value is provided in the Map.Entry. * <p> * If added values are integral (i.e., integers, longs, Integers, or Longs), * they are converted to Longs when they are added, so the values of the * map entries returned by the Iterator will in this case be Longs.</p> * * @return entry set Iterator * @since 3.1 */ public Iterator<Map.Entry<Comparable<?>, Long>> entrySetIterator() { return freqTable.entrySet().iterator(); } //------------------------------------------------------------------------- /** * Returns the sum of all frequencies. * * @return the total frequency count. */ public long getSumFreq() { long result = 0; Iterator<Long> iterator = freqTable.values().iterator(); while (iterator.hasNext()) { result += iterator.next().longValue(); } return result; } /** * Returns the number of values = v. * Returns 0 if the value is not comparable. * * @param v the value to lookup. * @return the frequency of v. 
*/ public long getCount(Comparable<?> v) { if (v instanceof Integer) { return getCount(((Integer) v).longValue()); } long result = 0; try { Long count = freqTable.get(v); if (count != null) { result = count.longValue(); } } catch (ClassCastException ex) { // NOPMD // ignore and return 0 -- ClassCastException will be thrown if value is not comparable } return result; } /** * Returns the number of values = v. * * @param v the value to lookup. * @return the frequency of v. */ public long getCount(int v) { return getCount(Long.valueOf(v)); } /** * Returns the number of values = v. * * @param v the value to lookup. * @return the frequency of v. */ public long getCount(long v) { return getCount(Long.valueOf(v)); } /** * Returns the number of values = v. * * @param v the value to lookup. * @return the frequency of v. */ public long getCount(char v) { return getCount(Character.valueOf(v)); } /** * Returns the number of values in the frequency table. * * @return the number of unique values that have been added to the frequency table. * @see #valuesIterator() */ public int getUniqueCount(){ return freqTable.keySet().size(); } /** * Returns the percentage of values that are equal to v * (as a proportion between 0 and 1). * <p> * Returns <code>Double.NaN</code> if no values have been added.</p> * * @param v the value to lookup * @return the proportion of values equal to v */ public double getPct(Comparable<?> v) { final long sumFreq = getSumFreq(); if (sumFreq == 0) { return Double.NaN; } return (double) getCount(v) / (double) sumFreq; } /** * Returns the percentage of values that are equal to v * (as a proportion between 0 and 1). * * @param v the value to lookup * @return the proportion of values equal to v */ public double getPct(int v) { return getPct(Long.valueOf(v)); } /** * Returns the percentage of values that are equal to v * (as a proportion between 0 and 1). 
* * @param v the value to lookup * @return the proportion of values equal to v */ public double getPct(long v) { return getPct(Long.valueOf(v)); } /** * Returns the percentage of values that are equal to v * (as a proportion between 0 and 1). * * @param v the value to lookup * @return the proportion of values equal to v */ public double getPct(char v) { return getPct(Character.valueOf(v)); } //----------------------------------------------------------------------------------------- /** * Returns the cumulative frequency of values less than or equal to v. * <p> * Returns 0 if v is not comparable to the values set.</p> * * @param v the value to lookup. * @return the proportion of values equal to v */ @SuppressWarnings({ "rawtypes", "unchecked" }) public long getCumFreq(Comparable<?> v) { if (getSumFreq() == 0) { return 0; } if (v instanceof Integer) { return getCumFreq(((Integer) v).longValue()); } Comparator<Comparable<?>> c = (Comparator<Comparable<?>>) freqTable.comparator(); if (c == null) { c = new NaturalComparator(); } long result = 0; try { Long value = freqTable.get(v); if (value != null) { result = value.longValue(); } } catch (ClassCastException ex) { return result; // v is not comparable } if (c.compare(v, freqTable.firstKey()) < 0) { return 0; // v is comparable, but less than first value } if (c.compare(v, freqTable.lastKey()) >= 0) { return getSumFreq(); // v is comparable, but greater than the last value } Iterator<Comparable<?>> values = valuesIterator(); while (values.hasNext()) { Comparable<?> nextValue = values.next(); if (c.compare(v, nextValue) > 0) { result += getCount(nextValue); } else { return result; } } return result; } /** * Returns the cumulative frequency of values less than or equal to v. 
* <p> * Returns 0 if v is not comparable to the values set.</p> * * @param v the value to lookup * @return the proportion of values equal to v */ public long getCumFreq(int v) { return getCumFreq(Long.valueOf(v)); } /** * Returns the cumulative frequency of values less than or equal to v. * <p> * Returns 0 if v is not comparable to the values set.</p> * * @param v the value to lookup * @return the proportion of values equal to v */ public long getCumFreq(long v) { return getCumFreq(Long.valueOf(v)); } /** * Returns the cumulative frequency of values less than or equal to v. * <p> * Returns 0 if v is not comparable to the values set.</p> * * @param v the value to lookup * @return the proportion of values equal to v */ public long getCumFreq(char v) { return getCumFreq(Character.valueOf(v)); } //---------------------------------------------------------------------------------------------- /** * Returns the cumulative percentage of values less than or equal to v * (as a proportion between 0 and 1). * <p> * Returns <code>Double.NaN</code> if no values have been added. * Returns 0 if at least one value has been added, but v is not comparable * to the values set.</p> * * @param v the value to lookup * @return the proportion of values less than or equal to v */ public double getCumPct(Comparable<?> v) { final long sumFreq = getSumFreq(); if (sumFreq == 0) { return Double.NaN; } return (double) getCumFreq(v) / (double) sumFreq; } /** * Returns the cumulative percentage of values less than or equal to v * (as a proportion between 0 and 1). * <p> * Returns 0 if v is not comparable to the values set.</p> * * @param v the value to lookup * @return the proportion of values less than or equal to v */ public double getCumPct(int v) { return getCumPct(Long.valueOf(v)); } /** * Returns the cumulative percentage of values less than or equal to v * (as a proportion between 0 and 1). 
* <p> * Returns 0 if v is not comparable to the values set.</p> * * @param v the value to lookup * @return the proportion of values less than or equal to v */ public double getCumPct(long v) { return getCumPct(Long.valueOf(v)); } /** * Returns the cumulative percentage of values less than or equal to v * (as a proportion between 0 and 1). * <p> * Returns 0 if v is not comparable to the values set.</p> * * @param v the value to lookup * @return the proportion of values less than or equal to v */ public double getCumPct(char v) { return getCumPct(Character.valueOf(v)); } //---------------------------------------------------------------------------------------------- /** * Merge another Frequency object's counts into this instance. * This Frequency's counts will be incremented (or set when not already set) * by the counts represented by other. * * @param other the other {@link Frequency} object to be merged * @throws NullArgumentException if {@code other} is null * @since 3.1 */ public void merge(final Frequency other) throws NullArgumentException { MathUtils.checkNotNull(other, LocalizedFormats.NULL_NOT_ALLOWED); final Iterator<Map.Entry<Comparable<?>, Long>> iter = other.entrySetIterator(); while (iter.hasNext()) { final Map.Entry<Comparable<?>, Long> entry = iter.next(); incrementValue(entry.getKey(), entry.getValue()); } } /** * Merge a {@link Collection} of {@link Frequency} objects into this instance. * This Frequency's counts will be incremented (or set when not already set) * by the counts represented by each of the others. 
* * @param others the other {@link Frequency} objects to be merged * @throws NullArgumentException if the collection is null * @since 3.1 */ public void merge(final Collection<Frequency> others) throws NullArgumentException { MathUtils.checkNotNull(others, LocalizedFormats.NULL_NOT_ALLOWED); for (final Frequency freq : others) { merge(freq); } } //---------------------------------------------------------------------------------------------- /** * A Comparator that compares comparable objects using the * natural order. Copied from Commons Collections ComparableComparator. */ private static class NaturalComparator<T extends Comparable<T>> implements Comparator<Comparable<T>>, Serializable { /** Serializable version identifier */ private static final long serialVersionUID = -3852193713161395148L; /** * Compare the two {@link Comparable Comparable} arguments. * This method is equivalent to: * <pre>(({@link Comparable Comparable})o1).{@link Comparable#compareTo compareTo}(o2)</pre> * * @param o1 the first object * @param o2 the second object * @return result of comparison * @throws NullPointerException when <i>o1</i> is <code>null</code>, * or when <code>((Comparable)o1).compareTo(o2)</code> does * @throws ClassCastException when <i>o1</i> is not a {@link Comparable Comparable}, * or when <code>((Comparable)o1).compareTo(o2)</code> does */ @SuppressWarnings("unchecked") // cast to (T) may throw ClassCastException, see Javadoc public int compare(Comparable<T> o1, Comparable<T> o2) { return o1.compareTo((T) o2); } } /** {@inheritDoc} */ @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((freqTable == null) ? 
0 : freqTable.hashCode()); return result; } /** {@inheritDoc} */ @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (!(obj instanceof Frequency)) { return false; } Frequency other = (Frequency) obj; if (freqTable == null) { if (other.freqTable != null) { return false; } } else if (!freqTable.equals(other.freqTable)) { return false; } return true; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache;

import java.util.Collections;
import java.util.concurrent.Callable;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.CacheMemoryMode;
import org.apache.ignite.cache.CachePeekMode;
import org.apache.ignite.cache.eviction.lru.LruEvictionPolicy;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.util.typedef.CAX;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.spi.swapspace.file.FileSwapSpaceSpi;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.eclipse.jetty.util.ConcurrentHashSet;

import static org.apache.ignite.cache.CacheAtomicityMode.ATOMIC;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC;

/**
 * Stress test that concurrently puts, evicts and gets cache entries so that
 * values bounce between on-heap memory, off-heap memory and swap space, then
 * verifies that no entry is lost and that the per-tier sizes add up.
 */
public class CacheSwapUnswapGetTest extends GridCommonAbstractTest {
    /** Shared in-memory IP finder for discovery of the single test grid. */
    private static final TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true);

    /** Duration of the concurrent load phase, in milliseconds. */
    private static final long DURATION = 30_000;

    /** Off-heap memory limit (bytes); kept tiny to force spill to swap. */
    private static final long OFFHEAP_MEM = 1000;

    /** Max entries kept on-heap by the LRU eviction policy in ONHEAP_TIERED mode. */
    private static final int MAX_HEAP_SIZE = 100;

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(gridName);

        ((TcpDiscoverySpi)cfg.getDiscoverySpi()).setIpFinder(ipFinder);

        // File-based swap SPI is required for the SWAP peek-mode assertions below.
        cfg.setSwapSpaceSpi(new FileSwapSpaceSpi());

        return cfg;
    }

    /**
     * @param atomicityMode Cache atomicity mode.
     * @param memMode Cache memory mode.
     * @param swap {@code True} if swap enabled.
     * @return Cache configuration.
     */
    private CacheConfiguration<Integer, String> cacheConfiguration(CacheAtomicityMode atomicityMode,
        CacheMemoryMode memMode,
        boolean swap) {
        CacheConfiguration<Integer, String> ccfg = new CacheConfiguration<>();

        ccfg.setAtomicityMode(atomicityMode);
        ccfg.setWriteSynchronizationMode(FULL_SYNC);
        ccfg.setMemoryMode(memMode);

        if (memMode == CacheMemoryMode.ONHEAP_TIERED) {
            // Bound the on-heap tier so entries are forced off-heap/swap.
            LruEvictionPolicy plc = new LruEvictionPolicy();
            plc.setMaxSize(MAX_HEAP_SIZE);

            ccfg.setEvictionPolicy(plc);
        }

        if (swap) {
            ccfg.setSwapEnabled(true);

            ccfg.setOffHeapMaxMemory(OFFHEAP_MEM);
        }
        else
            ccfg.setOffHeapMaxMemory(0);

        return ccfg;
    }

    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        super.beforeTestsStarted();

        startGrid(0);
    }

    /** {@inheritDoc} */
    @Override protected void afterTestsStopped() throws Exception {
        super.afterTestsStopped();

        stopAllGrids();
    }

    /** {@inheritDoc} */
    @Override protected long getTestTimeout() {
        return DURATION + 60_000;
    }

    /**
     * @throws Exception If failed.
     */
    public void testTxCacheOffheapEvict() throws Exception {
        swapUnswap(TRANSACTIONAL, CacheMemoryMode.ONHEAP_TIERED, false);
    }

    /**
     * @throws Exception If failed.
     */
    public void testTxCacheOffheapTiered() throws Exception {
        swapUnswap(TRANSACTIONAL, CacheMemoryMode.OFFHEAP_TIERED, false);
    }

    /**
     * @throws Exception If failed.
     */
    public void testTxCacheOffheapSwapEvict() throws Exception {
        swapUnswap(TRANSACTIONAL, CacheMemoryMode.ONHEAP_TIERED, true);
    }

    /**
     * @throws Exception If failed.
     */
    public void testTxCacheOffheapTieredSwapEvict() throws Exception {
        swapUnswap(TRANSACTIONAL, CacheMemoryMode.OFFHEAP_TIERED, true);
    }

    /**
     * @throws Exception If failed.
     */
    public void testAtomicCacheOffheapEvict() throws Exception {
        swapUnswap(ATOMIC, CacheMemoryMode.ONHEAP_TIERED, false);
    }

    /**
     * @throws Exception If failed.
     */
    public void testAtomicCacheOffheapTiered() throws Exception {
        swapUnswap(ATOMIC, CacheMemoryMode.OFFHEAP_TIERED, false);
    }

    /**
     * @throws Exception If failed.
     */
    public void testAtomicCacheOffheapSwapEvict() throws Exception {
        swapUnswap(ATOMIC, CacheMemoryMode.ONHEAP_TIERED, true);
    }

    /**
     * @throws Exception If failed.
     */
    public void testAtomicCacheOffheapTieredSwapEvict() throws Exception {
        swapUnswap(ATOMIC, CacheMemoryMode.OFFHEAP_TIERED, true);
    }

    /**
     * Runs the actual stress scenario: 20 updater threads randomly put, evict
     * or get entries while one reader thread re-reads every known key; at the
     * end every written key must still be readable and the tier sizes must sum
     * to the total cache size.
     *
     * @param atomicityMode Cache atomicity mode.
     * @param memMode Cache memory mode.
     * @param swap {@code True} if swap enabled.
     * @throws Exception If failed.
     */
    private void swapUnswap(CacheAtomicityMode atomicityMode, CacheMemoryMode memMode, boolean swap) throws Exception {
        // NOTE(review): memMode is not included in this log line — presumably an oversight; confirm before relying on logs.
        log.info("Start test [mode=" + atomicityMode + ", swap=" + swap + ']');

        int threadCnt = 20;
        final int keyCnt = 1000;
        final int valCnt = 10000;

        final Ignite g = grid(0);

        // Keys that have definitely been written at least once; only these may be asserted non-null.
        final ConcurrentHashSet<Integer> keys = new ConcurrentHashSet<>();

        final AtomicBoolean done = new AtomicBoolean();

        g.destroyCache(null);

        final IgniteCache<Integer, String> cache = g.createCache(cacheConfiguration(atomicityMode, memMode, swap));

        try {
            // Updater threads: randomly put, locally evict, or read-back a key.
            IgniteInternalFuture<?> fut = multithreadedAsync(new CAX() {
                @Override public void applyx() throws IgniteCheckedException {
                    ThreadLocalRandom rnd = ThreadLocalRandom.current();

                    while (!done.get()) {
                        Integer key = rnd.nextInt(keyCnt);

                        switch (rnd.nextInt(3)) {
                            case 0:
                                cache.put(key, String.valueOf(rnd.nextInt(valCnt)));

                                keys.add(key);

                                break;

                            case 1:
                                cache.localEvict(Collections.singletonList(key));

                                break;

                            case 2:
                                if (keys.contains(key)) {
                                    String val = cache.get(key);

                                    assertNotNull(val);
                                }

                                break;

                            default:
                                assert false;
                        }
                    }
                }
            }, threadCnt, "update-thread");

            // Reader thread: continuously re-reads every key known to have been written.
            IgniteInternalFuture<?> getFut = GridTestUtils.runAsync(new Callable<Void>() {
                @Override public Void call() throws Exception {
                    Thread.currentThread().setName("get-thread");

                    while (!done.get()) {
                        for (Integer key : keys) {
                            String val = cache.get(key);

                            assertNotNull(val);
                        }
                    }

                    return null;
                }
            });

            long endTime = System.currentTimeMillis() + DURATION;

            // Let the load run for DURATION, logging per-tier sizes every 5 seconds.
            while (System.currentTimeMillis() < endTime) {
                Thread.sleep(5000);

                log.info("Cache size [heap=" + cache.localSize(CachePeekMode.ONHEAP) +
                    ", offheap=" + cache.localSize(CachePeekMode.OFFHEAP) +
                    ", swap=" + cache.localSize(CachePeekMode.SWAP) +
                    ", total=" + cache.localSize() +
                    ", offheapMem=" + cache.localMetrics().getOffHeapAllocatedSize() + ']');
            }

            done.set(true);

            fut.get();
            getFut.get();

            // After the load stops, every written key must still be readable
            // regardless of which tier it currently lives in.
            for (Integer key : keys) {
                String val = cache.get(key);

                assertNotNull(val);
            }

            int onheapSize = cache.localSize(CachePeekMode.ONHEAP);
            int offheapSize = cache.localSize(CachePeekMode.OFFHEAP);
            int swapSize = cache.localSize(CachePeekMode.SWAP);
            int total = cache.localSize();
            long offheapMem = cache.localMetrics().getOffHeapAllocatedSize();

            log.info("Cache size [heap=" + onheapSize +
                ", offheap=" + offheapSize +
                ", swap=" + swapSize +
                ", total=" + total +
                ", offheapMem=" + offheapMem + ']');

            // Tier sizes must be consistent with the total.
            assertTrue(total > 0);
            assertEquals(onheapSize + offheapSize + swapSize, total);

            if (memMode == CacheMemoryMode.OFFHEAP_TIERED)
                assertEquals(0, onheapSize);
            else
                assertEquals(MAX_HEAP_SIZE, onheapSize);

            if (swap) {
                // With the tiny off-heap limit, some entries must have spilled to swap.
                assertTrue(swapSize > 0);
                assertTrue(offheapMem <= OFFHEAP_MEM);
            }
            else
                assertEquals(0, swapSize);
        }
        finally {
            done.set(true);
        }
    }
}
// Copyright (C) 2011 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.server.git.meta; import com.google.gerrit.common.Nullable; import com.google.gerrit.entities.Project; import com.google.gerrit.server.GerritPersonIdent; import com.google.gerrit.server.IdentifiedUser; import com.google.gerrit.server.extensions.events.GitReferenceUpdated; import com.google.gerrit.server.git.GitRepositoryManager; import com.google.inject.Inject; import com.google.inject.Provider; import com.google.inject.Singleton; import com.google.inject.assistedinject.Assisted; import java.io.IOException; import org.eclipse.jgit.errors.RepositoryNotFoundException; import org.eclipse.jgit.lib.BatchRefUpdate; import org.eclipse.jgit.lib.CommitBuilder; import org.eclipse.jgit.lib.PersonIdent; import org.eclipse.jgit.lib.RefUpdate; import org.eclipse.jgit.lib.Repository; /** Helps with the updating of a {@link VersionedMetaData}. 
*/
public class MetaDataUpdate implements AutoCloseable {
  /**
   * Factory for updates performed on behalf of the current (identified) user: the commit is
   * committed by the server identity and authored by the user.
   */
  @Singleton
  public static class User {
    private final InternalFactory factory;
    private final GitRepositoryManager mgr;
    private final Provider<PersonIdent> serverIdentProvider;
    private final Provider<IdentifiedUser> identifiedUser;

    @Inject
    User(
        InternalFactory factory,
        GitRepositoryManager mgr,
        @GerritPersonIdent Provider<PersonIdent> serverIdentProvider,
        Provider<IdentifiedUser> identifiedUser) {
      this.factory = factory;
      this.mgr = mgr;
      this.serverIdentProvider = serverIdentProvider;
      this.identifiedUser = identifiedUser;
    }

    /** Returns a committer ident for the current user, derived from the server ident. */
    public PersonIdent getUserPersonIdent() {
      return createPersonIdent(identifiedUser.get());
    }

    /** Create an update for the current user, opening (and owning) the repository. */
    public MetaDataUpdate create(Project.NameKey name)
        throws RepositoryNotFoundException, IOException {
      return create(name, identifiedUser.get());
    }

    /** Create an update for the given user, without a batch ref update. */
    public MetaDataUpdate create(Project.NameKey name, IdentifiedUser user)
        throws RepositoryNotFoundException, IOException {
      return create(name, user, null);
    }

    /**
     * Create an update using an existing batch ref update.
     *
     * <p>This allows batching together updates to multiple metadata refs. For making multiple
     * commits to a single metadata ref, see {@link VersionedMetaData#openUpdate(MetaDataUpdate)}.
     *
     * @param name project name.
     * @param user user for the update.
     * @param batch batch update to use; the caller is responsible for committing the update.
     */
    public MetaDataUpdate create(Project.NameKey name, IdentifiedUser user, BatchRefUpdate batch)
        throws RepositoryNotFoundException, IOException {
      // Repository is opened here, so the returned update is responsible for closing it.
      Repository repo = mgr.openRepository(name);
      MetaDataUpdate md = create(name, repo, user, batch);
      md.setCloseRepository(true);
      return md;
    }

    /**
     * Create an update using an existing batch ref update.
     *
     * <p>This allows batching together updates to multiple metadata refs. For making multiple
     * commits to a single metadata ref, see {@link VersionedMetaData#openUpdate(MetaDataUpdate)}.
     *
     * <p>Important: Create a new MetaDataUpdate instance for each update:
     *
     * <pre>
     * <code>
     *   try (Repository repo = repoMgr.openRepository(allUsersName);
     *       RevWalk rw = new RevWalk(repo)) {
     *     BatchRefUpdate batchUpdate = repo.getRefDatabase().newBatchUpdate();
     *     // WRONG: create the MetaDataUpdate instance here and reuse it for
     *     // all updates in the loop
     *     for{@code (Map.Entry<Account.Id, DiffPreferencesInfo> e : diffPrefsFromDb)} {
     *       // CORRECT: create a new MetaDataUpdate instance for each update
     *       try (MetaDataUpdate md =
     *           metaDataUpdateFactory.create(allUsersName, batchUpdate)) {
     *         md.setMessage("Import diff preferences from reviewdb\n");
     *         VersionedAccountPreferences vPrefs =
     *             VersionedAccountPreferences.forUser(e.getKey());
     *         storeSection(vPrefs.getConfig(), UserConfigSections.DIFF, null,
     *             e.getValue(), DiffPreferencesInfo.defaults());
     *         vPrefs.commit(md);
     *       } catch (ConfigInvalidException e) {
     *         // TODO handle exception
     *       }
     *     }
     *     batchUpdate.execute(rw, NullProgressMonitor.INSTANCE);
     *   }
     * </code>
     * </pre>
     *
     * @param name project name.
     * @param repository the repository to update; the caller is responsible for closing the
     *     repository.
     * @param user user for the update.
     * @param batch batch update to use; the caller is responsible for committing the update.
     */
    public MetaDataUpdate create(
        Project.NameKey name, Repository repository, IdentifiedUser user, BatchRefUpdate batch) {
      MetaDataUpdate md = factory.create(name, repository, batch);
      // Order matters: setAuthor derives the author ident from the committer already set
      // on the commit builder, so the server committer must be set first.
      md.getCommitBuilder().setCommitter(serverIdentProvider.get());
      md.setAuthor(user);
      return md;
    }

    private PersonIdent createPersonIdent(IdentifiedUser user) {
      PersonIdent serverIdent = serverIdentProvider.get();
      return user.newCommitterIdent(serverIdent);
    }
  }

  /**
   * Factory for server-initiated updates: the commit is both authored and committed by the
   * server identity.
   */
  @Singleton
  public static class Server {
    private final InternalFactory factory;
    private final GitRepositoryManager mgr;
    private final Provider<PersonIdent> serverIdentProvider;

    @Inject
    Server(
        InternalFactory factory,
        GitRepositoryManager mgr,
        @GerritPersonIdent Provider<PersonIdent> serverIdentProvider) {
      this.factory = factory;
      this.mgr = mgr;
      this.serverIdentProvider = serverIdentProvider;
    }

    public MetaDataUpdate create(Project.NameKey name)
        throws RepositoryNotFoundException, IOException {
      return create(name, null);
    }

    /** See {@link User#create(Project.NameKey, IdentifiedUser, BatchRefUpdate)} */
    public MetaDataUpdate create(Project.NameKey name, BatchRefUpdate batch)
        throws RepositoryNotFoundException, IOException {
      // Repository is opened here, so the returned update is responsible for closing it.
      Repository repo = mgr.openRepository(name);
      MetaDataUpdate md = factory.create(name, repo, batch);
      md.setCloseRepository(true);
      PersonIdent serverIdent = serverIdentProvider.get();
      md.getCommitBuilder().setAuthor(serverIdent);
      md.getCommitBuilder().setCommitter(serverIdent);
      return md;
    }
  }

  /** Assisted-injection factory; batch may be null for non-batched updates. */
  public interface InternalFactory {
    MetaDataUpdate create(
        @Assisted Project.NameKey projectName,
        @Assisted Repository repository,
        @Assisted @Nullable BatchRefUpdate batch);
  }

  private final GitReferenceUpdated gitRefUpdated;
  private final Project.NameKey projectName;
  private final Repository repository;
  private final BatchRefUpdate batch;
  private final CommitBuilder commit;
  // Whether a commit identical to its parent tree may still be created.
  private boolean allowEmpty;
  // Whether a Change-Id trailer should be inserted into the commit message.
  private boolean insertChangeId;
  // Whether close() should also close the underlying Repository handle.
  private boolean closeRepository;
  // Set only via setAuthor(); used to attribute the ref-updated event.
  private IdentifiedUser author;

  @Inject
  public MetaDataUpdate(
      GitReferenceUpdated gitRefUpdated,
      @Assisted Project.NameKey projectName,
      @Assisted Repository repository,
      @Assisted @Nullable BatchRefUpdate batch) {
    this.gitRefUpdated = gitRefUpdated;
    this.projectName = projectName;
    this.repository = repository;
    this.batch = batch;
    this.commit = new CommitBuilder();
  }

  public MetaDataUpdate(
      GitReferenceUpdated gitRefUpdated, Project.NameKey projectName, Repository repository) {
    this(gitRefUpdated, projectName, repository, null);
  }

  /** Set the commit message used when committing the update. */
  public void setMessage(String message) {
    getCommitBuilder().setMessage(message);
  }

  /**
   * Set the author of the commit. Requires the committer ident to already be set on the
   * commit builder, since the author ident is derived from it.
   */
  public void setAuthor(IdentifiedUser author) {
    this.author = author;
    getCommitBuilder().setAuthor(author.newCommitterIdent(getCommitBuilder().getCommitter()));
  }

  public void setAllowEmpty(boolean allowEmpty) {
    this.allowEmpty = allowEmpty;
  }

  public void setInsertChangeId(boolean insertChangeId) {
    this.insertChangeId = insertChangeId;
  }

  public void setCloseRepository(boolean closeRepository) {
    this.closeRepository = closeRepository;
  }

  /** Returns batch in which to run the update, or {@code null} for no batch. */
  BatchRefUpdate getBatch() {
    return batch;
  }

  /** Close the cached Repository handle. */
  @Override
  public void close() {
    if (closeRepository) {
      getRepository().close();
    }
  }

  public Project.NameKey getProjectName() {
    return projectName;
  }

  public Repository getRepository() {
    return repository;
  }

  boolean allowEmpty() {
    return allowEmpty;
  }

  boolean insertChangeId() {
    return insertChangeId;
  }

  public CommitBuilder getCommitBuilder() {
    return commit;
  }

  /** Fires the ref-updated event, attributed to the author if one was set. */
  protected void fireGitRefUpdatedEvent(RefUpdate ru) {
    gitRefUpdated.fire(projectName, ru, author == null ? null : author.state());
  }
}
/* * Copyright 2015 Hippo Seven * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hippo.nimingban.ui; import android.content.Intent; import android.os.Bundle; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentTransaction; import android.text.TextUtils; import android.view.Gravity; import android.view.View; import com.hippo.nimingban.GuideHelper; import com.hippo.nimingban.R; import com.hippo.nimingban.client.data.Site; import com.hippo.nimingban.ui.fragment.BaseFragment; import com.hippo.nimingban.ui.fragment.FragmentHost; import com.hippo.nimingban.ui.fragment.PostFragment; import com.hippo.nimingban.ui.fragment.TypeSendFragment; import com.hippo.nimingban.util.Settings; import com.hippo.nimingban.widget.PostLayout; import com.hippo.yorozuya.LayoutUtils; import com.hippo.yorozuya.ResourcesUtils; public final class PostActivity extends SwipeBackActivity implements FragmentHost, PostFragment.Callback, TypeSendFragment.Callback { public static final String ACTION_POST = "com.hippo.nimingban.ui.PostActivity.action.POST"; public static final String ACTION_SITE_ID = "com.hippo.nimingban.ui.PostActivity.action.SITE_ID"; public static final String ACTION_SITE_REPLY_ID = "com.hippo.nimingban.ui.PostActivity.action.SITE_REPLY_ID"; public static final String KEY_POST = "post"; public static final String KEY_SITE = "site"; public static final String KEY_ID = "id"; public static final String 
TAG_FRAGMENT_POST = "post"; public static final String TAG_FRAGMENT_TYPE_SEND = "type_send"; private PostLayout mPostLayout; @Override protected int getLightThemeResId() { return Settings.getColorStatusBar() ? R.style.SwipeActivity : R.style.SwipeActivity_NoStatus; } @Override protected int getDarkThemeResId() { return Settings.getColorStatusBar() ? R.style.SwipeActivity_Dark : R.style.SwipeActivity_Dark_NoStatus; } private Bundle createArgs() { Bundle bundle = new Bundle(); Intent intent = getIntent(); if (intent != null) { bundle.putString(PostFragment.KEY_ACTION, intent.getAction()); bundle.putParcelable(PostFragment.KEY_DATA, intent.getData()); bundle.putInt(PostFragment.KEY_SITE, intent.getIntExtra(KEY_SITE, -1)); bundle.putString(PostFragment.KEY_ID, intent.getStringExtra(KEY_ID)); bundle.putParcelable(PostFragment.KEY_POST, intent.getParcelableExtra(KEY_POST)); } return bundle; } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setStatusBarColor(ResourcesUtils.getAttrColor(this, R.attr.colorPrimaryDark)); setContentView(R.layout.activity_post); mPostLayout = (PostLayout) findViewById(R.id.fragment_container); if (mPostLayout != null) { if (savedInstanceState == null) { PostFragment postFragment = new PostFragment(); postFragment.setArguments(createArgs()); postFragment.setFragmentHost(this); postFragment.setCallback(this); FragmentTransaction transaction = getSupportFragmentManager().beginTransaction(); transaction.add(R.id.fragment_container, postFragment, TAG_FRAGMENT_POST); transaction.commitAllowingStateLoss(); } else { FragmentManager fragmentManager = getSupportFragmentManager(); PostFragment postFragment = (PostFragment) fragmentManager.findFragmentByTag(TAG_FRAGMENT_POST); if (postFragment != null) { postFragment.setFragmentHost(this); postFragment.setCallback(this); } TypeSendFragment typeSendFragment = (TypeSendFragment) fragmentManager.findFragmentByTag(TAG_FRAGMENT_TYPE_SEND); if (typeSendFragment 
!= null) { typeSendFragment.setFragmentHost(this); typeSendFragment.setCallback(this); } } } } @Override public void onBackPressed() { FragmentManager fragmentManager = getSupportFragmentManager(); Fragment fragment = fragmentManager.findFragmentByTag(TAG_FRAGMENT_TYPE_SEND); if (fragment != null && fragment instanceof TypeSendFragment) { TypeSendFragment typeSendFragment = (TypeSendFragment) fragment; if (mPostLayout.getTypeSendState() == PostLayout.STATE_HIDE) { mPostLayout.showTypeSend(); } else if (typeSendFragment.checkBeforeFinish()) { typeSendFragment.getFragmentHost().finishFragment(typeSendFragment); } } else { super.onBackPressed(); } } private void showSwipeGuide() { new GuideHelper.Builder(this) .setColor(ResourcesUtils.getAttrColor(this, R.attr.colorPrimary)) .setPadding(LayoutUtils.dp2pix(this, 16)) .setMessagePosition(Gravity.TOP) .setMessage(getString(R.string.swipe_toolbar_hide_show)) .setButton(getString(R.string.get_it)) .setBackgroundColor(0x73000000) .setOnDissmisListener(new View.OnClickListener() { @Override public void onClick(View v) { Settings.putGuideTypeSend(false); } }).show(); } @Override public void reply(Site site, String id, String presetText, boolean report) { if (Settings.getGuideTypeSend()) { showSwipeGuide(); } FragmentManager fragmentManager = getSupportFragmentManager(); Fragment fragment = fragmentManager.findFragmentByTag(TAG_FRAGMENT_TYPE_SEND); if (fragment == null && !TextUtils.isEmpty(id)) { Bundle args = new Bundle(); args.putString(TypeSendFragment.KEY_ACTION, report ? 
TypeSendFragment.ACTION_REPORT : TypeSendFragment.ACTION_REPLY); args.putInt(TypeSendFragment.KEY_SITE, site.getId()); args.putString(TypeSendFragment.KEY_ID, id); args.putString(TypeSendFragment.KEY_TEXT, presetText); TypeSendFragment typeSendFragment = new TypeSendFragment(); typeSendFragment.setArguments(args); typeSendFragment.setFragmentHost(this); typeSendFragment.setCallback(this); FragmentTransaction transaction = getSupportFragmentManager().beginTransaction(); transaction.setCustomAnimations(R.anim.fragment_translate_in, R.anim.fragment_translate_out); transaction.add(R.id.fragment_container, typeSendFragment, TAG_FRAGMENT_TYPE_SEND); transaction.commitAllowingStateLoss(); getSwipeBackLayout().setSwipeEnabled(false); } } @Override public void onClickBack(TypeSendFragment fragment) { if (fragment.checkBeforeFinish()) { fragment.getFragmentHost().finishFragment(fragment); } else { mPostLayout.showTypeSend(); } } @Override public void finishFragment(BaseFragment fragment) { if (fragment instanceof PostFragment) { finish(); } else if (fragment instanceof TypeSendFragment) { FragmentManager fragmentManager = getSupportFragmentManager(); FragmentTransaction transaction = fragmentManager.beginTransaction(); transaction.setCustomAnimations(R.anim.fragment_translate_in, R.anim.fragment_translate_out); transaction.remove(fragment); transaction.commitAllowingStateLoss(); getSwipeBackLayout().setSwipeEnabled(true); mPostLayout.onRemoveTypeSend(); } } }
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.transcribe.model;

import java.io.Serializable;
import java.util.Objects;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * Describes the input media file in a transcription request.
 * </p>
 * <p>
 * Both URIs must point to S3 objects in the same region as the API endpoint being called, e.g.
 * {@code s3://AWSDOC-EXAMPLE-BUCKET/mediadocs/example.mp4}. See <a
 * href="https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMetadata.html#object-keys">Object Keys</a> in the
 * <i>Amazon S3 Developer Guide</i> for S3 object naming rules.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/transcribe-2017-10-26/Media" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class Media implements Serializable, Cloneable, StructuredPojo {

    /** S3 URI of the input media file (must be in the same region as the API endpoint). */
    private String mediaFileUri;

    /** S3 URI for the redacted output media file; only supported for call analytics jobs. */
    private String redactedMediaFileUri;

    /**
     * Sets the S3 object location of the input media file.
     *
     * @param mediaFileUri
     *        S3 URI of the input media file, e.g. {@code s3://AWSDOC-EXAMPLE-BUCKET/example.mp4}.
     */
    public void setMediaFileUri(String mediaFileUri) {
        this.mediaFileUri = mediaFileUri;
    }

    /**
     * Returns the S3 object location of the input media file.
     *
     * @return S3 URI of the input media file.
     */
    public String getMediaFileUri() {
        return this.mediaFileUri;
    }

    /**
     * Sets the S3 object location of the input media file.
     *
     * @param mediaFileUri
     *        S3 URI of the input media file.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Media withMediaFileUri(String mediaFileUri) {
        setMediaFileUri(mediaFileUri);
        return this;
    }

    /**
     * Sets the S3 object location for your redacted output media file.
     *
     * @param redactedMediaFileUri
     *        S3 URI for the redacted output media; only supported for call analytics jobs.
     */
    public void setRedactedMediaFileUri(String redactedMediaFileUri) {
        this.redactedMediaFileUri = redactedMediaFileUri;
    }

    /**
     * Returns the S3 object location for your redacted output media file.
     *
     * @return S3 URI for the redacted output media; only supported for call analytics jobs.
     */
    public String getRedactedMediaFileUri() {
        return this.redactedMediaFileUri;
    }

    /**
     * Sets the S3 object location for your redacted output media file.
     *
     * @param redactedMediaFileUri
     *        S3 URI for the redacted output media; only supported for call analytics jobs.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Media withRedactedMediaFileUri(String redactedMediaFileUri) {
        setRedactedMediaFileUri(redactedMediaFileUri);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder("{");
        if (getMediaFileUri() != null) {
            buf.append("MediaFileUri: ").append(getMediaFileUri()).append(",");
        }
        if (getRedactedMediaFileUri() != null) {
            buf.append("RedactedMediaFileUri: ").append(getRedactedMediaFileUri());
        }
        return buf.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof Media)) {
            return false;
        }
        Media that = (Media) obj;
        return Objects.equals(getMediaFileUri(), that.getMediaFileUri())
                && Objects.equals(getRedactedMediaFileUri(), that.getRedactedMediaFileUri());
    }

    @Override
    public int hashCode() {
        // Objects.hash reproduces the 31-based accumulation of the generated code exactly.
        return Objects.hash(getMediaFileUri(), getRedactedMediaFileUri());
    }

    @Override
    public Media clone() {
        try {
            return (Media) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.transcribe.model.transform.MediaMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
package net.blay09.mods.trashslot.client.gui; import com.mojang.blaze3d.platform.GlStateManager; import net.blay09.mods.trashslot.TrashSlot; import net.blay09.mods.trashslot.TrashSlotConfig; import net.blay09.mods.trashslot.api.IGuiContainerLayout; import net.blay09.mods.trashslot.api.SlotRenderStyle; import net.blay09.mods.trashslot.api.Snap; import net.blay09.mods.trashslot.client.ContainerSettings; import net.blay09.mods.trashslot.client.SlotTrash; import net.blay09.mods.trashslot.client.TrashSlotGui; import net.blay09.mods.trashslot.client.deletion.DeletionProvider; import net.minecraft.client.gui.AbstractGui; import net.minecraft.client.gui.screen.Screen; import net.minecraft.client.gui.screen.inventory.ContainerScreen; import net.minecraft.client.renderer.Rectangle2d; import net.minecraft.util.ResourceLocation; import net.minecraft.util.math.MathHelper; import java.awt.*; public class GuiTrashSlot extends AbstractGui { private static final ResourceLocation texture = new ResourceLocation(TrashSlot.MOD_ID, "textures/gui/slot.png"); private static final int SNAP_SIZE = 7; private final TrashSlotGui trashSlotGui; private final ContainerScreen<?> gui; private final IGuiContainerLayout layout; private final ContainerSettings settings; private final SlotTrash trashSlot; private SlotRenderStyle renderStyle = SlotRenderStyle.LONE; private boolean wasMouseDown; private boolean isDragging; private int dragStartX; private int dragStartY; public GuiTrashSlot(TrashSlotGui trashSlotGui, ContainerScreen<?> gui, IGuiContainerLayout layout, ContainerSettings settings, SlotTrash trashSlot) { this.trashSlotGui = trashSlotGui; this.gui = gui; this.layout = layout; this.settings = settings; this.trashSlot = trashSlot; } public boolean isInside(int mouseX, int mouseY) { int anchoredX = getAnchoredX(); int anchoredY = getAnchoredY(); int renderX = anchoredX + renderStyle.getRenderOffsetX() + layout.getSlotOffsetX(gui, renderStyle); int renderY = anchoredY + 
renderStyle.getRenderOffsetY() + layout.getSlotOffsetY(gui, renderStyle); return mouseX >= renderX && mouseY >= renderY && mouseX < renderX + renderStyle.getRenderWidth() && mouseY < renderY + renderStyle.getRenderHeight(); } public void update(int mouseX, int mouseY) { int anchoredX = getAnchoredX(); int anchoredY = getAnchoredY(); int renderX = anchoredX + renderStyle.getRenderOffsetX() + layout.getSlotOffsetX(gui, renderStyle); int renderY = anchoredY + renderStyle.getRenderOffsetY() + layout.getSlotOffsetY(gui, renderStyle); boolean isMouseOver = mouseX >= renderX && mouseY >= renderY && mouseX < renderX + renderStyle.getRenderWidth() && mouseY < renderY + renderStyle.getRenderHeight(); if (trashSlotGui.isLeftMouseDown()) { if (!isDragging && isMouseOver && !wasMouseDown) { if (gui.getMinecraft().player.inventory.getItemStack().isEmpty() && (!trashSlot.getHasStack() || !gui.isSlotSelected(trashSlot, mouseX, mouseY))) { dragStartX = renderX - mouseX; dragStartY = renderY - mouseY; isDragging = true; } } wasMouseDown = true; } else { if (isDragging) { settings.save(TrashSlotConfig.clientConfig); isDragging = false; } wasMouseDown = false; } if (isDragging) { int targetX = mouseX + dragStartX; int targetY = mouseY + dragStartY; for (Rectangle collisionArea : layout.getCollisionAreas(gui)) { int targetRight = targetX + renderStyle.getWidth(); int targetBottom = targetY + renderStyle.getHeight(); int rectRight = collisionArea.x + collisionArea.width; int rectBottom = collisionArea.y + collisionArea.height; if (targetRight >= collisionArea.x && targetX < rectRight && targetBottom >= collisionArea.y && targetY < rectBottom) { int distLeft = targetRight - collisionArea.x; int distRight = rectRight - targetX; int distTop = targetBottom - collisionArea.y; int distBottom = rectBottom - targetY; if (anchoredX >= collisionArea.x && anchoredX < collisionArea.x + collisionArea.width) { targetY = distTop < distBottom ? 
collisionArea.y - renderStyle.getHeight() : collisionArea.y + collisionArea.height; } else { targetX = distLeft < distRight ? collisionArea.x - renderStyle.getWidth() : collisionArea.x + collisionArea.width; } } } if (!Screen.hasShiftDown()) { int bestSnapDist = Integer.MAX_VALUE; Snap bestSnap = null; for (Snap snap : layout.getSnaps(gui, renderStyle)) { int dist = Integer.MAX_VALUE; switch (snap.getType()) { case HORIZONTAL: dist = Math.abs(snap.getY() - targetY); break; case VERTICAL: dist = Math.abs(snap.getX() - targetX); break; case FIXED: int distX = snap.getX() - targetX; int distY = snap.getY() - targetY; dist = (int) Math.sqrt(distX * distX + distY * distY); break; } if (dist < SNAP_SIZE && dist < bestSnapDist) { bestSnap = snap; bestSnapDist = dist; } } if (bestSnap != null) { if (bestSnap.getType() == Snap.Type.VERTICAL || bestSnap.getType() == Snap.Type.FIXED) { targetX = bestSnap.getX(); } if (bestSnap.getType() == Snap.Type.HORIZONTAL || bestSnap.getType() == Snap.Type.FIXED) { targetY = bestSnap.getY(); } } } targetX = MathHelper.clamp(targetX, 0, gui.width - renderStyle.getRenderWidth()); targetY = MathHelper.clamp(targetY, 0, gui.height - renderStyle.getRenderHeight()); settings.setSlotX(getUnanchoredX(targetX)); settings.setSlotY(getUnanchoredY(targetY)); } } public void drawBackground() { int renderX = getAnchoredX(); int renderY = getAnchoredY(); renderStyle = layout.getSlotRenderStyle(gui, renderX, renderY); trashSlot.xPos = renderX - gui.getGuiLeft() + renderStyle.getSlotOffsetX() + layout.getSlotOffsetX(gui, renderStyle); trashSlot.yPos = renderY - gui.getGuiTop() + renderStyle.getSlotOffsetY() + layout.getSlotOffsetY(gui, renderStyle); blitOffset = 1; GlStateManager.color4f(1f, 1f, 1f, 1f); gui.getMinecraft().getTextureManager().bindTexture(texture); renderX += renderStyle.getRenderOffsetX() + layout.getSlotOffsetX(gui, renderStyle); renderY += renderStyle.getRenderOffsetY() + layout.getSlotOffsetY(gui, renderStyle); DeletionProvider 
deletionProvider = TrashSlotConfig.getDeletionProvider(); int texOffsetX = 0; if (deletionProvider == null || !deletionProvider.canUndeleteLast()) { texOffsetX = 64; } switch (renderStyle) { case LONE: blit(renderX, renderY, texOffsetX, 56, renderStyle.getRenderWidth(), renderStyle.getRenderHeight()); break; case ATTACH_BOTTOM_CENTER: blit(renderX, renderY, texOffsetX, 0, renderStyle.getRenderWidth(), renderStyle.getRenderHeight()); blit(renderX, renderY, texOffsetX + 50, 29, 4, 4); blit(renderX + renderStyle.getRenderWidth() - 4, renderY, texOffsetX + 54, 29, 4, 4); break; case ATTACH_BOTTOM_LEFT: blit(renderX, renderY, texOffsetX, 0, renderStyle.getRenderWidth(), renderStyle.getRenderHeight()); blit(renderX + renderStyle.getRenderWidth() - 4, renderY, texOffsetX + 54, 29, 4, 4); break; case ATTACH_BOTTOM_RIGHT: blit(renderX, renderY, texOffsetX, 0, renderStyle.getRenderWidth(), renderStyle.getRenderHeight()); blit(renderX, renderY, texOffsetX + 50, 29, 4, 4); break; case ATTACH_TOP_CENTER: blit(renderX, renderY, texOffsetX + 32, 0, renderStyle.getRenderWidth(), renderStyle.getRenderHeight()); blit(renderX, renderY + renderStyle.getRenderHeight() - 4, texOffsetX + 50, 25, 4, 4); blit(renderX + renderStyle.getRenderWidth() - 4, renderY + renderStyle.getRenderHeight() - 4, texOffsetX + 54, 25, 4, 4); break; case ATTACH_TOP_LEFT: blit(renderX, renderY, texOffsetX + 32, 0, renderStyle.getRenderWidth(), renderStyle.getRenderHeight()); blit(renderX + renderStyle.getRenderWidth() - 4, renderY + renderStyle.getRenderHeight() - 4, texOffsetX + 54, 25, 4, 4); break; case ATTACH_TOP_RIGHT: blit(renderX, renderY, texOffsetX + 32, 0, renderStyle.getRenderWidth(), renderStyle.getRenderHeight()); blit(renderX, renderY + renderStyle.getRenderHeight() - 4, texOffsetX + 50, 25, 4, 4); break; case ATTACH_LEFT_CENTER: blit(renderX, renderY, texOffsetX + 25, 25, renderStyle.getRenderWidth(), renderStyle.getRenderHeight()); blit(renderX + renderStyle.getRenderWidth() - 4, renderY, 
texOffsetX + 50, 33, 4, 4); blit(renderX + renderStyle.getRenderWidth() - 4, renderY + renderStyle.getRenderHeight() - 4, texOffsetX + 50, 37, 4, 4); break; case ATTACH_LEFT_TOP: blit(renderX, renderY, texOffsetX + 25, 25, renderStyle.getRenderWidth(), renderStyle.getRenderHeight()); blit(renderX + renderStyle.getRenderWidth() - 4, renderY + renderStyle.getRenderHeight() - 4, texOffsetX + 50, 37, 4, 4); break; case ATTACH_LEFT_BOTTOM: blit(renderX, renderY, texOffsetX + 25, 25, renderStyle.getRenderWidth(), renderStyle.getRenderHeight()); blit(renderX + renderStyle.getRenderWidth() - 4, renderY, texOffsetX + 50, 33, 4, 4); break; case ATTACH_RIGHT_CENTER: blit(renderX, renderY, texOffsetX, 25, renderStyle.getRenderWidth(), renderStyle.getRenderHeight()); blit(renderX, renderY, texOffsetX + 54, 33, 4, 4); blit(renderX, renderY + renderStyle.getRenderHeight() - 4, texOffsetX + 54, 37, 4, 4); break; case ATTACH_RIGHT_TOP: blit(renderX, renderY, texOffsetX, 25, renderStyle.getRenderWidth(), renderStyle.getRenderHeight()); blit(renderX, renderY + renderStyle.getRenderHeight() - 4, texOffsetX + 54, 37, 4, 4); break; case ATTACH_RIGHT_BOTTOM: blit(renderX, renderY, texOffsetX, 25, renderStyle.getRenderWidth(), renderStyle.getRenderHeight()); blit(renderX, renderY, texOffsetX + 54, 33, 4, 4); break; } blitOffset = 0; } private int getAnchoredX() { return MathHelper.clamp(settings.getSlotX() + gui.getGuiLeft() + (int) (gui.getXSize() * settings.getAnchorX()), 0, gui.width - renderStyle.getRenderWidth()); } private int getUnanchoredX(int x) { return x - gui.getGuiLeft() - (int) (gui.getXSize() * settings.getAnchorX()); } private int getAnchoredY() { return MathHelper.clamp(settings.getSlotY() + gui.getGuiTop() + (int) (gui.getYSize() * settings.getAnchorY()), 0, gui.width - renderStyle.getRenderWidth()); } private int getUnanchoredY(int y) { return y - gui.getGuiTop() - (int) (gui.getYSize() * settings.getAnchorY()); } public boolean isVisible() { return 
settings.isEnabled(); } public Rectangle2d getRectangle() { int anchoredX = getAnchoredX(); int anchoredY = getAnchoredY(); int renderX = anchoredX + renderStyle.getRenderOffsetX() + layout.getSlotOffsetX(gui, renderStyle); int renderY = anchoredY + renderStyle.getRenderOffsetY() + layout.getSlotOffsetY(gui, renderStyle); return new Rectangle2d(renderX, renderY, renderStyle.getRenderWidth(), renderStyle.getRenderHeight()); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.codehaus.groovy.ant; import org.apache.tools.ant.BuildException; import org.apache.tools.ant.DirectoryScanner; import org.apache.tools.ant.Task; import org.apache.tools.ant.types.DirSet; import org.apache.tools.ant.types.Path; import org.apache.tools.ant.types.PatternSet; import org.codehaus.groovy.runtime.ResourceGroovyMethods; import org.codehaus.groovy.tools.groovydoc.ClasspathResourceManager; import org.codehaus.groovy.tools.groovydoc.FileOutputTool; import org.codehaus.groovy.tools.groovydoc.GroovyDocTool; import org.codehaus.groovy.tools.groovydoc.LinkArgument; import org.codehaus.groovy.tools.groovydoc.gstringTemplates.GroovyDocTemplateInfo; import java.io.File; import java.io.FilenameFilter; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Properties; import java.util.StringTokenizer; /** * Access to the GroovyDoc tool from Ant. 
 */
public class Groovydoc extends Task {
    private final LoggingHelper log = new LoggingHelper(this);
    // Where to look for sources and where to write the generated HTML.
    private Path sourcePath;
    private File destDir;
    private final List<String> packageNames;
    private final List<String> excludePackageNames;
    // Text injected into the generated pages; all default to "Groovy Documentation".
    private String windowTitle = "Groovy Documentation";
    private String docTitle = "Groovy Documentation";
    private String footer = "Groovy Documentation";
    private String header = "Groovy Documentation";
    // Visibility flags; checkScopeProperties() later enforces that at most one is set
    // and defaults to protected when none is.
    private Boolean privateScope;
    private Boolean protectedScope;
    private Boolean packageScope;
    private Boolean publicScope;
    private Boolean author;
    private Boolean processScripts;
    private Boolean includeMainForScripts;
    private boolean useDefaultExcludes;
    private boolean includeNoSourcePackages;
    private Boolean noTimestamp;
    private Boolean noVersionStamp;
    private final List<DirSet> packageSets;
    // Relative paths of every source file discovered by parsePackages().
    private final List<String> sourceFilesToDoc;
    private final List<LinkArgument> links = new ArrayList<LinkArgument>();
    private File overviewFile;
    private File styleSheetFile;
    // dev note: update javadoc comment for #setExtensions(String) if updating below
    private String extensions = ".java:.groovy:.gv:.gvy:.gsh";
    private String charset;
    private String fileEncoding;

    /**
     * Creates the task with its defaults: no scope selected yet, default excludes on,
     * author shown, scripts (and their main method) processed, stamps enabled.
     */
    public Groovydoc() {
        packageNames = new ArrayList<String>();
        excludePackageNames = new ArrayList<String>();
        packageSets = new ArrayList<DirSet>();
        sourceFilesToDoc = new ArrayList<String>();
        privateScope = false;
        protectedScope = false;
        publicScope = false;
        packageScope = false;
        useDefaultExcludes = true;
        includeNoSourcePackages = false;
        author = true;
        processScripts = true;
        includeMainForScripts = true;
        noTimestamp = false;
        noVersionStamp = false;
    }

    /**
     * Specify where to find source file
     *
     * @param src a Path instance containing the various source directories.
     */
    public void setSourcepath(Path src) {
        // Repeated calls accumulate onto one Path rather than replacing it.
        if (sourcePath == null) {
            sourcePath = src;
        } else {
            sourcePath.append(src);
        }
    }

    /**
     * Set the directory where the Groovydoc output will be generated.
     *
     * @param dir the destination directory.
     */
    public void setDestdir(File dir) {
        destDir = dir;
        // todo: maybe tell groovydoc to use file output
    }

    /**
     * If set to false, author will not be displayed.
     * Currently not used.
     *
     * @param author new value
     */
    public void setAuthor(boolean author) {
        this.author = author;
    }

    /**
     * If set to true, hidden timestamp will not appear within generated HTML.
     *
     * @param noTimestamp new value
     */
    public void setNoTimestamp(boolean noTimestamp) {
        this.noTimestamp = noTimestamp;
    }

    /**
     * If set to true, hidden version stamp will not appear within generated HTML.
     *
     * @param noVersionStamp new value
     */
    public void setNoVersionStamp(boolean noVersionStamp) {
        this.noVersionStamp = noVersionStamp;
    }

    /**
     * If set to false, Scripts will not be processed.
     * Defaults to true.
     *
     * @param processScripts new value
     */
    public void setProcessScripts(boolean processScripts) {
        this.processScripts = processScripts;
    }

    /**
     * If set to false, 'public static void main' method will not be displayed.
     * Defaults to true. Ignored when not processing Scripts.
     *
     * @param includeMainForScripts new value
     */
    public void setIncludeMainForScripts(boolean includeMainForScripts) {
        this.includeMainForScripts = includeMainForScripts;
    }

    /**
     * A colon-separated list of filename extensions to look for when searching for files to process in a given directory.
     * Default value: <code>.java:.groovy:.gv:.gvy:.gsh</code>
     *
     * @param extensions new value
     */
    public void setExtensions(String extensions) {
        this.extensions = extensions;
    }

    /**
     * Set the package names to be processed.
     *
     * @param packages a comma separated list of packages specs
     *                 (may be wildcarded).
     */
    public void setPackagenames(String packages) {
        StringTokenizer tok = new StringTokenizer(packages, ",");
        while (tok.hasMoreTokens()) {
            String packageName = tok.nextToken();
            packageNames.add(packageName);
        }
    }

    public void setUse(boolean b) {
        //ignore as 'use external file' irrelevant with groovydoc :-)
    }

    /**
     * Set the title to be placed in the HTML &lt;title&gt; tag of the
     * generated documentation.
     *
     * @param title the window title to use.
     */
    public void setWindowtitle(String title) {
        windowTitle = title;
    }

    /**
     * Set the title for the overview page.
     *
     * @param htmlTitle the html to use for the title.
     */
    public void setDoctitle(String htmlTitle) {
        docTitle = htmlTitle;
    }

    /**
     * Specify the file containing the overview to be included in the generated documentation.
     *
     * @param file the overview file
     */
    public void setOverview(File file) {
        overviewFile = file;
    }

    /**
     * Indicates the access mode or scope of interest: one of public, protected, package, or private.
     * Package scoped access is ignored for fields of Groovy classes where they correspond to properties.
     *
     * @param access one of public, protected, package, or private
     */
    public void setAccess(String access) {
        // NOTE(review): only sets the matching flag, never clears the others; combining
        // this attribute with the boolean setters can trip checkScopeProperties().
        if ("public".equals(access)) publicScope = true;
        else if ("protected".equals(access)) protectedScope = true;
        else if ("package".equals(access)) packageScope = true;
        else if ("private".equals(access)) privateScope = true;
    }

    /**
     * Indicate whether all classes and
     * members are to be included in the scope processed.
     *
     * @param b true if scope is to be private level.
     */
    public void setPrivate(boolean b) {
        privateScope = b;
    }

    /**
     * Indicate whether only public classes and members are to be included in the scope processed.
     *
     * @param b true if scope only includes public level classes and members
     */
    public void setPublic(boolean b) {
        publicScope = b;
    }

    /**
     * Indicate whether only protected and public classes and members are to be included in the scope processed.
     *
     * @param b true if scope includes protected level classes and members
     */
    public void setProtected(boolean b) {
        protectedScope = b;
    }

    /**
     * Indicate whether only package, protected and public classes and members are to be included in the scope processed.
     * Package scoped access is ignored for fields of Groovy classes where they correspond to properties.
     *
     * @param b true if scope includes package level classes and members
     */
    public void setPackage(boolean b) {
        packageScope = b;
    }

    /**
     * Set the footer to place at the bottom of each generated html page.
     *
     * @param footer the footer value
     */
    public void setFooter(String footer) {
        this.footer = footer;
    }

    /**
     * Specifies the header text to be placed at the top of each output file.
     * The header will be placed to the right of the upper navigation bar.
     * It may contain HTML tags and white space, though if it does, it must
     * be enclosed in quotes. Any internal quotation marks within the header
     * may have to be escaped.
     *
     * @param header the header value
     */
    public void setHeader(String header) {
        this.header = header;
    }

    /**
     * Specifies the charset to be used in the templates, i.e.&#160;the value output within:
     * &lt;meta http-equiv="Content-Type" content="text/html; charset=<em>charset</em>"&gt;.
     *
     * @param charset the charset value
     */
    public void setCharset(String charset) {
        this.charset = charset;
    }

    /**
     * Specifies the file encoding to be used for generated files. If <em>fileEncoding</em> is missing,
     * the <em>charset</em> encoding will be used for writing the files. If <em>fileEncoding</em> and
     * <em>charset</em> are missing, the file encoding will default to <em>Charset.defaultCharset()</em>.
     *
     * @param fileEncoding the file encoding
     */
    public void setFileEncoding(String fileEncoding) {
        this.fileEncoding = fileEncoding;
    }

    /**
     * Specifies a stylesheet file to use. If not specified,
     * a default one will be generated for you.
* * @param styleSheetFile the css stylesheet file to use */ public void setStyleSheetFile(File styleSheetFile) { this.styleSheetFile = styleSheetFile; } /** * Add the directories matched by the nested dirsets to the resulting * packages list and the base directories of the dirsets to the Path. * It also handles the packages and excludepackages attributes and * elements. * * @param resultantPackages a list to which we add the packages found * @param sourcePath a path to which we add each basedir found * @since 1.5 */ private void parsePackages(List<String> resultantPackages, Path sourcePath) { List<String> addedPackages = new ArrayList<String>(); List<DirSet> dirSets = new ArrayList<DirSet>(packageSets); // for each sourcePath entry, add a directoryset with includes // taken from packagenames attribute and nested package // elements and excludes taken from excludepackages attribute // and nested excludepackage elements if (this.sourcePath != null) { PatternSet ps = new PatternSet(); if (!packageNames.isEmpty()) { for (String pn : packageNames) { String pkg = pn.replace('.', '/'); if (pkg.endsWith("*")) { pkg += "*"; } ps.createInclude().setName(pkg); } } else { ps.createInclude().setName("**"); } for (String epn : excludePackageNames) { String pkg = epn.replace('.', '/'); if (pkg.endsWith("*")) { pkg += "*"; } ps.createExclude().setName(pkg); } String[] pathElements = this.sourcePath.list(); for (String pathElement : pathElements) { File dir = new File(pathElement); if (dir.isDirectory()) { DirSet ds = new DirSet(); ds.setDefaultexcludes(useDefaultExcludes); ds.setDir(dir); ds.createPatternSet().addConfiguredPatternset(ps); dirSets.add(ds); } else { log.warn("Skipping " + pathElement + " since it is no directory."); } } } for (DirSet ds : dirSets) { File baseDir = ds.getDir(getProject()); log.debug("scanning " + baseDir + " for packages."); DirectoryScanner dsc = ds.getDirectoryScanner(getProject()); String[] dirs = dsc.getIncludedDirectories(); boolean 
containsPackages = false; for (String dir : dirs) { // are there any groovy or java files in this directory? File pd = new File(baseDir, dir); String[] files = pd.list(new FilenameFilter() { public boolean accept(File dir1, String name) { if (!includeNoSourcePackages && name.equals("package.html")) return true; final StringTokenizer tokenizer = new StringTokenizer(extensions, ":"); while (tokenizer.hasMoreTokens()) { String ext = tokenizer.nextToken(); if (name.endsWith(ext)) return true; } return false; } }); for (String filename : Arrays.asList(files)) { sourceFilesToDoc.add(dir + File.separator + filename); } if (files.length > 0) { if ("".equals(dir)) { log.warn(baseDir + " contains source files in the default package," + " you must specify them as source files not packages."); } else { containsPackages = true; String pn = dir.replace(File.separatorChar, '.'); if (!addedPackages.contains(pn)) { addedPackages.add(pn); resultantPackages.add(pn); } } } } if (containsPackages) { // We don't need to care for duplicates here, // Path.list does it for us. 
sourcePath.createPathElement().setLocation(baseDir); } else { log.verbose(baseDir + " doesn't contain any packages, dropping it."); } } } public void execute() throws BuildException { List<String> packagesToDoc = new ArrayList<String>(); Path sourceDirs = new Path(getProject()); Properties properties = new Properties(); properties.setProperty("windowTitle", windowTitle); properties.setProperty("docTitle", docTitle); properties.setProperty("footer", footer); properties.setProperty("header", header); checkScopeProperties(properties); properties.setProperty("publicScope", publicScope.toString()); properties.setProperty("protectedScope", protectedScope.toString()); properties.setProperty("packageScope", packageScope.toString()); properties.setProperty("privateScope", privateScope.toString()); properties.setProperty("author", author.toString()); properties.setProperty("processScripts", processScripts.toString()); properties.setProperty("includeMainForScripts", includeMainForScripts.toString()); properties.setProperty("overviewFile", overviewFile != null ? overviewFile.getAbsolutePath() : ""); properties.setProperty("charset", charset != null ? charset : ""); properties.setProperty("fileEncoding", fileEncoding != null ? fileEncoding : ""); properties.setProperty("timestamp", Boolean.valueOf(!noTimestamp).toString()); properties.setProperty("versionStamp", Boolean.valueOf(!noVersionStamp).toString()); if (sourcePath != null) { sourceDirs.addExisting(sourcePath); } parsePackages(packagesToDoc, sourceDirs); GroovyDocTool htmlTool = new GroovyDocTool( new ClasspathResourceManager(), // we're gonna get the default templates out of the dist jar file sourcePath.list(), getDocTemplates(), getPackageTemplates(), getClassTemplates(), links, properties ); try { htmlTool.add(sourceFilesToDoc); FileOutputTool output = new FileOutputTool(); htmlTool.renderToOutput(output, destDir.getCanonicalPath()); // TODO push destDir through APIs? 
} catch (Exception e) { e.printStackTrace(); } // try to override the default stylesheet with custom specified one if needed if (styleSheetFile != null) { try { String css = ResourceGroovyMethods.getText(styleSheetFile); File outfile = new File(destDir, "stylesheet.css"); ResourceGroovyMethods.setText(outfile, css); } catch (IOException e) { System.out.println("Warning: Unable to copy specified stylesheet '" + styleSheetFile.getAbsolutePath() + "'. Using default stylesheet instead. Due to: " + e.getMessage()); } } } private void checkScopeProperties(Properties properties) { // make protected the default scope and check for invalid duplication int scopeCount = 0; if (packageScope) scopeCount++; if (privateScope) scopeCount++; if (protectedScope) scopeCount++; if (publicScope) scopeCount++; if (scopeCount == 0) { protectedScope = true; } else if (scopeCount > 1) { throw new BuildException("More than one of public, private, package, or protected scopes specified."); } } /** * Create link to Javadoc/GroovyDoc output at the given URL. * * @return link argument to configure */ public LinkArgument createLink() { LinkArgument result = new LinkArgument(); links.add(result); return result; } /** * Creates and returns an array of package template classpath entries. * <p> * This method is meant to be overridden by custom GroovyDoc implementations, using custom package templates. * * @return an array of package templates, whereas each entry is resolved as classpath entry, defaults to * {@link GroovyDocTemplateInfo#DEFAULT_PACKAGE_TEMPLATES}. */ protected String[] getPackageTemplates() { return GroovyDocTemplateInfo.DEFAULT_PACKAGE_TEMPLATES; } /** * Creates and returns an array of doc template classpath entries. * <p> * This method is meant to be overridden by custom GroovyDoc implementations, using custom doc templates. * * @return an array of doc templates, whereas each entry is resolved as classpath entry, defaults to * {@link GroovyDocTemplateInfo#DEFAULT_DOC_TEMPLATES}. 
*/ protected String[] getDocTemplates() { return GroovyDocTemplateInfo.DEFAULT_DOC_TEMPLATES; } /** * Creates and returns an array of class template classpath entries. * <p> * This method is meant to be overridden by custom GroovyDoc implementations, using custom class templates. * * @return an array of class templates, whereas each entry is resolved as classpath entry, defaults to * {@link GroovyDocTemplateInfo#DEFAULT_CLASS_TEMPLATES}. */ protected String[] getClassTemplates() { return GroovyDocTemplateInfo.DEFAULT_CLASS_TEMPLATES; } }
package com.fsck.k9.provider; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.LinkedHashMap; import java.util.List; import java.util.concurrent.BlockingQueue; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.Semaphore; import java.util.concurrent.SynchronousQueue; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import android.annotation.TargetApi; import android.app.Application; import android.content.ContentProvider; import android.content.ContentResolver; import android.content.ContentValues; import android.content.Context; import android.content.UriMatcher; import android.database.CharArrayBuffer; import android.database.ContentObserver; import android.database.CrossProcessCursor; import android.database.Cursor; import android.database.CursorWindow; import android.database.DataSetObserver; import android.database.MatrixCursor; import android.net.Uri; import android.os.Binder; import android.os.Build; import android.os.Bundle; import android.provider.BaseColumns; import android.util.Log; import com.fsck.k9.Account; import com.fsck.k9.AccountStats; import com.fsck.k9.BuildConfig; import com.fsck.k9.K9; import com.fsck.k9.Preferences; import com.fsck.k9.activity.FolderInfoHolder; import com.fsck.k9.activity.MessageInfoHolder; import com.fsck.k9.activity.MessageReference; import com.fsck.k9.controller.MessagingController; import com.fsck.k9.controller.MessagingListener; import com.fsck.k9.helper.MessageHelper; import com.fsck.k9.mail.Flag; import com.fsck.k9.mail.Message; import com.fsck.k9.mail.MessagingException; import com.fsck.k9.mailstore.LocalFolder; import com.fsck.k9.mailstore.LocalMessage; import com.fsck.k9.search.SearchAccount; public class MessageProvider extends ContentProvider { public static final String AUTHORITY = BuildConfig.APPLICATION_ID + 
".messageprovider"; public static final Uri CONTENT_URI = Uri.parse("content://" + AUTHORITY); private static final String[] DEFAULT_MESSAGE_PROJECTION = new String[] { MessageColumns._ID, MessageColumns.SEND_DATE, MessageColumns.SENDER, MessageColumns.SUBJECT, MessageColumns.PREVIEW, MessageColumns.ACCOUNT, MessageColumns.URI, MessageColumns.DELETE_URI, MessageColumns.SENDER_ADDRESS }; private static final String[] DEFAULT_ACCOUNT_PROJECTION = new String[] { AccountColumns.ACCOUNT_NUMBER, AccountColumns.ACCOUNT_NAME, }; private static final String[] UNREAD_PROJECTION = new String[] { UnreadColumns.ACCOUNT_NAME, UnreadColumns.UNREAD }; private UriMatcher uriMatcher = new UriMatcher(UriMatcher.NO_MATCH); private List<QueryHandler> queryHandlers = new ArrayList<QueryHandler>(); private MessageHelper messageHelper; /** * How many simultaneous cursors we can afford to expose at once */ Semaphore semaphore = new Semaphore(1); ScheduledExecutorService scheduledPool = Executors.newScheduledThreadPool(1); @Override public boolean onCreate() { messageHelper = MessageHelper.getInstance(getContext()); registerQueryHandler(new ThrottlingQueryHandler(new AccountsQueryHandler())); registerQueryHandler(new ThrottlingQueryHandler(new MessagesQueryHandler())); registerQueryHandler(new ThrottlingQueryHandler(new UnreadQueryHandler())); K9.registerApplicationAware(new K9.ApplicationAware() { @Override public void initializeComponent(final Application application) { Log.v(K9.LOG_TAG, "Registering content resolver notifier"); MessagingController.getInstance(application).addListener(new MessagingListener() { @Override public void folderStatusChanged(Account account, String folderName, int unreadMessageCount) { application.getContentResolver().notifyChange(CONTENT_URI, null); } }); } }); return true; } @Override public String getType(Uri uri) { if (K9.app == null) { return null; } if (K9.DEBUG) { Log.v(K9.LOG_TAG, "MessageProvider/getType: " + uri); } return null; } @Override public 
Cursor query(Uri uri, String[] projection, String selection, String[] selectionArgs, String sortOrder) { if (K9.app == null) { return null; } if (K9.DEBUG) { Log.v(K9.LOG_TAG, "MessageProvider/query: " + uri); } int code = uriMatcher.match(uri); if (code == -1) { throw new IllegalStateException("Unrecognized URI: " + uri); } Cursor cursor; try { QueryHandler handler = queryHandlers.get(code); cursor = handler.query(uri, projection, selection, selectionArgs, sortOrder); } catch (Exception e) { Log.e(K9.LOG_TAG, "Unable to execute query for URI: " + uri, e); return null; } return cursor; } @Override public int delete(Uri uri, String selection, String[] selectionArgs) { if (K9.app == null) { return 0; } if (K9.DEBUG) { Log.v(K9.LOG_TAG, "MessageProvider/delete: " + uri); } // Note: can only delete a message List<String> segments = uri.getPathSegments(); int accountId = Integer.parseInt(segments.get(1)); String folderName = segments.get(2); String msgUid = segments.get(3); // get account Account myAccount = null; for (Account account : Preferences.getPreferences(getContext()).getAccounts()) { if (account.getAccountNumber() == accountId) { myAccount = account; if (!account.isAvailable(getContext())) { Log.w(K9.LOG_TAG, "not deleting messages because account is unavailable at the moment"); return 0; } } } if (myAccount == null) { Log.e(K9.LOG_TAG, "Could not find account with id " + accountId); } if (myAccount != null) { MessageReference messageReference = new MessageReference(myAccount.getUuid(), folderName, msgUid, null); MessagingController controller = MessagingController.getInstance(getContext()); controller.deleteMessage(messageReference, null); } // FIXME return the actual number of deleted messages return 0; } @Override public Uri insert(Uri uri, ContentValues values) { if (K9.app == null) { return null; } if (K9.DEBUG) { Log.v(K9.LOG_TAG, "MessageProvider/insert: " + uri); } return null; } @Override public int update(Uri uri, ContentValues values, String 
selection, String[] selectionArgs) { if (K9.app == null) { return 0; } if (K9.DEBUG) { Log.v(K9.LOG_TAG, "MessageProvider/update: " + uri); } // TBD return 0; } /** * Register a {@link QueryHandler} to handle a certain {@link Uri} for * {@link #query(Uri, String[], String, String[], String)} */ protected void registerQueryHandler(QueryHandler handler) { if (queryHandlers.contains(handler)) { return; } queryHandlers.add(handler); int code = queryHandlers.indexOf(handler); uriMatcher.addURI(AUTHORITY, handler.getPath(), code); } public static class ReverseDateComparator implements Comparator<MessageInfoHolder> { @Override public int compare(MessageInfoHolder object2, MessageInfoHolder object1) { if (object1.compareDate == null) { return (object2.compareDate == null ? 0 : 1); } else if (object2.compareDate == null) { return -1; } else { return object1.compareDate.compareTo(object2.compareDate); } } } public interface MessageColumns extends BaseColumns { /** * The number of milliseconds since Jan. 1, 1970, midnight GMT. * * <P>Type: INTEGER (long)</P> */ String SEND_DATE = "date"; /** * <P>Type: TEXT</P> */ String SENDER = "sender"; /** * <P>Type: TEXT</P> */ String SENDER_ADDRESS = "senderAddress"; /** * <P>Type: TEXT</P> */ String SUBJECT = "subject"; /** * <P>Type: TEXT</P> */ String PREVIEW = "preview"; /** * <P>Type: BOOLEAN</P> */ String UNREAD = "unread"; /** * <P>Type: TEXT</P> */ String ACCOUNT = "account"; /** * <P>Type: INTEGER</P> */ String ACCOUNT_NUMBER = "accountNumber"; /** * <P>Type: BOOLEAN</P> */ String HAS_ATTACHMENTS = "hasAttachments"; /** * <P>Type: BOOLEAN</P> */ String HAS_STAR = "hasStar"; /** * <P>Type: INTEGER</P> */ String ACCOUNT_COLOR = "accountColor"; String URI = "uri"; String DELETE_URI = "delUri"; /** * @deprecated the field value is misnamed/misleading - present for compatibility purpose only. To be removed. 
         */
        @Deprecated
        String INCREMENT = "id";
    }

    public interface AccountColumns {
        /**
         * <P>Type: INTEGER</P>
         */
        String ACCOUNT_NUMBER = "accountNumber";
        /**
         * <P>Type: String</P>
         */
        String ACCOUNT_NAME = "accountName";

        String ACCOUNT_UUID = "accountUuid";
        String ACCOUNT_COLOR = "accountColor";
    }

    public interface UnreadColumns {
        /**
         * <P>Type: String</P>
         */
        String ACCOUNT_NAME = "accountName";
        /**
         * <P>Type: INTEGER</P>
         */
        String UNREAD = "unread";
    }

    protected interface QueryHandler {
        /**
         * The path this instance is able to respond to.
         */
        String getPath();

        Cursor query(Uri uri, String[] projection, String selection, String[] selectionArgs,
                     String sortOrder) throws Exception;
    }

    /**
     * Extracts a value from an object.
     */
    public interface FieldExtractor<T, K> {
        K getField(T source);
    }

    /**
     * Extracts the {@link LocalMessage#getId() ID} from the given {@link MessageInfoHolder}. The underlying
     * {@link Message} is expected to be a {@link LocalMessage}.
     */
    public static class IdExtractor implements FieldExtractor<MessageInfoHolder, Long> {
        @Override
        public Long getField(MessageInfoHolder source) {
            return source.message.getId();
        }
    }

    // Returns the same fixed count for every row (used for the _COUNT column).
    public static class CountExtractor<T> implements FieldExtractor<T, Integer> {
        private Integer count;

        public CountExtractor(int count) {
            this.count = count;
        }

        @Override
        public Integer getField(T source) {
            return count;
        }
    }

    public static class SubjectExtractor implements FieldExtractor<MessageInfoHolder, String> {
        @Override
        public String getField(MessageInfoHolder source) {
            return source.message.getSubject();
        }
    }

    // Sent date as epoch milliseconds.
    public static class SendDateExtractor implements FieldExtractor<MessageInfoHolder, Long> {
        @Override
        public Long getField(MessageInfoHolder source) {
            return source.message.getSentDate().getTime();
        }
    }

    public static class PreviewExtractor implements FieldExtractor<MessageInfoHolder, String> {
        @Override
        public String getField(MessageInfoHolder source) {
            return source.message.getPreview();
        }
    }

    public static class UriExtractor implements FieldExtractor<MessageInfoHolder, String> {
        @Override
        public String getField(MessageInfoHolder source) {
            return source.uri;
        }
    }

    /**
     * Builds a "delete_message/&lt;account&gt;/&lt;folder&gt;/&lt;uid&gt;" URI whose
     * segments match what {@code delete(Uri, ...)} parses back out.
     */
    public static class DeleteUriExtractor implements FieldExtractor<MessageInfoHolder, String> {
        @Override
        public String getField(MessageInfoHolder source) {
            LocalMessage message = source.message;
            int accountNumber = message.getAccount().getAccountNumber();
            return CONTENT_URI.buildUpon()
                    .appendPath("delete_message")
                    .appendPath(Integer.toString(accountNumber))
                    .appendPath(message.getFolder().getName())
                    .appendPath(message.getUid())
                    .build()
                    .toString();
        }
    }

    public static class SenderExtractor implements FieldExtractor<MessageInfoHolder, CharSequence> {
        @Override
        public CharSequence getField(MessageInfoHolder source) {
            return source.sender;
        }
    }

    public static class SenderAddressExtractor implements FieldExtractor<MessageInfoHolder, String> {
        @Override
        public String getField(MessageInfoHolder source) {
            return source.senderAddress;
        }
    }

    public static class AccountExtractor implements FieldExtractor<MessageInfoHolder, String> {
        @Override
        public String getField(MessageInfoHolder source) {
            return source.message.getAccount().getDescription();
        }
    }

    public static class AccountColorExtractor implements FieldExtractor<MessageInfoHolder, Integer> {
        @Override
        public Integer getField(MessageInfoHolder source) {
            return source.message.getAccount().getChipColor();
        }
    }

    public static class AccountNumberExtractor implements FieldExtractor<MessageInfoHolder, Integer> {
        @Override
        public Integer getField(MessageInfoHolder source) {
            return source.message.getAccount().getAccountNumber();
        }
    }

    public static class HasAttachmentsExtractor implements FieldExtractor<MessageInfoHolder, Boolean> {
        @Override
        public Boolean getField(MessageInfoHolder source) {
            return source.message.hasAttachments();
        }
    }

    public static class HasStarExtractor implements FieldExtractor<MessageInfoHolder, Boolean> {
        @Override
        public Boolean getField(MessageInfoHolder source) {
            return source.message.isSet(Flag.FLAGGED);
        }
    }

    public static class UnreadExtractor implements FieldExtractor<MessageInfoHolder, Boolean> {
        @Override
        public Boolean getField(MessageInfoHolder source) {
            return !source.read;
        }
    }

    /**
     * @deprecated having an incremental value has no real interest, implemented for compatibility only
     */
    @Deprecated
    public static class IncrementExtractor implements FieldExtractor<MessageInfoHolder, Integer> {
        // Stateful: returns 0, 1, 2, ... across successive rows of one cursor build.
        private int count = 0;

        @Override
        public Integer getField(MessageInfoHolder source) {
            return count++;
        }
    }

    /**
     * Retrieve messages from the integrated inbox.
     */
    protected class MessagesQueryHandler implements QueryHandler {
        @Override
        public String getPath() {
            return "inbox_messages/";
        }

        @Override
        public Cursor query(Uri uri, String[] projection, String selection, String[] selectionArgs,
                            String sortOrder) throws Exception {
            return getMessages(projection);
        }

        /**
         * Builds a MatrixCursor of the unified inbox messages, sorted newest first.
         * Blocks on a SynchronousQueue until the search listener delivers results.
         *
         * @param projection requested columns, or null for DEFAULT_MESSAGE_PROJECTION
         * @throws InterruptedException if interrupted while waiting for the search
         */
        protected MatrixCursor getMessages(String[] projection) throws InterruptedException {
            BlockingQueue<List<MessageInfoHolder>> queue = new SynchronousQueue<List<MessageInfoHolder>>();

            // new code for integrated inbox, only execute this once as it will be processed afterwards via the listener
            SearchAccount integratedInboxAccount = SearchAccount.createUnifiedInboxAccount(getContext());
            MessagingController msgController = MessagingController.getInstance(getContext());

            msgController.searchLocalMessages(integratedInboxAccount.getRelatedSearch(),
                    new MessageInfoHolderRetrieverListener(queue));

            List<MessageInfoHolder> holders = queue.take();

            // TODO add sort order parameter
            Collections.sort(holders, new ReverseDateComparator());

            String[] projectionToUse;
            if (projection == null) {
                projectionToUse = DEFAULT_MESSAGE_PROJECTION;
            } else {
                projectionToUse = projection;
            }

            // Column order (and de-duplication) is defined by the extractor map.
            LinkedHashMap<String, FieldExtractor<MessageInfoHolder, ?>> extractors =
                    resolveMessageExtractors(projectionToUse, holders.size());
            int fieldCount = extractors.size();

            String[] actualProjection = extractors.keySet().toArray(new String[fieldCount]);
            MatrixCursor cursor = new MatrixCursor(actualProjection);

            for (MessageInfoHolder holder : holders) {
                Object[] o = new Object[fieldCount];

                int i = 0;
                for (FieldExtractor<MessageInfoHolder, ?> extractor : extractors.values()) {
                    o[i] = extractor.getField(holder);
                    i += 1;
                }

                cursor.addRow(o);
            }

            return cursor;
        }

        /**
         * Maps each requested column name to its FieldExtractor, preserving request
         * order and silently dropping unknown or duplicate column names.
         */
        protected LinkedHashMap<String, FieldExtractor<MessageInfoHolder, ?>> resolveMessageExtractors(
                String[] projection, int count) {
            LinkedHashMap<String, FieldExtractor<MessageInfoHolder, ?>> extractors =
                    new LinkedHashMap<String, FieldExtractor<MessageInfoHolder, ?>>();

            for (String field : projection) {
                if (extractors.containsKey(field)) {
                    continue;
                }
                if (MessageColumns._ID.equals(field)) {
                    extractors.put(field, new IdExtractor());
                } else if (MessageColumns._COUNT.equals(field)) {
                    extractors.put(field, new CountExtractor<MessageInfoHolder>(count));
                } else if (MessageColumns.SUBJECT.equals(field)) {
                    extractors.put(field, new SubjectExtractor());
                } else if (MessageColumns.SENDER.equals(field)) {
                    extractors.put(field, new SenderExtractor());
                } else if (MessageColumns.SENDER_ADDRESS.equals(field)) {
                    extractors.put(field, new SenderAddressExtractor());
                } else if (MessageColumns.SEND_DATE.equals(field)) {
                    extractors.put(field, new SendDateExtractor());
                } else if (MessageColumns.PREVIEW.equals(field)) {
                    extractors.put(field, new PreviewExtractor());
                } else if (MessageColumns.URI.equals(field)) {
                    extractors.put(field, new UriExtractor());
                } else if (MessageColumns.DELETE_URI.equals(field)) {
                    extractors.put(field, new DeleteUriExtractor());
                } else if (MessageColumns.UNREAD.equals(field)) {
                    extractors.put(field, new UnreadExtractor());
                } else if (MessageColumns.ACCOUNT.equals(field)) {
                    extractors.put(field, new AccountExtractor());
                } else if (MessageColumns.ACCOUNT_COLOR.equals(field)) {
                    extractors.put(field, new AccountColorExtractor());
                } else if (MessageColumns.ACCOUNT_NUMBER.equals(field)) {
                    extractors.put(field, new AccountNumberExtractor());
                } else if (MessageColumns.HAS_ATTACHMENTS.equals(field)) {
                    extractors.put(field, new HasAttachmentsExtractor());
                } else if (MessageColumns.HAS_STAR.equals(field)) {
                    extractors.put(field, new HasStarExtractor());
                } else if (MessageColumns.INCREMENT.equals(field)) {
                    extractors.put(field, new IncrementExtractor());
                }
            }
            return extractors;
        }
    }

    /**
     * Retrieve the account list.
     */
    protected class AccountsQueryHandler implements QueryHandler {
        @Override
        public String getPath() {
            return "accounts";
        }

        @Override
        public Cursor query(Uri uri, String[] projection, String selection, String[] selectionArgs,
                            String sortOrder) throws Exception {
            return getAllAccounts(projection);
        }

        /**
         * Builds one row per configured account; unknown projection columns yield null.
         */
        public Cursor getAllAccounts(String[] projection) {
            if (projection == null) {
                projection = DEFAULT_ACCOUNT_PROJECTION;
            }

            MatrixCursor cursor = new MatrixCursor(projection);

            for (Account account : Preferences.getPreferences(getContext()).getAccounts()) {
                Object[] values = new Object[projection.length];

                int fieldIndex = 0;
                for (String field : projection) {
                    if (AccountColumns.ACCOUNT_NUMBER.equals(field)) {
                        values[fieldIndex] = account.getAccountNumber();
                    } else if (AccountColumns.ACCOUNT_NAME.equals(field)) {
                        values[fieldIndex] = account.getDescription();
                    } else if (AccountColumns.ACCOUNT_UUID.equals(field)) {
                        values[fieldIndex] = account.getUuid();
                    } else if (AccountColumns.ACCOUNT_COLOR.equals(field)) {
                        values[fieldIndex] = account.getChipColor();
                    } else {
                        values[fieldIndex] = null;
                    }
                    ++fieldIndex;
                }

                cursor.addRow(values);
            }

            return cursor;
        }
    }

    /**
     * Retrieve the unread message count for a given account specified by its {@link Account#getAccountNumber() number}.
*/ protected class UnreadQueryHandler implements QueryHandler { @Override public String getPath() { return "account_unread/#"; } @Override public Cursor query(Uri uri, String[] projection, String selection, String[] selectionArgs, String sortOrder) throws Exception { List<String> segments = uri.getPathSegments(); int accountId = Integer.parseInt(segments.get(1)); /* * This method below calls Account.getStats() which uses EmailProvider to do its work. * For this to work we need to clear the calling identity. Otherwise accessing * EmailProvider will fail because it's not exported so third-party apps can't access it * directly. */ long identityToken = Binder.clearCallingIdentity(); try { return getAccountUnread(accountId); } finally { Binder.restoreCallingIdentity(identityToken); } } private Cursor getAccountUnread(int accountNumber) { MatrixCursor cursor = new MatrixCursor(UNREAD_PROJECTION); Account myAccount; AccountStats myAccountStats; Object[] values = new Object[2]; for (Account account : Preferences.getPreferences(getContext()).getAvailableAccounts()) { if (account.getAccountNumber() == accountNumber) { myAccount = account; try { myAccountStats = account.getStats(getContext()); values[0] = myAccount.getDescription(); if (myAccountStats == null) { values[1] = 0; } else { values[1] = myAccountStats.unreadMessageCount; } } catch (MessagingException e) { Log.e(K9.LOG_TAG, e.getMessage()); values[0] = "Unknown"; values[1] = 0; } cursor.addRow(values); } } return cursor; } } /** * Cursor wrapper that release a semaphore on close. Close is also triggered on {@link #finalize()}. 
*/ protected static class MonitoredCursor implements CrossProcessCursor { /** * The underlying cursor implementation that handles regular requests */ private CrossProcessCursor cursor; /** * Whether {@link #close()} was invoked */ private AtomicBoolean closed = new AtomicBoolean(false); private Semaphore semaphore; protected MonitoredCursor(CrossProcessCursor cursor, Semaphore semaphore) { this.cursor = cursor; this.semaphore = semaphore; } @Override public void close() { if (closed.compareAndSet(false, true)) { cursor.close(); Log.d(K9.LOG_TAG, "Cursor closed, null'ing & releasing semaphore"); cursor = null; semaphore.release(); } } @Override public boolean isClosed() { return closed.get() || cursor.isClosed(); } @Override protected void finalize() throws Throwable { close(); super.finalize(); } protected void checkClosed() throws IllegalStateException { if (closed.get()) { throw new IllegalStateException("Cursor was closed"); } } @Override public void fillWindow(int pos, CursorWindow winow) { checkClosed(); cursor.fillWindow(pos, winow); } @Override public CursorWindow getWindow() { checkClosed(); return cursor.getWindow(); } @Override public boolean onMove(int oldPosition, int newPosition) { checkClosed(); return cursor.onMove(oldPosition, newPosition); } @Override public void copyStringToBuffer(int columnIndex, CharArrayBuffer buffer) { checkClosed(); cursor.copyStringToBuffer(columnIndex, buffer); } @Override public void deactivate() { checkClosed(); cursor.deactivate(); } @Override public byte[] getBlob(int columnIndex) { checkClosed(); return cursor.getBlob(columnIndex); } @Override public int getColumnCount() { checkClosed(); return cursor.getColumnCount(); } @Override public int getColumnIndex(String columnName) { checkClosed(); return cursor.getColumnIndex(columnName); } @Override public int getColumnIndexOrThrow(String columnName) throws IllegalArgumentException { checkClosed(); return cursor.getColumnIndexOrThrow(columnName); } @Override public String 
getColumnName(int columnIndex) { checkClosed(); return cursor.getColumnName(columnIndex); } @Override public String[] getColumnNames() { checkClosed(); return cursor.getColumnNames(); } @Override public int getCount() { checkClosed(); return cursor.getCount(); } @Override public double getDouble(int columnIndex) { checkClosed(); return cursor.getDouble(columnIndex); } @Override public Bundle getExtras() { checkClosed(); return cursor.getExtras(); } @Override public float getFloat(int columnIndex) { checkClosed(); return cursor.getFloat(columnIndex); } @Override public int getInt(int columnIndex) { checkClosed(); return cursor.getInt(columnIndex); } @Override public long getLong(int columnIndex) { checkClosed(); return cursor.getLong(columnIndex); } @Override public int getPosition() { checkClosed(); return cursor.getPosition(); } @Override public short getShort(int columnIndex) { checkClosed(); return cursor.getShort(columnIndex); } @Override public String getString(int columnIndex) { checkClosed(); return cursor.getString(columnIndex); } @Override public boolean getWantsAllOnMoveCalls() { checkClosed(); return cursor.getWantsAllOnMoveCalls(); } @TargetApi(Build.VERSION_CODES.M) @Override public void setExtras(Bundle extras) { cursor.setExtras(extras); } @Override public boolean isAfterLast() { checkClosed(); return cursor.isAfterLast(); } @Override public boolean isBeforeFirst() { checkClosed(); return cursor.isBeforeFirst(); } @Override public boolean isFirst() { checkClosed(); return cursor.isFirst(); } @Override public boolean isLast() { checkClosed(); return cursor.isLast(); } @Override public boolean isNull(int columnIndex) { checkClosed(); return cursor.isNull(columnIndex); } @Override public boolean move(int offset) { checkClosed(); return cursor.move(offset); } @Override public boolean moveToFirst() { checkClosed(); return cursor.moveToFirst(); } @Override public boolean moveToLast() { checkClosed(); return cursor.moveToLast(); } @Override public boolean 
moveToNext() { checkClosed(); return cursor.moveToNext(); } @Override public boolean moveToPosition(int position) { checkClosed(); return cursor.moveToPosition(position); } @Override public boolean moveToPrevious() { checkClosed(); return cursor.moveToPrevious(); } @Override public void registerContentObserver(ContentObserver observer) { checkClosed(); cursor.registerContentObserver(observer); } @Override public void registerDataSetObserver(DataSetObserver observer) { checkClosed(); cursor.registerDataSetObserver(observer); } @SuppressWarnings("deprecation") @Override public boolean requery() { checkClosed(); return cursor.requery(); } @Override public Bundle respond(Bundle extras) { checkClosed(); return cursor.respond(extras); } @Override public void setNotificationUri(ContentResolver cr, Uri uri) { checkClosed(); cursor.setNotificationUri(cr, uri); } @Override public void unregisterContentObserver(ContentObserver observer) { checkClosed(); cursor.unregisterContentObserver(observer); } @Override public void unregisterDataSetObserver(DataSetObserver observer) { checkClosed(); cursor.unregisterDataSetObserver(observer); } @Override public int getType(int columnIndex) { checkClosed(); return cursor.getType(columnIndex); } @Override public Uri getNotificationUri() { return null; } } protected class ThrottlingQueryHandler implements QueryHandler { private QueryHandler delegate; public ThrottlingQueryHandler(QueryHandler delegate) { this.delegate = delegate; } @Override public String getPath() { return delegate.getPath(); } @Override public Cursor query(Uri uri, String[] projection, String selection, String[] selectionArgs, String sortOrder) throws Exception { semaphore.acquire(); Cursor cursor = null; try { cursor = delegate.query(uri, projection, selection, selectionArgs, sortOrder); } finally { if (cursor == null) { semaphore.release(); } } // Android content resolvers can only process CrossProcessCursor instances if (!(cursor instanceof CrossProcessCursor)) { 
Log.w(K9.LOG_TAG, "Unsupported cursor, returning null: " + cursor); semaphore.release(); return null; } MonitoredCursor wrapped = new MonitoredCursor((CrossProcessCursor) cursor, semaphore); // Use a weak reference not to actively prevent garbage collection final WeakReference<MonitoredCursor> weakReference = new WeakReference<MonitoredCursor>(wrapped); // Make sure the cursor is closed after 30 seconds scheduledPool.schedule(new Runnable() { @Override public void run() { MonitoredCursor monitored = weakReference.get(); if (monitored != null && !monitored.isClosed()) { Log.w(K9.LOG_TAG, "Forcibly closing remotely exposed cursor"); try { monitored.close(); } catch (Exception e) { Log.w(K9.LOG_TAG, "Exception while forcibly closing cursor", e); } } } }, 30, TimeUnit.SECONDS); return wrapped; } } /** * Synchronized listener used to retrieve {@link MessageInfoHolder}s using a given {@link BlockingQueue}. */ protected class MessageInfoHolderRetrieverListener extends MessagingListener { private final BlockingQueue<List<MessageInfoHolder>> queue; private List<MessageInfoHolder> holders = new ArrayList<MessageInfoHolder>(); public MessageInfoHolderRetrieverListener(BlockingQueue<List<MessageInfoHolder>> queue) { this.queue = queue; } @Override public void listLocalMessagesAddMessages(Account account, String folderName, List<LocalMessage> messages) { Context context = getContext(); for (LocalMessage message : messages) { MessageInfoHolder messageInfoHolder = new MessageInfoHolder(); LocalFolder messageFolder = message.getFolder(); Account messageAccount = message.getAccount(); FolderInfoHolder folderInfoHolder = new FolderInfoHolder(context, messageFolder, messageAccount); messageHelper.populate(messageInfoHolder, message, folderInfoHolder, messageAccount); holders.add(messageInfoHolder); } } @Override public void searchStats(AccountStats stats) { try { queue.put(holders); } catch (InterruptedException e) { Log.e(K9.LOG_TAG, "Unable to return message list back to caller", 
e); } } } }
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.structuralsearch.impl.matcher.handlers;

import com.intellij.dupLocator.iterators.NodeIterator;
import com.intellij.dupLocator.util.NodeFilter;
import com.intellij.psi.PsiComment;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiRecursiveElementWalkingVisitor;
import com.intellij.structuralsearch.MatchResult;
import com.intellij.structuralsearch.impl.matcher.CompiledPattern;
import com.intellij.structuralsearch.impl.matcher.MatchContext;
import com.intellij.structuralsearch.impl.matcher.MatchResultImpl;
import com.intellij.structuralsearch.impl.matcher.filters.DefaultFilter;
import com.intellij.structuralsearch.impl.matcher.strategies.MatchingStrategy;

import java.util.HashSet;
import java.util.Set;

/**
 * Root of handlers for pattern node matching. Handles simplest type of the match.
 */
public abstract class MatchingHandler {
  protected NodeFilter filter;
  // When set, this handler is "pinned" to one concrete element and may only match it
  // (see matchInAnyOrder). NOTE(review): never cleared by reset() — see comment there.
  private PsiElement pinnedElement;

  public void setFilter(NodeFilter filter) {
    this.filter = filter;
  }

  /**
   * Matches the given pattern node against the given matched node.
   *
   * @param patternNode the pattern node; when null, only a null matched node matches
   * @param matchedNode the candidate node from the code being searched
   * @param context of the matching
   * @return true if matching was successful and false otherwise
   */
  public boolean match(PsiElement patternNode, PsiElement matchedNode, MatchContext context) {
    return (patternNode == null) ? matchedNode == null : canMatch(patternNode, matchedNode, context);
  }

  // Cheap structural pre-check: a configured filter wins, otherwise fall back to
  // the element-kind comparison in DefaultFilter.
  public boolean canMatch(final PsiElement patternNode, final PsiElement matchedNode, MatchContext context) {
    return (filter != null) ? filter.accepts(matchedNode) : DefaultFilter.accepts(patternNode, matchedNode);
  }

  /**
   * Matches both node sequences in order, recursing for each subsequent pattern node and
   * backtracking (via rewindTo) when a tail fails to match.
   */
  public boolean matchSequentially(NodeIterator patternNodes, NodeIterator matchNodes, MatchContext context) {
    final MatchingStrategy strategy = context.getPattern().getStrategy();
    // Remember both positions so we can rewind on a failed tail match.
    final PsiElement currentPatternNode = patternNodes.current();
    final PsiElement currentMatchNode = matchNodes.current();
    skipIfNecessary(matchNodes, currentPatternNode, strategy);
    skipComments(matchNodes, currentPatternNode);
    skipIfNecessary(patternNodes, matchNodes.current(), strategy);
    if (!patternNodes.hasNext()) {
      // Pattern exhausted: success only when the code sequence is also exhausted.
      return !matchNodes.hasNext();
    }

    final PsiElement patternElement = patternNodes.current();
    final MatchingHandler handler = context.getPattern().getHandler(patternElement);
    if (matchNodes.hasNext() && handler.match(patternElement, matchNodes.current(), context)) {
      patternNodes.advance();
      skipIfNecessary(patternNodes, matchNodes.current(), strategy);
      if (shouldAdvanceTheMatchFor(patternElement, matchNodes.current())) {
        matchNodes.advance();
        skipIfNecessary(matchNodes, patternNodes.current(), strategy);
        if (patternNodes.hasNext()) skipComments(matchNodes, patternNodes.current());
      }

      if (patternNodes.hasNext()) {
        final MatchingHandler nextHandler = context.getPattern().getHandler(patternNodes.current());
        if (nextHandler.matchSequentially(patternNodes, matchNodes, context)) {
          return true;
        }
        else {
          // Tail failed — backtrack both iterators to where this frame started.
          patternNodes.rewindTo(currentPatternNode);
          matchNodes.rewindTo(currentMatchNode);
        }
      }
      else {
        // match was found
        return handler.isMatchSequentiallySucceeded(matchNodes);
      }
    }
    return false;
  }

  // Advances past comments in the code sequence, unless the pattern itself is a comment
  // (in which case comments are significant and must be matched).
  private static void skipComments(NodeIterator matchNodes, PsiElement patternNode) {
    final boolean skipComment = !(patternNode instanceof PsiComment);
    while (skipComment && matchNodes.current() instanceof PsiComment) matchNodes.advance();
  }

  // Advances past nodes the strategy considers irrelevant relative to the element being matched.
  private static void skipIfNecessary(NodeIterator nodes, PsiElement elementToMatchWith, MatchingStrategy strategy) {
    while (strategy.shouldSkip(nodes.current(), elementToMatchWith)) {
      nodes.advance();
    }
  }

  // Sequence match succeeded only when nothing but comments remains in the code sequence.
  protected boolean isMatchSequentiallySucceeded(final NodeIterator matchNodes) {
    skipComments(matchNodes, null);
    return !matchNodes.hasNext();
  }

  static class ClearStateVisitor extends PsiRecursiveElementWalkingVisitor {
    private CompiledPattern pattern;

    ClearStateVisitor() {
      super(true);
    }

    @Override public void visitElement(PsiElement element) {
      // We do not reset certain handlers because they are also bound to higher level nodes
      // e.g. Identifier handler in name is also bound to PsiMethod
      if (pattern.isToResetHandler(element)) {
        final MatchingHandler handler = pattern.getHandlerSimple(element);
        if (handler != null) {
          handler.reset();
        }
      }
      super.visitElement(element);
    }

    // synchronized: the visitor is shared through the static clearingVisitor field.
    synchronized void clearState(CompiledPattern _pattern, PsiElement el) {
      pattern = _pattern;
      el.acceptChildren(this);
      pattern = null;
    }
  }

  protected static ClearStateVisitor clearingVisitor = new ClearStateVisitor();

  /**
   * Matches pattern nodes against code nodes without regard to order, consuming each code
   * element at most once. Any partial result accumulated inside is merged back into the
   * caller's result in the finally block.
   */
  public boolean matchInAnyOrder(NodeIterator patternNodes, NodeIterator matchedNodes, final MatchContext context) {
    final MatchResultImpl saveResult = context.hasResult() ? context.getResult() : null;
    context.setResult(null);

    try {
      if (patternNodes.hasNext() && !matchedNodes.hasNext()) {
        return validateSatisfactionOfHandlers(patternNodes, context);
      }

      Set<PsiElement> matchedElements = null;

      while (patternNodes.hasNext()) {
        final PsiElement patternNode = patternNodes.current();
        patternNodes.advance();
        final CompiledPattern pattern = context.getPattern();
        final MatchingHandler handler = pattern.getHandler(patternNode);

        matchedNodes.reset();
        boolean allElementsMatched = true;
        int matchedOccurs = 0;
        do {
          // A pinned handler only ever tries its pinned element.
          final PsiElement pinnedNode = handler.getPinnedNode();
          final PsiElement matchedNode = (pinnedNode != null) ? pinnedNode : matchedNodes.current();
          if (pinnedNode == null) matchedNodes.advance();

          if (matchedElements == null || !matchedElements.contains(matchedNode)) {
            allElementsMatched = false;
            if (handler.match(patternNode, matchedNode, context)) {
              matchedOccurs++;
              if (matchedElements == null) matchedElements = new HashSet<>();
              matchedElements.add(matchedNode);
              if (handler.shouldAdvanceThePatternFor(patternNode, matchedNode)) {
                break;
              }
            } else if (pinnedNode != null) {
              return false;
            }

            // clear state of dependent objects
            clearingVisitor.clearState(pattern, patternNode);
          }

          // passed of elements and there is no pattern node that matches
          if (!matchedNodes.hasNext() || pinnedNode != null) {
            if (!handler.validate(context, matchedOccurs)) return false;
            if (allElementsMatched || !patternNodes.hasNext()) {
              final boolean result = validateSatisfactionOfHandlers(patternNodes, context);
              if (result && matchedElements != null) {
                context.notifyMatchedElements(matchedElements);
              }
              return result;
            }
            break;
          }
        } while(true);

        if (!handler.validate(context, matchedOccurs)) return false;
      }

      final boolean result = validateSatisfactionOfHandlers(patternNodes, context);
      if (result && matchedElements != null) {
        context.notifyMatchedElements(matchedElements);
      }
      return result;
    } finally {
      // Merge children of any partial result into the caller's saved result.
      if (saveResult != null) {
        if (context.hasResult()) {
          for (MatchResult child : context.getResult().getChildren()) {
            saveResult.addChild(child);
          }
        }
        context.setResult(saveResult);
      }
    }
  }

  // Remaining pattern nodes are acceptable only if each handler validates zero occurrences
  // (e.g. optional/zero-count variables).
  protected static boolean validateSatisfactionOfHandlers(NodeIterator patternNodes, MatchContext context) {
    for (;patternNodes.hasNext(); patternNodes.advance()) {
      if (!context.getPattern().getHandler(patternNodes.current()).validate(context, 0)) {
        return false;
      }
    }
    return true;
  }

  // Default occurrence constraint: exactly one match.
  boolean validate(MatchContext context, int matchedOccurs) {
    return matchedOccurs == 1;
  }

  public NodeFilter getFilter() {
    return filter;
  }

  public boolean shouldAdvanceThePatternFor(PsiElement patternElement, PsiElement matchedElement) {
    return true;
  }

  public boolean shouldAdvanceTheMatchFor(PsiElement patternElement, PsiElement matchedElement) {
    return true;
  }

  public void reset() {
    // NOTE(review): pinnedElement deliberately survives reset() (line kept commented out
    // upstream) — confirm before "fixing".
    //pinnedElement = null;
  }

  public PsiElement getPinnedNode() {
    return pinnedElement;
  }

  public void setPinnedElement(final PsiElement pinnedElement) {
    this.pinnedElement = pinnedElement;
  }
}
/**
 * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.id;

import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertFalse;
import static org.testng.AssertJUnit.assertTrue;
import static org.threeten.bp.Month.DECEMBER;
import static org.threeten.bp.Month.JANUARY;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;

import org.testng.annotations.Test;
import org.threeten.bp.LocalDate;

import com.google.common.collect.Sets;
import com.opengamma.util.test.TestGroup;

/**
 * Test {@link ExternalIdBundleWithDates}.
 */
@Test(groups = TestGroup.UNIT)
public class ExternalIdBundleWithDatesTest {

  // Fixture ids: two schemes (D1, D2) x two values (V1, V2), with varying validity windows.
  private final ExternalId _id11 = ExternalId.of("D1", "V1");
  private final ExternalIdWithDates _idwd11 = ExternalIdWithDates.of(_id11, LocalDate.of(2000, JANUARY, 1), LocalDate.of(2001, JANUARY, 1));
  private final ExternalId _id21 = ExternalId.of("D2", "V1");
  private final ExternalIdWithDates _idwd21 = ExternalIdWithDates.of(_id21, null, null);
  private final ExternalId _id12 = ExternalId.of("D1", "V2");
  private final ExternalIdWithDates _idwd12 = ExternalIdWithDates.of(_id12, LocalDate.of(2001, JANUARY, 2), null);
  private final ExternalId _id22 = ExternalId.of("D2", "V2");
  private final ExternalIdWithDates _idwd22 = ExternalIdWithDates.of(_id22, null, LocalDate.of(2010, DECEMBER, 30));

  public void singleton_empty() {
    assertEquals(0, ExternalIdBundleWithDates.EMPTY.size());
  }

  //-------------------------------------------------------------------------
  public void factory_of_varargs_noExternalIds() {
    ExternalIdBundleWithDates test = ExternalIdBundleWithDates.of();
    assertEquals(0, test.size());
  }

  public void factory_of_varargs_oneExternalId() {
    ExternalIdBundleWithDates test = ExternalIdBundleWithDates.of(_idwd11);
    assertEquals(1, test.size());
    assertEquals(Sets.newHashSet(_idwd11), test.getExternalIds());
  }

  public void factory_of_varargs_twoExternalIds() {
    ExternalIdBundleWithDates test = ExternalIdBundleWithDates.of(_idwd11, _idwd12);
    assertEquals(2, test.size());
    assertEquals(Sets.newHashSet(_idwd11, _idwd12), test.getExternalIds());
  }

  @Test(expectedExceptions = IllegalArgumentException.class)
  public void factory_of_varargs_null() {
    ExternalIdBundleWithDates.of((ExternalIdWithDates[]) null);
  }

  @Test(expectedExceptions = IllegalArgumentException.class)
  public void factory_of_varargs_noNulls() {
    ExternalIdBundleWithDates.of(_idwd11, null, _idwd12);
  }

  //-------------------------------------------------------------------------
  public void constructor_noargs() {
    ExternalIdBundleWithDates test = new ExternalIdBundleWithDates();
    assertEquals(0, test.size());
  }

  //-------------------------------------------------------------------------
  public void constructor_ExternalId() {
    ExternalIdBundleWithDates test = new ExternalIdBundleWithDates(_idwd11);
    assertEquals(1, test.size());
    assertEquals(Sets.newHashSet(_idwd11), test.getExternalIds());
  }

  @Test(expectedExceptions = IllegalArgumentException.class)
  public void constructor_ExternalId_null() {
    new ExternalIdBundleWithDates((ExternalIdWithDates) null);
  }

  //-------------------------------------------------------------------------
  public void constructor_varargs_empty() {
    ExternalIdBundleWithDates test = new ExternalIdBundleWithDates(new ExternalIdWithDates[0]);
    assertEquals(0, test.size());
  }

  public void constructor_varargs_two() {
    ExternalIdBundleWithDates test = new ExternalIdBundleWithDates(_idwd11, _idwd12);
    assertEquals(2, test.size());
    assertEquals(Sets.newHashSet(_idwd11, _idwd12), test.getExternalIds());
  }

  public void constructor_varargs_null() {
    // A null varargs array is treated as empty, unlike an array containing nulls.
    ExternalIdBundleWithDates test = new ExternalIdBundleWithDates((ExternalIdWithDates[]) null);
    assertEquals(0, test.size());
  }

  @Test(expectedExceptions = IllegalArgumentException.class)
  public void constructor_varargs_noNulls() {
    new ExternalIdBundleWithDates(_idwd11, null, _idwd12);
  }

  //-------------------------------------------------------------------------
  public void constructor_Collection_empty() {
    ExternalIdBundleWithDates test = new ExternalIdBundleWithDates(new ArrayList<ExternalIdWithDates>());
    assertEquals(0, test.size());
  }

  public void constructor_Collection_two() {
    ExternalIdBundleWithDates test = new ExternalIdBundleWithDates(Arrays.asList(_idwd11, _idwd12));
    assertEquals(2, test.size());
    assertEquals(Sets.newHashSet(_idwd11, _idwd12), test.getExternalIds());
  }

  public void constructor_Collection_null() {
    // A null collection is treated as empty, unlike a collection containing nulls.
    ExternalIdBundleWithDates test = new ExternalIdBundleWithDates((Collection<ExternalIdWithDates>) null);
    assertEquals(0, test.size());
  }

  @Test(expectedExceptions = IllegalArgumentException.class)
  public void constructor_Collection_noNulls() {
    new ExternalIdBundleWithDates(Arrays.asList(_idwd11, null, _idwd12));
  }

  //-------------------------------------------------------------------------
  public void singleExternalIdDifferentConstructors() {
    assertTrue(new ExternalIdBundleWithDates(_idwd11).equals(new ExternalIdBundleWithDates(Collections.singleton(_idwd11))));
  }

  public void singleVersusMultipleExternalId() {
    assertFalse(new ExternalIdBundleWithDates(_idwd11).equals(new ExternalIdBundleWithDates(_idwd11, _idwd12)));
    assertFalse(new ExternalIdBundleWithDates(_idwd11, _idwd12).equals(new ExternalIdBundleWithDates(_idwd11)));
  }

  //-------------------------------------------------------------------------
  public void toBundle() {
    ExternalIdBundleWithDates bundleWithDates = new ExternalIdBundleWithDates(_idwd11, _idwd22);
    assertEquals(ExternalIdBundle.of(_id11, _id22), bundleWithDates.toBundle());
  }

  public void toBundle_LocalDate() {
    // Only ids valid on the supplied date survive the conversion.
    ExternalIdBundleWithDates bundleWithDates = new ExternalIdBundleWithDates(_idwd11, _idwd22);
    assertEquals(ExternalIdBundle.of(_id11, _id22), bundleWithDates.toBundle(LocalDate.of(2000, 6, 1)));
    assertEquals(ExternalIdBundle.of(_id22), bundleWithDates.toBundle(LocalDate.of(2002, 6, 1)));
    assertEquals(ExternalIdBundle.EMPTY, bundleWithDates.toBundle(LocalDate.of(2011, 6, 1)));
  }

  //-------------------------------------------------------------------------
  public void withExternalId() {
    // with/without are copy-on-write: the base bundle must be untouched.
    ExternalIdBundleWithDates base = new ExternalIdBundleWithDates(_idwd11);
    ExternalIdBundleWithDates test = base.withExternalId(_idwd21);
    assertEquals(1, base.size());
    assertEquals(2, test.size());
    assertTrue(test.getExternalIds().contains(_idwd11));
    assertTrue(test.getExternalIds().contains(_idwd21));
  }

  @Test(expectedExceptions = IllegalArgumentException.class)
  public void withExternalId_null() {
    ExternalIdBundleWithDates base = new ExternalIdBundleWithDates(_idwd11);
    base.withExternalId(null);
  }

  public void withoutExternalId_match() {
    ExternalIdBundleWithDates base = new ExternalIdBundleWithDates(_idwd11);
    ExternalIdBundleWithDates test = base.withoutExternalId(_idwd11);
    assertEquals(1, base.size());
    assertEquals(0, test.size());
  }

  public void withoutExternalId_noMatch() {
    ExternalIdBundleWithDates base = new ExternalIdBundleWithDates(_idwd11);
    ExternalIdBundleWithDates test = base.withoutExternalId(_idwd12);
    assertEquals(1, base.size());
    assertEquals(1, test.size());
    assertTrue(test.getExternalIds().contains(_idwd11));
  }

  @Test(expectedExceptions = IllegalArgumentException.class)
  public void withoutExternalId_null() {
    ExternalIdBundleWithDates base = new ExternalIdBundleWithDates(_idwd11);
    base.withoutExternalId(null);
  }

  //-------------------------------------------------------------------------
  public void test_size() {
    assertEquals(0, new ExternalIdBundleWithDates().size());
    assertEquals(1, new ExternalIdBundleWithDates(_idwd11).size());
    assertEquals(2, new ExternalIdBundleWithDates(_idwd11, _idwd12).size());
  }

  //-------------------------------------------------------------------------
  public void test_iterator() {
    Set<ExternalIdWithDates> expected = new HashSet<ExternalIdWithDates>();
    expected.add(_idwd11);
    expected.add(_idwd12);
    Iterable<ExternalIdWithDates> base = new ExternalIdBundleWithDates(_idwd11, _idwd12);
    Iterator<ExternalIdWithDates> test = base.iterator();
    assertEquals(true, test.hasNext());
    assertEquals(true, expected.remove(test.next()));
    assertEquals(true, test.hasNext());
    assertEquals(true, expected.remove(test.next()));
    assertEquals(false, test.hasNext());
    assertEquals(0, expected.size());
  }

  //-------------------------------------------------------------------------
  public void test_containsAny() {
    ExternalIdBundleWithDates test = new ExternalIdBundleWithDates(_idwd11, _idwd12);
    assertEquals(true, test.containsAny(new ExternalIdBundleWithDates(_idwd11, _idwd12)));
    assertEquals(true, test.containsAny(new ExternalIdBundleWithDates(_idwd11)));
    assertEquals(true, test.containsAny(new ExternalIdBundleWithDates(_idwd12)));
    assertEquals(false, test.containsAny(new ExternalIdBundleWithDates(_idwd21)));
    assertEquals(false, test.containsAny(new ExternalIdBundleWithDates()));
  }

  @Test(expectedExceptions = IllegalArgumentException.class)
  public void test_containsAny_null() {
    ExternalIdBundleWithDates test = new ExternalIdBundleWithDates(_idwd11, _idwd12);
    test.containsAny(null);
  }

  //-------------------------------------------------------------------------
  public void test_contains() {
    ExternalIdBundleWithDates test = new ExternalIdBundleWithDates(_idwd11, _idwd12);
    assertEquals(true, test.contains(_idwd11));
    // Fixed: previously this line re-tested _idwd11, leaving _idwd12 (the second member)
    // uncovered — mirroring test_containsAny, both members must be checked.
    assertEquals(true, test.contains(_idwd12));
    assertEquals(false, test.contains(_idwd21));
  }

  public void test_contains_null() {
    ExternalIdBundleWithDates test = new ExternalIdBundleWithDates(_idwd11, _idwd12);
    assertEquals(false, test.contains(null));
  }

  //-------------------------------------------------------------------------
  public void test_toStringList() {
    ExternalIdBundleWithDates test = new ExternalIdBundleWithDates(_idwd11, _idwd12);
    assertEquals(Arrays.asList(_idwd11.toString(), _idwd12.toString()), test.toStringList());
  }

  public void test_toStringList_empty() {
    ExternalIdBundleWithDates test = new ExternalIdBundleWithDates();
    assertEquals(new ArrayList<String>(), test.toStringList());
  }

  //-------------------------------------------------------------------------
  public void test_compareTo_differentSizes() {
    ExternalIdBundleWithDates a1 = new ExternalIdBundleWithDates();
    ExternalIdBundleWithDates a2 = new ExternalIdBundleWithDates(_idwd11);

    assertEquals(true, a1.compareTo(a1) == 0);
    assertEquals(true, a1.compareTo(a2) < 0);
    assertEquals(true, a2.compareTo(a1) > 0);
    assertEquals(true, a2.compareTo(a2) == 0);
  }

  public void test_compareTo_sameSizes() {
    ExternalIdBundleWithDates a1 = new ExternalIdBundleWithDates(_idwd11);
    ExternalIdBundleWithDates a2 = new ExternalIdBundleWithDates(_idwd12);

    assertEquals(true, a1.compareTo(a1) == 0);
    assertEquals(true, a1.compareTo(a2) < 0);
    assertEquals(true, a2.compareTo(a1) > 0);
    assertEquals(true, a2.compareTo(a2) == 0);
  }

  //-------------------------------------------------------------------------
  public void test_equals_same_empty() {
    ExternalIdBundleWithDates a1 = new ExternalIdBundleWithDates();
    ExternalIdBundleWithDates a2 = new ExternalIdBundleWithDates();

    assertEquals(true, a1.equals(a1));
    assertEquals(true, a1.equals(a2));
    assertEquals(true, a2.equals(a1));
    assertEquals(true, a2.equals(a2));
  }

  public void test_equals_same_nonEmpty() {
    ExternalIdBundleWithDates a1 = new ExternalIdBundleWithDates(_idwd11, _idwd12);
    ExternalIdBundleWithDates a2 = new ExternalIdBundleWithDates(_idwd11, _idwd12);

    assertEquals(true, a1.equals(a1));
    assertEquals(true, a1.equals(a2));
    assertEquals(true, a2.equals(a1));
    assertEquals(true, a2.equals(a2));
  }

  public void test_equals_different() {
    ExternalIdBundleWithDates a = new ExternalIdBundleWithDates();
    ExternalIdBundleWithDates b = new ExternalIdBundleWithDates(_idwd11, _idwd12);

    assertEquals(true, a.equals(a));
    assertEquals(false, a.equals(b));
    assertEquals(false, b.equals(a));
    assertEquals(true, b.equals(b));
    assertEquals(false, b.equals("Rubbish"));
    assertEquals(false, b.equals(null));
  }

  public void test_hashCode() {
    ExternalIdBundleWithDates a = new ExternalIdBundleWithDates(_idwd11, _idwd12);
    ExternalIdBundleWithDates b = new ExternalIdBundleWithDates(_idwd11, _idwd12);

    assertEquals(a.hashCode(), b.hashCode());
  }

  public void test_toString_empty() {
    ExternalIdBundleWithDates test = new ExternalIdBundleWithDates();
    assertEquals("BundleWithDates[]", test.toString());
  }

  public void test_toString_nonEmpty() {
    ExternalIdBundleWithDates test = new ExternalIdBundleWithDates(_idwd11, _idwd12);
    assertEquals("BundleWithDates[" + _idwd11.toString() + ", " + _idwd12.toString() + "]", test.toString());
  }
}
package application.io;

import java.io.Closeable;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;

import org.apache.commons.net.ftp.FTPClient;
import org.apache.commons.net.ftp.FTPSClient;

import com.jcraft.jsch.Channel;
import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.JSchException;
import com.jcraft.jsch.Session;
import com.jcraft.jsch.SftpException;
import com.jcraft.jsch.UserInfo;

/**
 * One-way directory mirror: makes the destination tree identical to the
 * source tree, copying missing/changed files and deleting extraneous ones.
 * Endpoints may be local paths, "file:" URLs, or ftp:// / ftps:// / sftp://
 * URLs (dispatched in {@link #build(String)}).
 *
 * <p>Not thread-safe: the copy buffers and the notification throttle are
 * shared mutable instance state.
 */
public class Sync {

    // URL-scheme prefixes recognised by build().
    private static final String FTP = "ftp://";
    private static final String FTPS = "ftps://";
    private static final String SFTP = "sftp://";

    // Copy buffer used by send().
    byte[] buffer = new byte[4096];
    // Second buffer; unused in this class — presumably used by Check.CONTENT
    // through the Sync reference passed to check.test(..). TODO confirm.
    byte[] buffer2 = new byte[4096];

    // Optional progress callback; see notify() for the 1-second throttle.
    private Consumer<String> messageConsumer;
    private long lastTime = 0;

    /** Sets the progress-message callback (may be null to disable). */
    public void setMessageConsumer(Consumer<String> messageConsumer) {
        this.messageConsumer = messageConsumer;
    }

    /** Returns the current progress-message callback, or null. */
    public Consumer<String> getMessageConsumer() {
        return messageConsumer;
    }

    /**
     * Convenience entry point: builds a composite Check from the three flags
     * and delegates to {@link #sync(String, String, Check)}.
     * With all flags false, Check.NOCHECK is used (files are only copied when
     * missing — the exact NOCHECK semantics live in Check, outside this file).
     */
    public void sync(String src, String dest, boolean checkSize, boolean checkDate, boolean checkContent) throws IOException {
        List<Check> all = new ArrayList<>();
        if (checkSize) {
            all.add(Check.SIZE);
        }
        if (checkDate) {
            all.add(Check.TIMESTAMP);
        }
        if (checkContent) {
            all.add(Check.CONTENT);
        }
        Check c = all.isEmpty() ? Check.NOCHECK : Check.all(all.toArray(new Check[all.size()]));
        sync(src, dest, c);
    }

    /**
     * Forwards a progress message to the consumer, throttled to at most one
     * message per second (lastTime is reset around each sync run so the first
     * message of a run always gets through).
     */
    private void notify(String message) {
        if (messageConsumer != null) {
            long now = System.currentTimeMillis();
            if (now - lastTime > 1000) {
                lastTime = now;
                messageConsumer.accept(message);
            }
        }
    }

    /**
     * Synchronizes dest to mirror src, using the given change-detection
     * policy. Both connections are always closed, even on failure; a final
     * "end" message is emitted unthrottled.
     */
    public void sync(String src, String dest, Check check) throws IOException {
        Connection csrc = null;
        Connection cdest = null;
        try {
            csrc = build(src);
            cdest = build(dest);
            sync0(check, csrc, cdest);
        } finally {
            close(csrc);
            close(cdest);
        }
        lastTime = 0;
        notify("end");
    }

    /**
     * Core recursive merge of the two directory listings, two-pointer style.
     * Assumes Connection.list() returns entries sorted by name — the
     * compareTo-driven advance below is only correct under that assumption;
     * TODO confirm against the Connection implementations.
     *
     * NOTE(review): when the same name is a directory on one side and a plain
     * file on the other, this code either recurses into a non-directory or
     * streams a file over a directory — no type-mismatch handling. Verify
     * whether callers can hit that case.
     */
    private void sync0(Check check, Connection csrc, Connection cdest) throws IOException {
        lastTime = 0;
        notify(csrc.path() + " <---> " + cdest.path());
        Item[] lsrc = csrc.list();
        Item[] ldest = cdest.list();
        int psrc = 0;
        int pdest = 0;
        while (lsrc.length > psrc || ldest.length > pdest) {
            if (ldest.length == pdest) {
                // Destination exhausted: everything left in src is new.
                send(lsrc[psrc++], csrc, cdest);
            } else if (lsrc.length == psrc) {
                // Source exhausted: everything left in dest is extraneous.
                remove(ldest[pdest++], cdest);
            } else {
                Item iscr = lsrc[psrc++]; // (sic) "iscr" — source item
                Item idest = ldest[pdest++];
                if (iscr.getName().equals(idest.getName())) {
                    if (iscr.isDirectory()) {
                        // Same-named directory on both sides: recurse.
                        csrc.push(iscr);
                        cdest.push(iscr);
                        sync0(check, csrc, cdest);
                        csrc.pop();
                        cdest.pop();
                    } else if (check.test(iscr, idest, this)) {
                        // Policy says the file differs: re-copy it.
                        send(iscr, csrc, cdest);
                    }
                } else if (iscr.getName().compareTo(idest.getName()) > 0) {
                    // dest entry sorts first: it no longer exists in src.
                    remove(idest, cdest);
                    psrc--; // re-examine the same src item next iteration
                } else {
                    // src entry sorts first: it is missing from dest.
                    send(iscr, csrc, cdest);
                    pdest--; // re-examine the same dest item next iteration
                }
            }
        }
    }

    /**
     * Recursively deletes an item from the destination (children first for
     * directories, since remote protocols require empty dirs for rmdir).
     */
    protected void remove(Item next, Connection cdest) throws IOException {
        if (next.isDirectory()) {
            cdest.push(next);
            for (Item item : cdest.list()) {
                remove(item, cdest);
            }
            cdest.pop();
            cdest.remove(next);
        } else {
            notify("remove " + cdest.path() + "/" + next.getName());
            cdest.remove(next);
        }
    }

    /**
     * Recursively copies an item from source to destination. Directories are
     * created then descended; files are streamed through the shared buffer.
     */
    protected void send(Item next, Connection csrc, Connection cdest) throws IOException {
        if (next.isDirectory()) {
            cdest.mkdir(next.getName());
            csrc.push(next);
            cdest.push(next);
            for (Item item : csrc.list()) {
                send(item, csrc, cdest);
            }
            csrc.pop();
            cdest.pop();
        } else {
            notify("copy to " + cdest.path() + "/" + next.getName());
            OutputStream out = null;
            InputStream in = null;
            try {
                out = cdest.set(next);
                in = csrc.get(next);
                int count = 0;
                byte[] b = buffer;
                while ((count = in.read(b)) != -1) {
                    out.write(b, 0, count);
                }
            } finally {
                // Close order matters for remote protocols: finish the write
                // stream before the read stream.
                close(out);
                close(in);
            }
        }
    }

    /**
     * Quiet close helper; null-safe.
     * NOTE(review): the IOException is swallowed silently — for an
     * OutputStream this can hide a failed flush (truncated upload). Consider
     * at least logging it.
     */
    public void close(Closeable c) {
        if (c != null) {
            try {
                c.close();
            } catch (IOException e) {
            }
        }
    }

    /**
     * Builds a Connection for the given address, dispatching on the URL
     * scheme; anything unrecognised is treated as a local directory path.
     */
    public Connection build(String addr) throws IOException {
        if (addr.startsWith(FTP)) {
            FTPClient client = new FTPClient();
            configure(client, URI.create(addr), false);
            return new FTPConnection(client, this);
        }
        if (addr.startsWith(FTPS)) {
            FTPClient client = new FTPSClient();
            configure(client, URI.create(addr), false);
            return new FTPConnection(client, this);
        }
        if (addr.startsWith(SFTP)) {
            JSch jsch = new JSch();
            URI uri = URI.create(addr);
            // Credentials are mandatory here: "user:password@host" form.
            // NOTE(review): a missing ':' in userInfo (or no userInfo at all)
            // throws NPE/StringIndexOutOfBounds rather than a clear error.
            String userInfo = uri.getRawUserInfo();
            int split = userInfo.indexOf(':');
            String login = decode(userInfo.substring(0, split));
            String password = decode(userInfo.substring(split + 1));
            int port = 22; // SFTP default
            if (uri.getPort() != -1) {
                port = uri.getPort();
            }
            try {
                Session session = jsch.getSession(login, uri.getHost(), port);
                UserInfo info = new UserInfo() {
                    private boolean firstTime = true;

                    @Override
                    public void showMessage(String message) {
                    }

                    @Override
                    public boolean promptYesNo(String message) {
                        // Auto-accept host keys (no known_hosts check).
                        return true;
                    }

                    @Override
                    public boolean promptPassword(String message) {
                        // Offer the password exactly once so a wrong password
                        // fails fast instead of looping on retries.
                        if (firstTime) {
                            firstTime = false;
                            return true;
                        }
                        return firstTime;
                    }

                    @Override
                    public boolean promptPassphrase(String message) {
                        return true;
                    }

                    @Override
                    public String getPassword() {
                        return password;
                    }

                    @Override
                    public String getPassphrase() {
                        return null;
                    }
                };
                session.setUserInfo(info);
                session.connect();
                Channel channel = session.openChannel("sftp");
                channel.connect();
                ChannelSftp c = (ChannelSftp) channel;
                // cd into the URL path (leading '/' stripped), if any.
                if (uri.getPath() != null && uri.getPath().length() > 1) {
                    c.cd(uri.getPath().substring(1));
                }
                return new SFTPConnection(session, c, this);
            } catch (SftpException | JSchException e) {
                throw new IOException(e);
            }
        }
        if (addr.startsWith("file:")) {
            // NOTE(review): URL.getFile() does NOT decode %-escapes, so a
            // "file:" URL containing spaces or non-ASCII yields a wrong path.
            // new File(URI.create(addr)) would decode correctly — TODO verify
            // callers and fix.
            File f = new File(URI.create(addr).toURL().getFile());
            check(addr, f);
            return new FsConnection(f, this);
        }
        File f = new File(addr);
        check(addr, f);
        return new FsConnection(new File(addr), this);
    }

    /** Validates that a local endpoint exists and is a directory. */
    public void check(String addr, File f) throws IOException {
        if (!f.exists()) {
            throw new FileNotFoundException(addr);
        }
        if (!f.isDirectory()) {
            throw new IOException("the file must be a directory : " + addr);
        }
    }

    /**
     * Connects and logs an FTP(S) client in from the URI components, then
     * changes into the URI path (leading '/' stripped).
     * NOTE(review): the 'sftp' parameter is unused, and the login() return
     * value is ignored — a failed login surfaces only as later command
     * failures.
     */
    private void configure(FTPClient client, URI addr, boolean sftp) throws IOException {
        String userInfo = addr.getRawUserInfo();
        if (addr.getPort() == -1) {
            client.connect(addr.getHost());
        } else {
            client.connect(addr.getHost(), addr.getPort());
        }
        if (userInfo != null) {
            int split = userInfo.indexOf(':');
            client.login(decode(userInfo.substring(0, split)), decode(userInfo.substring(split + 1)));
        }
        if (addr.getPath() != null && addr.getPath().length() > 1) {
            client.changeWorkingDirectory(addr.getPath().substring(1));
        }
    }

    /** URL-decodes a userinfo component (UTF-8), wrapping the checked error. */
    private String decode(String value) throws IOException {
        try {
            return URLDecoder.decode(value, "utf8");
        } catch (UnsupportedEncodingException e) {
            // Unreachable in practice: UTF-8 is always supported.
            throw new IOException(e);
        }
    }
}
/*
 * Copyright 2013-2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.netflix.zuul.filters;

import java.io.IOException;
import java.io.InputStream;
import java.util.Collection;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import javax.servlet.http.HttpServletRequest;

import lombok.extern.apachecommons.CommonsLog;

import org.apache.commons.io.IOUtils;
import org.springframework.boot.actuate.trace.TraceRepository;
import org.springframework.http.HttpHeaders;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.util.StringUtils;
import org.springframework.web.util.UriUtils;
import org.springframework.web.util.WebUtils;

import com.netflix.zuul.context.RequestContext;
import com.netflix.zuul.util.HTTPRequestUtils;

/**
 * Helper for building the proxied request (URI, query params, headers) and
 * copying the origin response back into the Zuul {@link RequestContext},
 * with optional request/response tracing via a {@link TraceRepository}.
 *
 * @author Dave Syer
 */
@CommonsLog
public class ProxyRequestHelper {

	/**
	 * Zuul context key for a collection of ignored headers for the current request.
	 * Pre-filters can set this up as a set of lowercase strings.
	 */
	public static final String IGNORED_HEADERS = "ignoredHeaders";

	public static final String CONTENT_ENCODING = "Content-Encoding";

	// Optional trace sink; when null, debug()/appendDebug() are no-ops.
	private TraceRepository traces;

	/** Sets the trace repository used to record proxied exchanges. */
	public void setTraces(TraceRepository traces) {
		this.traces = traces;
	}

	/**
	 * Resolves the URI to send to the origin: prefers the "requestURI" value a
	 * pre-filter put into the Zuul context (re-encoded), falling back to the
	 * servlet request URI if absent or unencodable.
	 */
	public String buildZuulRequestURI(HttpServletRequest request) {
		RequestContext context = RequestContext.getCurrentContext();
		String uri = request.getRequestURI();
		String contextURI = (String) context.get("requestURI");
		if (contextURI != null) {
			try {
				uri = UriUtils.encodePath(contextURI, WebUtils.DEFAULT_CHARACTER_ENCODING);
			}
			catch (Exception e) {
				log.debug("unable to encode uri path from context, falling back to uri from request", e);
			}
		}
		return uri;
	}

	/**
	 * Copies the current request's query parameters (as parsed by Zuul's
	 * HTTPRequestUtils, which reads the Zuul context — the request argument is
	 * unused) into a MultiValueMap; empty map when there is no query string.
	 */
	public MultiValueMap<String, String> buildZuulRequestQueryParams(
			HttpServletRequest request) {
		Map<String, List<String>> map = HTTPRequestUtils.getInstance().getQueryParams();
		MultiValueMap<String, String> params = new LinkedMultiValueMap<>();
		if (map == null) {
			return params;
		}
		for (String key : map.keySet()) {
			for (String value : map.get(key)) {
				params.add(key, value);
			}
		}
		return params;
	}

	/**
	 * Builds the header set for the origin request: all non-ignored incoming
	 * headers, overlaid with any headers pre-filters added to the Zuul context.
	 * NOTE(review): Accept-Encoding is unconditionally forced to "gzip",
	 * clobbering whatever the client sent.
	 */
	public MultiValueMap<String, String> buildZuulRequestHeaders(
			HttpServletRequest request) {
		RequestContext context = RequestContext.getCurrentContext();
		MultiValueMap<String, String> headers = new HttpHeaders();
		Enumeration<String> headerNames = request.getHeaderNames();
		if (headerNames != null) {
			while (headerNames.hasMoreElements()) {
				String name = headerNames.nextElement();
				if (isIncludedHeader(name)) {
					Enumeration<String> values = request.getHeaders(name);
					while (values.hasMoreElements()) {
						String value = values.nextElement();
						headers.add(name, value);
					}
				}
			}
		}
		Map<String, String> zuulRequestHeaders = context.getZuulRequestHeaders();
		for (String header : zuulRequestHeaders.keySet()) {
			// set() replaces: context-supplied headers win over client ones.
			headers.set(header, zuulRequestHeaders.get(header));
		}
		headers.set(HttpHeaders.ACCEPT_ENCODING, "gzip");
		return headers;
	}

	/**
	 * Copies the origin response (status, body stream, headers) into the Zuul
	 * context: detects gzip from Content-Encoding, records the origin
	 * content-length, and forwards only non-ignored headers to the client.
	 */
	public void setResponse(int status, InputStream entity,
			MultiValueMap<String, String> headers) throws IOException {
		RequestContext context = RequestContext.getCurrentContext();
		RequestContext.getCurrentContext().setResponseStatusCode(status);
		if (entity != null) {
			RequestContext.getCurrentContext().setResponseDataStream(entity);
		}
		boolean isOriginResponseGzipped = false;
		if (headers.containsKey(CONTENT_ENCODING)) {
			Collection<String> collection = headers.get(CONTENT_ENCODING);
			for (String header : collection) {
				if (HTTPRequestUtils.getInstance().isGzipped(header)) {
					isOriginResponseGzipped = true;
					break;
				}
			}
		}
		context.setResponseGZipped(isOriginResponseGzipped);
		for (Entry<String, List<String>> header : headers.entrySet()) {
			RequestContext ctx = RequestContext.getCurrentContext();
			String name = header.getKey();
			for (String value : header.getValue()) {
				ctx.addOriginResponseHeader(name, value);
				if (name.equalsIgnoreCase("content-length")) {
					ctx.setOriginContentLength(value);
				}
				if (isIncludedHeader(name)) {
					ctx.addZuulResponseHeader(name, value);
				}
			}
		}
	}

	/**
	 * Adds header names (lowercased) to the per-request ignored set in the
	 * Zuul context, creating the set on first use.
	 */
	public void addIgnoredHeaders(String... names) {
		RequestContext ctx = RequestContext.getCurrentContext();
		if (!ctx.containsKey(IGNORED_HEADERS)) {
			ctx.set(IGNORED_HEADERS, new HashSet<String>());
		}
		@SuppressWarnings("unchecked")
		Set<String> set = (Set<String>) ctx.get(IGNORED_HEADERS);
		for (String name : names) {
			set.add(name.toLowerCase());
		}
	}

	/**
	 * Whether a header should be forwarded: false for per-request ignored
	 * headers and for a fixed set of hop-by-hop / managed headers.
	 */
	public boolean isIncludedHeader(String headerName) {
		String name = headerName.toLowerCase();
		RequestContext ctx = RequestContext.getCurrentContext();
		if (ctx.containsKey(IGNORED_HEADERS)) {
			Object object = ctx.get(IGNORED_HEADERS);
			if (object instanceof Collection && ((Collection<?>) object).contains(name)) {
				return false;
			}
		}
		switch (name) {
		case "host":
		case "connection":
		case "content-length":
		case "content-encoding":
		case "server":
		case "transfer-encoding":
			return false;
		default:
			return true;
		}
	}

	/**
	 * Records the outgoing proxied request in the trace repository (no-op when
	 * no repository is set). Returns the trace map so appendDebug() can attach
	 * the response later.
	 * NOTE(review): the rebuilt query string always ends with a trailing '&'
	 * when params exist — cosmetic, but visible in traces.
	 */
	public Map<String, Object> debug(String verb, String uri,
			MultiValueMap<String, String> headers, MultiValueMap<String, String> params,
			InputStream requestEntity) throws IOException {
		Map<String, Object> info = new LinkedHashMap<String, Object>();
		if (this.traces != null) {
			RequestContext context = RequestContext.getCurrentContext();
			StringBuilder query = new StringBuilder();
			for (String param : params.keySet()) {
				for (String value : params.get(param)) {
					query.append(param);
					query.append("=");
					query.append(value);
					query.append("&");
				}
			}
			info.put("method", verb);
			info.put("path", uri);
			info.put("query", query.toString());
			info.put("remote", true);
			info.put("proxy", context.get("proxy"));
			Map<String, Object> trace = new LinkedHashMap<String, Object>();
			Map<String, Object> input = new LinkedHashMap<String, Object>();
			trace.put("request", input);
			info.put("headers", trace);
			for (Entry<String, List<String>> entry : headers.entrySet()) {
				Collection<String> collection = entry.getValue();
				Object value = collection;
				// Single-valued headers are flattened to a plain string.
				if (collection.size() < 2) {
					value = collection.isEmpty() ? "" : collection.iterator().next();
				}
				input.put(entry.getKey(), value);
			}
			RequestContext ctx = RequestContext.getCurrentContext();
			if (!ctx.isChunkedRequestBody()) {
				if (requestEntity != null) {
					// NOTE(review): null-checks requestEntity but then reads
					// the servlet request's stream instead — if that stream
					// was already consumed upstream the traced body is empty,
					// and the requestEntity parameter is effectively unused
					// here. Confirm which stream is intended.
					debugRequestEntity(info, ctx.getRequest().getInputStream());
				}
			}
			this.traces.add(info);
			return info;
		}
		return info;
	}

	/**
	 * Attaches the origin response (status + flattened headers) to a trace map
	 * previously produced by debug(); no-op when tracing is disabled.
	 */
	public void appendDebug(Map<String, Object> info, int status,
			MultiValueMap<String, String> headers) {
		if (this.traces != null) {
			@SuppressWarnings("unchecked")
			Map<String, Object> trace = (Map<String, Object>) info.get("headers");
			Map<String, Object> output = new LinkedHashMap<String, Object>();
			trace.put("response", output);
			for (Entry<String, List<String>> key : headers.entrySet()) {
				Collection<String> collection = key.getValue();
				Object value = collection;
				if (collection.size() < 2) {
					value = collection.isEmpty() ? "" : collection.iterator().next();
				}
				output.put(key.getKey(), value);
			}
			output.put("status", "" + status);
		}
	}

	/**
	 * Stores the request body in the trace map, truncated to 4096 chars;
	 * chunked bodies are recorded as the marker "&lt;chunked&gt;".
	 * NOTE(review): IOUtils.toString(InputStream) uses the platform default
	 * charset — consider passing an explicit charset.
	 */
	private void debugRequestEntity(Map<String, Object> info, InputStream inputStream)
			throws IOException {
		if (RequestContext.getCurrentContext().isChunkedRequestBody()) {
			info.put("body", "<chunked>");
			return;
		}
		String entity = IOUtils.toString(inputStream);
		if (StringUtils.hasText(entity)) {
			info.put("body", entity.length() <= 4096 ? entity : entity.substring(0, 4096)
					+ "<truncated>");
		}
	}

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/**
 * Autogenerated by Thrift Compiler (0.9.2)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
// NOTE(review): generated code — change the Thrift IDL and regenerate rather
// than hand-editing this file.
package org.apache.airavata.model.commons;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import javax.annotation.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Thrift struct: a boolean overall validation state plus the list of
// per-validator results (both fields REQUIRED in the IDL).
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-3")
public class ValidationResults implements org.apache.thrift.TBase<ValidationResults, ValidationResults._Fields>, java.io.Serializable, Cloneable, Comparable<ValidationResults> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ValidationResults");

  private static final org.apache.thrift.protocol.TField VALIDATION_STATE_FIELD_DESC = new org.apache.thrift.protocol.TField("validationState", org.apache.thrift.protocol.TType.BOOL, (short)1);
  private static final org.apache.thrift.protocol.TField VALIDATION_RESULT_LIST_FIELD_DESC = new org.apache.thrift.protocol.TField("validationResultList", org.apache.thrift.protocol.TType.LIST, (short)2);

  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new ValidationResultsStandardSchemeFactory());
    schemes.put(TupleScheme.class, new ValidationResultsTupleSchemeFactory());
  }

  private boolean validationState; // required
  private List<ValidatorResult> validationResultList; // required

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    VALIDATION_STATE((short)1, "validationState"),
    VALIDATION_RESULT_LIST((short)2, "validationResultList");

    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // VALIDATION_STATE
          return VALIDATION_STATE;
        case 2: // VALIDATION_RESULT_LIST
          return VALIDATION_RESULT_LIST;
        default:
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final String _fieldName;

    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public String getFieldName() {
      return _fieldName;
    }
  }

  // isset id assignments
  private static final int __VALIDATIONSTATE_ISSET_ID = 0;
  private byte __isset_bitfield = 0;
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.VALIDATION_STATE, new org.apache.thrift.meta_data.FieldMetaData("validationState", org.apache.thrift.TFieldRequirementType.REQUIRED,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
    tmpMap.put(_Fields.VALIDATION_RESULT_LIST, new org.apache.thrift.meta_data.FieldMetaData("validationResultList", org.apache.thrift.TFieldRequirementType.REQUIRED,
        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, ValidatorResult.class))));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ValidationResults.class, metaDataMap);
  }

  public ValidationResults() {
  }

  public ValidationResults(
    boolean validationState,
    List<ValidatorResult> validationResultList)
  {
    this();
    this.validationState = validationState;
    setValidationStateIsSet(true);
    this.validationResultList = validationResultList;
  }

  /**
   * Performs a deep copy on <i>other</i>.
   */
  public ValidationResults(ValidationResults other) {
    __isset_bitfield = other.__isset_bitfield;
    this.validationState = other.validationState;
    if (other.isSetValidationResultList()) {
      List<ValidatorResult> __this__validationResultList = new ArrayList<ValidatorResult>(other.validationResultList.size());
      for (ValidatorResult other_element : other.validationResultList) {
        __this__validationResultList.add(new ValidatorResult(other_element));
      }
      this.validationResultList = __this__validationResultList;
    }
  }

  public ValidationResults deepCopy() {
    return new ValidationResults(this);
  }

  @Override
  public void clear() {
    setValidationStateIsSet(false);
    this.validationState = false;
    this.validationResultList = null;
  }

  public boolean isValidationState() {
    return this.validationState;
  }

  public void setValidationState(boolean validationState) {
    this.validationState = validationState;
    setValidationStateIsSet(true);
  }

  public void unsetValidationState() {
    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __VALIDATIONSTATE_ISSET_ID);
  }

  /** Returns true if field validationState is set (has been assigned a value) and false otherwise */
  public boolean isSetValidationState() {
    return EncodingUtils.testBit(__isset_bitfield, __VALIDATIONSTATE_ISSET_ID);
  }

  public void setValidationStateIsSet(boolean value) {
    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __VALIDATIONSTATE_ISSET_ID, value);
  }

  public int getValidationResultListSize() {
    return (this.validationResultList == null) ? 0 : this.validationResultList.size();
  }

  public java.util.Iterator<ValidatorResult> getValidationResultListIterator() {
    return (this.validationResultList == null) ? null : this.validationResultList.iterator();
  }

  public void addToValidationResultList(ValidatorResult elem) {
    if (this.validationResultList == null) {
      this.validationResultList = new ArrayList<ValidatorResult>();
    }
    this.validationResultList.add(elem);
  }

  public List<ValidatorResult> getValidationResultList() {
    return this.validationResultList;
  }

  public void setValidationResultList(List<ValidatorResult> validationResultList) {
    this.validationResultList = validationResultList;
  }

  public void unsetValidationResultList() {
    this.validationResultList = null;
  }

  /** Returns true if field validationResultList is set (has been assigned a value) and false otherwise */
  public boolean isSetValidationResultList() {
    return this.validationResultList != null;
  }

  public void setValidationResultListIsSet(boolean value) {
    if (!value) {
      this.validationResultList = null;
    }
  }

  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case VALIDATION_STATE:
      if (value == null) {
        unsetValidationState();
      } else {
        setValidationState((Boolean)value);
      }
      break;

    case VALIDATION_RESULT_LIST:
      if (value == null) {
        unsetValidationResultList();
      } else {
        setValidationResultList((List<ValidatorResult>)value);
      }
      break;

    }
  }

  public Object getFieldValue(_Fields field) {
    switch (field) {
    case VALIDATION_STATE:
      return Boolean.valueOf(isValidationState());

    case VALIDATION_RESULT_LIST:
      return getValidationResultList();

    }
    throw new IllegalStateException();
  }

  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }

    switch (field) {
    case VALIDATION_STATE:
      return isSetValidationState();
    case VALIDATION_RESULT_LIST:
      return isSetValidationResultList();
    }
    throw new IllegalStateException();
  }

  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof ValidationResults)
      return this.equals((ValidationResults)that);
    return false;
  }

  public boolean equals(ValidationResults that) {
    if (that == null)
      return false;

    boolean this_present_validationState = true;
    boolean that_present_validationState = true;
    if (this_present_validationState || that_present_validationState) {
      if (!(this_present_validationState && that_present_validationState))
        return false;
      if (this.validationState != that.validationState)
        return false;
    }

    boolean this_present_validationResultList = true && this.isSetValidationResultList();
    boolean that_present_validationResultList = true && that.isSetValidationResultList();
    if (this_present_validationResultList || that_present_validationResultList) {
      if (!(this_present_validationResultList && that_present_validationResultList))
        return false;
      if (!this.validationResultList.equals(that.validationResultList))
        return false;
    }

    return true;
  }

  @Override
  public int hashCode() {
    List<Object> list = new ArrayList<Object>();

    boolean present_validationState = true;
    list.add(present_validationState);
    if (present_validationState)
      list.add(validationState);

    boolean present_validationResultList = true && (isSetValidationResultList());
    list.add(present_validationResultList);
    if (present_validationResultList)
      list.add(validationResultList);

    return list.hashCode();
  }

  @Override
  public int compareTo(ValidationResults other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }

    int lastComparison = 0;

    lastComparison = Boolean.valueOf(isSetValidationState()).compareTo(other.isSetValidationState());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetValidationState()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.validationState, other.validationState);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetValidationResultList()).compareTo(other.isSetValidationResultList());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetValidationResultList()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.validationResultList, other.validationResultList);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }

  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }

  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }

  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("ValidationResults(");
    boolean first = true;

    sb.append("validationState:");
    sb.append(this.validationState);
    first = false;
    if (!first) sb.append(", ");
    sb.append("validationResultList:");
    if (this.validationResultList == null) {
      sb.append("null");
    } else {
      sb.append(this.validationResultList);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }

  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    if (!isSetValidationState()) {
      throw new org.apache.thrift.protocol.TProtocolException("Required field 'validationState' is unset! Struct:" + toString());
    }

    if (!isSetValidationResultList()) {
      throw new org.apache.thrift.protocol.TProtocolException("Required field 'validationResultList' is unset! Struct:" + toString());
    }

    // check for sub-struct validity
  }

  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
      __isset_bitfield = 0;
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private static class ValidationResultsStandardSchemeFactory implements SchemeFactory {
    public ValidationResultsStandardScheme getScheme() {
      return new ValidationResultsStandardScheme();
    }
  }

  private static class ValidationResultsStandardScheme extends StandardScheme<ValidationResults> {

    public void read(org.apache.thrift.protocol.TProtocol iprot, ValidationResults struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
          break;
        }
        switch (schemeField.id) {
          case 1: // VALIDATION_STATE
            if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) {
              struct.validationState = iprot.readBool();
              struct.setValidationStateIsSet(true);
            } else { 
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // VALIDATION_RESULT_LIST
            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
              {
                org.apache.thrift.protocol.TList _list8 = iprot.readListBegin();
                struct.validationResultList = new ArrayList<ValidatorResult>(_list8.size);
                ValidatorResult _elem9;
                for (int _i10 = 0; _i10 < _list8.size; ++_i10)
                {
                  _elem9 = new ValidatorResult();
                  _elem9.read(iprot);
                  struct.validationResultList.add(_elem9);
                }
                iprot.readListEnd();
              }
              struct.setValidationResultListIsSet(true);
            } else { 
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      struct.validate();
    }

    public void write(org.apache.thrift.protocol.TProtocol oprot, ValidationResults struct) throws org.apache.thrift.TException {
      struct.validate();

      oprot.writeStructBegin(STRUCT_DESC);
      oprot.writeFieldBegin(VALIDATION_STATE_FIELD_DESC);
      oprot.writeBool(struct.validationState);
      oprot.writeFieldEnd();
      if (struct.validationResultList != null) {
        oprot.writeFieldBegin(VALIDATION_RESULT_LIST_FIELD_DESC);
        {
          oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.validationResultList.size()));
          for (ValidatorResult _iter11 : struct.validationResultList)
          {
            _iter11.write(oprot);
          }
          oprot.writeListEnd();
        }
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }

  }

  private static class ValidationResultsTupleSchemeFactory implements SchemeFactory {
    public ValidationResultsTupleScheme getScheme() {
      return new ValidationResultsTupleScheme();
    }
  }

  private static class ValidationResultsTupleScheme extends TupleScheme<ValidationResults> {

    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, ValidationResults struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      oprot.writeBool(struct.validationState);
      {
        oprot.writeI32(struct.validationResultList.size());
        for (ValidatorResult _iter12 : struct.validationResultList)
        {
          _iter12.write(oprot);
        }
      }
    }

    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, ValidationResults struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      struct.validationState = iprot.readBool();
      struct.setValidationStateIsSet(true);
      {
        org.apache.thrift.protocol.TList _list13 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
        struct.validationResultList = new ArrayList<ValidatorResult>(_list13.size);
        ValidatorResult _elem14;
        for (int _i15 = 0; _i15 < _list13.size; ++_i15)
        {
          _elem14 = new ValidatorResult();
          _elem14.read(iprot);
          struct.validationResultList.add(_elem14);
        }
      }
      struct.setValidationResultListIsSet(true);
    }
  }

}
/*
 * This file is part of the DITA Open Toolkit project.
 *
 * Copyright 2004, 2005 IBM Corporation
 *
 * See the accompanying LICENSE file for applicable license.
 */
package org.dita.dost.reader;

import static javax.xml.transform.OutputKeys.*;
import static org.dita.dost.util.Constants.*;
import static org.dita.dost.util.URLUtils.*;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.OutputStream;
import java.net.URI;
import java.util.Stack;

import javax.xml.transform.TransformerFactory;
import javax.xml.transform.sax.SAXTransformerFactory;
import javax.xml.transform.sax.TransformerHandler;
import javax.xml.transform.stream.StreamResult;

import org.xml.sax.helpers.XMLFilterImpl;
import org.xml.sax.helpers.AttributesImpl;
import org.dita.dost.log.DITAOTLogger;
import org.dita.dost.log.MessageUtils;
import org.dita.dost.util.FileUtils;
import org.dita.dost.util.Job;
import org.dita.dost.util.Job.FileInfo;
import org.dita.dost.util.MergeUtils;
import org.dita.dost.util.URLUtils;
import org.dita.dost.util.XMLUtils;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;

/**
 * MergeMapParser reads the ditamap file after preprocessing and merges
 * different files into one intermediate result. It calls MergeTopicParser
 * to process the topic file. Instances are reusable but not thread-safe.
 */
public final class MergeMapParser extends XMLFilterImpl {

    /** Attribute recording the id of the first topic merged for a topicref. */
    private static final String ATTRIBUTE_NAME_FIRST_TOPIC_ID = "first_topic_id";
    /** Attribute preserving the original (pre-merge) href value. */
    public static final String ATTRIBUTE_NAME_OHREF = "ohref";
    /** Attribute preserving the original id value. */
    public static final String ATTRIBUTE_NAME_OID = "oid";

    // Parses referenced topics; its output is accumulated in topicBuffer.
    private final MergeTopicParser topicParser;
    // Tracks visited targets and their assigned topic ids across the merge.
    private final MergeUtils util;
    // Directory containing the map currently being read.
    private File dirPath = null;
    // Temporary directory; defaults to the map's directory when none is given.
    private File tempdir = null;
    // Stack of @processing-role values for the currently open elements.
    private final Stack<String> processStack;
    // Depth counter used to pop processStack at the matching end tag.
    private int processLevel;
    // Buffer holding serialized merged topics, appended after the map output.
    private final ByteArrayOutputStream topicBuffer;
    private final SAXTransformerFactory stf;
    // Destination for the merged result; set via setOutputStream before read().
    private OutputStream output;
    private DITAOTLogger logger;
    private Job job;

    /**
     * Default constructor. Wires the topic parser to serialize into
     * {@link #topicBuffer} (without an XML declaration).
     *
     * @throws RuntimeException if no SAX-capable TransformerFactory is available
     */
    public MergeMapParser() {
        processStack = new Stack<>();
        processLevel = 0;
        util = new MergeUtils();
        topicParser = new MergeTopicParser(util);
        topicBuffer = new ByteArrayOutputStream();
        try {
            final TransformerFactory tf = TransformerFactory.newInstance();
            if (!tf.getFeature(SAXTransformerFactory.FEATURE)) {
                throw new RuntimeException("SAX transformation factory not supported");
            }
            stf = (SAXTransformerFactory) tf;
            final TransformerHandler s = stf.newTransformerHandler();
            s.getTransformer().setOutputProperty(OMIT_XML_DECLARATION, "yes");
            s.setResult(new StreamResult(topicBuffer));
            topicParser.setContentHandler(s);
        } catch (final RuntimeException e) {
            throw e;
        } catch (final Exception e) {
            throw new RuntimeException("Failed to initialize XML parser: " + e.getMessage(), e);
        }
    }

    /** Sets the logger on this parser and both of its collaborators. */
    public final void setLogger(final DITAOTLogger logger) {
        this.logger = logger;
        util.setLogger(logger);
        topicParser.setLogger(logger);
    }

    /** Sets the job configuration on this parser and both of its collaborators. */
    public final void setJob(final Job job) {
        this.job = job;
        util.setJob(job);
        topicParser.setJob(job);
    }

    /**
     * Set merge output file
     *
     * @param outputFile merge output file
     */
    public void setOutput(final File outputFile) {
        topicParser.setOutput(outputFile);
    }

    /**
     * Set output.
     *
     * @param output output stream
     */
    public void setOutputStream(final OutputStream output) {
        this.output = output;
    }

    /**
     * Read map.
     *
     * Transforms the map through this filter, then appends the buffered
     * merged-topic output to {@link #output}. Non-runtime failures are
     * logged rather than rethrown.
     *
     * @param filename map file path
     * @param tmpDir temporary directory path, may be {@code null}
     */
    public void read(final File filename, final File tmpDir) {
        tempdir = tmpDir != null ? tmpDir : filename.getParentFile();
        try {
            final TransformerHandler s = stf.newTransformerHandler();
            s.getTransformer().setOutputProperty(OMIT_XML_DECLARATION, "yes");
            s.setResult(new StreamResult(output));
            setContentHandler(s);
            dirPath = filename.getParentFile();
            topicParser.getContentHandler().startDocument();
            logger.info("Processing " + filename.toURI());
            job.getStore().transform(filename.toURI(), this);
            topicParser.getContentHandler().endDocument();
            output.write(topicBuffer.toByteArray());
        } catch (final RuntimeException e) {
            throw e;
        } catch (final Exception e) {
            logger.error(e.getMessage(), e);
        }
    }

    /**
     * Pops the processing-role stack when leaving its element and suppresses
     * the end tag of any resource-only subtree.
     */
    @Override
    public void endElement(final String uri, final String localName, final String qName) throws SAXException {
        if (processLevel > 0) {
            String value = processStack.peek();
            if (processLevel == processStack.size()) {
                // This end tag matches the element that pushed the value.
                value = processStack.pop();
            }
            processLevel--;
            if (ATTR_PROCESSING_ROLE_VALUE_RESOURCE_ONLY.equals(value)) {
                return;
            }
        }
        getContentHandler().endElement(uri, localName, qName);
    }

    /** Forwards character data unless inside a resource-only subtree. */
    @Override
    public void characters(final char[] ch, final int start, final int length) throws SAXException {
        if (processStack.empty() || !ATTR_PROCESSING_ROLE_VALUE_RESOURCE_ONLY.equals(processStack.peek())) {
            getContentHandler().characters(ch, start, length);
        }
    }

    /**
     * Tracks processing-role state, and for local DITA topicrefs rewrites the
     * href to a fragment pointing at the merged topic's id, parsing the target
     * topic on first visit. Resource-only subtrees are suppressed entirely.
     */
    @Override
    public void startElement(final String uri, final String localName, final String qName, final Attributes attributes) throws SAXException {
        final String attrValue = attributes.getValue(ATTRIBUTE_NAME_PROCESSING_ROLE);
        if (attrValue != null) {
            processStack.push(attrValue);
            processLevel++;
            // @processing-role='resource-only'
            if (ATTR_PROCESSING_ROLE_VALUE_RESOURCE_ONLY.equals(attrValue)) {
                return;
            }
        } else if (processLevel > 0) {
            processLevel++;
            // Child of @processing-role='resource-only'
            if (ATTR_PROCESSING_ROLE_VALUE_RESOURCE_ONLY.equals(processStack.peek())) {
                return;
            }
        }
        AttributesImpl atts = null;
        if (MAP_TOPICREF.matches(attributes)) {
            URI attValue = toURI(attributes.getValue(ATTRIBUTE_NAME_HREF));
            if (attValue != null) {
                atts = new AttributesImpl(attributes);
                final String scopeValue = atts.getValue(ATTRIBUTE_NAME_SCOPE);
                final String formatValue = atts.getValue(ATTRIBUTE_NAME_FORMAT);
                // Only local DITA targets are merged; peer/external refs pass through.
                if ((scopeValue == null || ATTR_SCOPE_VALUE_LOCAL.equals(scopeValue)) && (formatValue == null || ATTR_FORMAT_VALUE_DITA.equals(formatValue))) {
                    final URI ohref = attValue;
                    // @copy-to wins over @href as the effective target.
                    final URI copyToValue = toURI(atts.getValue(ATTRIBUTE_NAME_COPY_TO));
                    if (copyToValue != null && !copyToValue.toString().isEmpty()) {
                        attValue = copyToValue;
                    }
                    final URI absTarget = URLUtils.toDirURI(dirPath).resolve(attValue);
                    XMLUtils.addOrSetAttribute(atts, ATTRIBUTE_NAME_OHREF, ohref.toString());
                    if (util.isVisited(absTarget)) {
                        // Already merged: point at the id recorded on first visit.
                        attValue = toURI(SHARP + util.getIdValue(absTarget));
                    } else {
                        //parse the topic
                        final URI p = stripFragment(attValue).normalize();
                        util.visit(absTarget);
                        final File f = new File(stripFragment(absTarget));
                        if (job.getStore().exists(f.toURI())) {
                            topicParser.parse(toFile(p).getPath(), dirPath);
                            final String fileId = topicParser.getFirstTopicId();
                            if (util.getIdValue(absTarget) == null) {
                                util.addId(absTarget, fileId);
                            }
                            // Record an id for the fragment-less URI too, so later
                            // refs without a fragment resolve to the same topic.
                            if (attValue.getFragment() != null && util.getIdValue(stripFragment(absTarget)) == null) {
                                util.addId(stripFragment(absTarget), fileId);
                            }
                            final URI firstTopicId = toURI(SHARP + fileId);
                            if (util.getIdValue(absTarget) != null) {
                                attValue = toURI(SHARP + util.getIdValue(absTarget));
                            } else {
                                attValue = firstTopicId;
                            }
                            XMLUtils.addOrSetAttribute(atts, ATTRIBUTE_NAME_FIRST_TOPIC_ID, firstTopicId.toString());
                        } else {
                            final URI fileName = URLUtils.toDirURI(dirPath).resolve(attValue);
                            logger.error(MessageUtils.getMessage("DOTX008E", fileName.toString()).toString());
                        }
                    }
                }
                XMLUtils.addOrSetAttribute(atts, ATTRIBUTE_NAME_HREF, attValue.toString());
            }
        }
        getContentHandler().startElement(uri, localName, qName, atts != null ? atts : attributes);
    }

    /**
     * After the map is processed, merges every job target file that was not
     * already visited via a topicref (skipping resource-only files), then
     * closes the downstream document.
     */
    @Override
    public void endDocument() throws SAXException {
        // read href dita topic list
        // compare visitedSet with the list
        // if list item not in visitedSet then call MergeTopicParser to parse it
        try {
            for (final FileInfo f: job.getFileInfo()) {
                if (f.isTarget) {
                    String element = f.file.getPath();
                    if (!dirPath.equals(tempdir)) {
                        // Rebase the temp-dir-relative path onto the map's directory.
                        element = FileUtils.getRelativeUnixPath(new File(dirPath,"a.ditamap").getAbsolutePath(), new File(tempdir, element).getAbsolutePath());
                    }
                    final URI abs = job.tempDirURI.resolve(f.uri);
                    if (!util.isVisited(abs)) {
                        util.visit(abs);
                        if (!f.isResourceOnly) {
                            //ensure the file exists
                            final File file = new File(dirPath, element);
                            if (job.getStore().exists(file.toURI())) {
                                topicParser.parse(element, dirPath);
                            } else {
                                final String fileName = file.getAbsolutePath();
                                logger.debug(MessageUtils.getMessage("DOTX008E", fileName).toString());
                            }
                        }
                    }
                }
            }
        } catch (final RuntimeException e) {
            throw e;
        } catch (final Exception e) {
            logger.error(e.getMessage(), e);
        }
        getContentHandler().endDocument();
    }
}
package org.schema.api.model.thing.event;

import org.schema.api.model.thing.Person;
import org.schema.api.model.thing.Thing;
import org.schema.api.model.thing.creativeWork.CreativeWork;
import org.schema.api.model.thing.creativeWork.review.Review;
import org.schema.api.model.thing.intangible.audience.Audience;
import org.schema.api.model.thing.intangible.enumeration.EventStatusType;
import org.schema.api.model.thing.intangible.offer.Offer;
import org.schema.api.model.thing.intangible.quantity.Duration;
import org.schema.api.model.thing.intangible.rating.AggregateRating;
import org.schema.api.model.thing.organization.Organization;

import java.util.Date;

/**
 * Schema.org {@code SaleEvent}: an event promoting the sale of goods.
 * <p>
 * Plain data holder: each property is a private field exposed through a
 * conventional getter/setter pair. Where schema.org allows several types for a
 * property, the narrower Java type chosen here is noted on the field.
 */
public class SaleEvent extends Event {

    /** The subject matter of the event. */
    private Thing about;
    /** An actor performing at the event. */
    private Person actor;
    /** Overall rating based on a collection of reviews. */
    private AggregateRating aggregateRating;
    /** A participant of the event. Schema.org allows: Organization, Person. */
    private Organization attendee;
    /** Intended audience. Schema.org allows: Audience, serviceAudience. */
    private Audience audience;
    /** Composer of the featured work. Schema.org allows: Organization, Person. */
    private Organization composer;
    /** A secondary contributor. Schema.org allows: Organization, Person. */
    private Organization contributor;
    /** The director of the featured work. */
    private Person director;
    /** Admission time of the event. */
    private Date doorTime;
    /** Event duration. Schema.org allows: Duration, ISO 8601 date format. */
    private Duration duration;
    /** End of the event. Schema.org allows: Date, DateTime. */
    private Date endDate;
    /** Status of the event (scheduled, cancelled, ...). */
    private EventStatusType eventStatus;
    /** Entity funding the event. Schema.org allows: Organization, Person. */
    private Organization funder;
    /** Language of the content. Schema.org allows: Language, Text. */
    private String inLanguage;
    /** Whether the event is free. Schema.org allows: Boolean, free. */
    private Boolean isAccessibleForFree;
    /** Event location. Schema.org allows: Place, PostalAddress, Text. */
    private String location;
    /** Total attendee capacity. */
    private Integer maximumAttendeeCapacity;
    /** An offer to attend the event. */
    private Offer offers;
    /** Event organizer. Schema.org allows: Organization, Person. */
    private Organization organizer;
    /** A performer at the event. Schema.org allows: Organization, Person. */
    private Organization performer;
    /** Start date prior to rescheduling. */
    private Date previousStartDate;
    /** Work the event is recorded in. Schema.org allows: CreativeWork, recordedAt. */
    private CreativeWork recordedIn;
    /** Remaining attendee capacity. */
    private Integer remainingAttendeeCapacity;
    /** A review of the event. Schema.org allows: Review, reviews. */
    private Review review;
    /** Event sponsor. Schema.org allows: Organization, Person. */
    private Organization sponsor;
    /** Start of the event. Schema.org allows: Date, DateTime. */
    private Date startDate;
    /** A contained event. Schema.org allows: Event, superEvent. */
    private Event subEvent;
    /** The containing event. Schema.org allows: Event, subEvent. */
    private Event superEvent;
    /** Translator of the work. Schema.org allows: Organization, Person. */
    private Organization translator;
    /** Typical age range, e.g. "7-9". */
    private String typicalAgeRange;
    /** A work featured at the event. */
    private CreativeWork workFeatured;
    /** A work performed at the event. */
    private CreativeWork workPerformed;

    public Thing getAbout() {
        return about;
    }

    public void setAbout(Thing about) {
        this.about = about;
    }

    public Person getActor() {
        return actor;
    }

    public void setActor(Person actor) {
        this.actor = actor;
    }

    public AggregateRating getAggregateRating() {
        return aggregateRating;
    }

    public void setAggregateRating(AggregateRating aggregateRating) {
        this.aggregateRating = aggregateRating;
    }

    public Organization getAttendee() {
        return attendee;
    }

    public void setAttendee(Organization attendee) {
        this.attendee = attendee;
    }

    public Audience getAudience() {
        return audience;
    }

    public void setAudience(Audience audience) {
        this.audience = audience;
    }

    public Organization getComposer() {
        return composer;
    }

    public void setComposer(Organization composer) {
        this.composer = composer;
    }

    public Organization getContributor() {
        return contributor;
    }

    public void setContributor(Organization contributor) {
        this.contributor = contributor;
    }

    public Person getDirector() {
        return director;
    }

    public void setDirector(Person director) {
        this.director = director;
    }

    public Date getDoorTime() {
        return doorTime;
    }

    public void setDoorTime(Date doorTime) {
        this.doorTime = doorTime;
    }

    public Duration getDuration() {
        return duration;
    }

    public void setDuration(Duration duration) {
        this.duration = duration;
    }

    public Date getEndDate() {
        return endDate;
    }

    public void setEndDate(Date endDate) {
        this.endDate = endDate;
    }

    public EventStatusType getEventStatus() {
        return eventStatus;
    }

    public void setEventStatus(EventStatusType eventStatus) {
        this.eventStatus = eventStatus;
    }

    public Organization getFunder() {
        return funder;
    }

    public void setFunder(Organization funder) {
        this.funder = funder;
    }

    public String getInLanguage() {
        return inLanguage;
    }

    public void setInLanguage(String inLanguage) {
        this.inLanguage = inLanguage;
    }

    public Boolean getIsAccessibleForFree() {
        return isAccessibleForFree;
    }

    public void setIsAccessibleForFree(Boolean isAccessibleForFree) {
        this.isAccessibleForFree = isAccessibleForFree;
    }

    public String getLocation() {
        return location;
    }

    public void setLocation(String location) {
        this.location = location;
    }

    public Integer getMaximumAttendeeCapacity() {
        return maximumAttendeeCapacity;
    }

    public void setMaximumAttendeeCapacity(Integer maximumAttendeeCapacity) {
        this.maximumAttendeeCapacity = maximumAttendeeCapacity;
    }

    public Offer getOffers() {
        return offers;
    }

    public void setOffers(Offer offers) {
        this.offers = offers;
    }

    public Organization getOrganizer() {
        return organizer;
    }

    public void setOrganizer(Organization organizer) {
        this.organizer = organizer;
    }

    public Organization getPerformer() {
        return performer;
    }

    public void setPerformer(Organization performer) {
        this.performer = performer;
    }

    public Date getPreviousStartDate() {
        return previousStartDate;
    }

    public void setPreviousStartDate(Date previousStartDate) {
        this.previousStartDate = previousStartDate;
    }

    public CreativeWork getRecordedIn() {
        return recordedIn;
    }

    public void setRecordedIn(CreativeWork recordedIn) {
        this.recordedIn = recordedIn;
    }

    public Integer getRemainingAttendeeCapacity() {
        return remainingAttendeeCapacity;
    }

    public void setRemainingAttendeeCapacity(Integer remainingAttendeeCapacity) {
        this.remainingAttendeeCapacity = remainingAttendeeCapacity;
    }

    public Review getReview() {
        return review;
    }

    public void setReview(Review review) {
        this.review = review;
    }

    public Organization getSponsor() {
        return sponsor;
    }

    public void setSponsor(Organization sponsor) {
        this.sponsor = sponsor;
    }

    public Date getStartDate() {
        return startDate;
    }

    public void setStartDate(Date startDate) {
        this.startDate = startDate;
    }

    public Event getSubEvent() {
        return subEvent;
    }

    public void setSubEvent(Event subEvent) {
        this.subEvent = subEvent;
    }

    public Event getSuperEvent() {
        return superEvent;
    }

    public void setSuperEvent(Event superEvent) {
        this.superEvent = superEvent;
    }

    public Organization getTranslator() {
        return translator;
    }

    public void setTranslator(Organization translator) {
        this.translator = translator;
    }

    public String getTypicalAgeRange() {
        return typicalAgeRange;
    }

    public void setTypicalAgeRange(String typicalAgeRange) {
        this.typicalAgeRange = typicalAgeRange;
    }

    public CreativeWork getWorkFeatured() {
        return workFeatured;
    }

    public void setWorkFeatured(CreativeWork workFeatured) {
        this.workFeatured = workFeatured;
    }

    public CreativeWork getWorkPerformed() {
        return workPerformed;
    }

    public void setWorkPerformed(CreativeWork workPerformed) {
        this.workPerformed = workPerformed;
    }
}
/*
 * Artifactory is a binaries repository manager.
 * Copyright (C) 2012 JFrog Ltd.
 *
 * Artifactory is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Artifactory is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Artifactory. If not, see <http://www.gnu.org/licenses/>.
 */
package org.artifactory.search.deployable;

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.apache.commons.lang.StringUtils;
import org.artifactory.api.module.ModuleInfo;
import org.artifactory.api.module.ModuleInfoBuilder;
import org.artifactory.api.module.VersionUnit;
import org.artifactory.api.search.VersionSearchResults;
import org.artifactory.api.search.deployable.VersionUnitSearchControls;
import org.artifactory.api.search.deployable.VersionUnitSearchResult;
import org.artifactory.api.security.AuthorizationService;
import org.artifactory.aql.AqlService;
import org.artifactory.aql.api.domain.sensitive.AqlApiItem;
import org.artifactory.aql.api.internal.AqlBase;
import org.artifactory.aql.model.AqlFieldEnum;
import org.artifactory.aql.result.AqlLazyResult;
import org.artifactory.aql.util.AqlSearchablePath;
import org.artifactory.aql.util.AqlUtils;
import org.artifactory.common.ConstantValues;
import org.artifactory.repo.Repo;
import org.artifactory.repo.RepoPath;
import org.artifactory.search.SearcherBase;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.ResultSet;
import java.util.List;
import java.util.Set;

/**
 * Holds the version unit search logic
 *
 * @author Noam Y. Tenne
 * @author Dan Feldman
 */
public class VersionUnitSearcher extends SearcherBase<VersionUnitSearchControls, VersionUnitSearchResult> {
    private static final Logger log = LoggerFactory.getLogger(VersionUnitSearcher.class);

    //The search will stop in any case if this threshold is exceeded (above the declared query limit)
    public static final int EXCEEDED_QUERY_THRESHOLD = 500;

    //5K (default) records limit
    public static final int QUERY_LIMIT = 5 * ConstantValues.searchUserQueryLimit.getInt();

    // Injected collaborators; never reassigned after construction.
    private final AqlService aqlService;
    private final AuthorizationService authorizationService;

    // Per-search state: version unit -> the artifact paths that belong to it.
    // NOTE: this makes instances single-use; a second doSearch() call would
    // accumulate onto the previous results (pre-existing design, unchanged).
    private final HashMultimap<ModuleInfo, RepoPath> moduleInfoToRepoPaths = HashMultimap.create();
    private Repo repo = null;
    private boolean resultsWereFiltered = false;
    private boolean searchHadErrors = false;

    public VersionUnitSearcher(AqlService aqlService, AuthorizationService authorizationService) {
        super();
        this.aqlService = aqlService;
        this.authorizationService = authorizationService;
    }

    /**
     * Searches the folder referenced by the controls (and its subfolders) and
     * groups the found artifacts into version units.
     *
     * @param controls holds the repo path to search within
     * @return aggregated results, including flags for filtering, limit overrun and errors
     */
    @Override
    public VersionSearchResults doSearch(VersionUnitSearchControls controls) {
        List<AqlSearchablePath> pathsToSearch = AqlUtils.getSearchablePathForCurrentFolderAndSubfolders(
                controls.getPathToSearchWithin());
        String repoKey = controls.getPathToSearchWithin().getRepoKey();
        repo = getRepoService().repositoryByKey(repoKey);
        if (repo == null || pathsToSearch.isEmpty()) {
            log.error(repo == null ? "No such repo '" + repoKey + "' to search in"
                    : "path '" + controls.getPathToSearchWithin() + "' does not exist");
            return new VersionSearchResults(Sets.newHashSet(), 0, false, false, true);
        }
        List<RepoPath> results = searchPathsForFiles(pathsToSearch);
        results.forEach(this::createModuleInfoAndInsertToMap);
        Set<VersionUnitSearchResult> versions = getVersionUnitResults();
        //Query exceeded allowed limit, should warn user
        boolean exceededLimit = results.size() > QUERY_LIMIT;
        return new VersionSearchResults(versions, versions.size(), resultsWereFiltered, exceededLimit,
                searchHadErrors);
    }

    /**
     * Runs the AQL query for the given paths and collects matching repo paths,
     * stopping once {@link #shouldStopSearch} says the limit conditions are met.
     * Query errors are recorded in {@link #searchHadErrors} and partial results
     * are returned.
     */
    private List<RepoPath> searchPathsForFiles(List<AqlSearchablePath> pathsToSearch) {
        List<RepoPath> foundPaths = Lists.newArrayList();
        AqlBase.OrClause pathSearchClause = AqlUtils.getSearchClauseForPaths(pathsToSearch);
        int currentRowNum = 0;
        // Sorted by path so that all files of one version arrive consecutively;
        // shouldStopSearch() relies on this ordering.
        AqlApiItem versionSearch = AqlApiItem.create().filter(pathSearchClause).sortBy(AqlFieldEnum.itemPath).asc();
        long start = System.currentTimeMillis();
        AqlLazyResult results = aqlService.executeQueryLazy(versionSearch);
        ResultSet resultSet = results.getResultSet();
        try {
            RepoPath lastPath = null;
            while (resultSet.next()) {
                String repo = resultSet.getString("repo");
                String path = resultSet.getString("node_path");
                String name = resultSet.getString("node_name");
                if (StringUtils.isBlank(repo) || StringUtils.isBlank(path) || StringUtils.isBlank(name)) {
                    log.debug("Got bad item info from query row: repo: {}, path: {}, name: {}", repo, path, name);
                    continue;
                }
                RepoPath currentPath = AqlUtils.fromAql(repo, path, name);
                currentRowNum++;
                if (shouldStopSearch(currentRowNum, lastPath, currentPath)) {
                    log.trace("Stopping version search, conditions met. current row is {}, limit is {}, "
                            + "current path is {}, last path was {}", currentRowNum, QUERY_LIMIT, currentPath, lastPath);
                    break;
                }
                lastPath = currentPath;
                foundPaths.add(currentPath);
            }
        } catch (Exception e) {
            log.error("Error executing version search query: {}", e.getMessage());
            log.debug("Caught exception while executing version search query: ", e);
            searchHadErrors = true;
        } finally {
            AqlUtils.closeResultSet(resultSet);
        }
        // Only claim success when no error was recorded above.
        if (!searchHadErrors) {
            log.trace("Version search finished successfully, took {} ms", System.currentTimeMillis() - start);
        }
        return foundPaths;
    }

    /**
     * This mechanism tries to include all found artifacts in the current path before terminating the search to
     * catch edge cases where the limit would cause only some of the version's files to be returned.
     * In any case going over the threshold will terminate the search so that it doesn't go over very very large trees.
     */
    private boolean shouldStopSearch(int currentRowNum, RepoPath lastPath, RepoPath currentPath) {
        //limit + 1 to signify search exceeded max allowable results
        return ((currentRowNum > QUERY_LIMIT + 1) && canStopSearch(lastPath, currentPath))
                || (currentRowNum > QUERY_LIMIT + EXCEEDED_QUERY_THRESHOLD);
    }

    /** True when the current row crossed into a different parent folder than the previous row. */
    private boolean canStopSearch(RepoPath lastPath, RepoPath currentPath) {
        if (lastPath == null || currentPath == null || lastPath.isRoot() || currentPath.isRoot()
                || lastPath.getParent() == null || currentPath.getParent() == null) {
            return false;
        }
        return !lastPath.getParent().getPath().equalsIgnoreCase(currentPath.getParent().getPath());
    }

    /** Derives the module info for a path and, when valid, records the pair in the multimap. */
    private void createModuleInfoAndInsertToMap(RepoPath path) {
        ModuleInfo moduleInfo = repo.getItemModuleInfo(path.getPath());
        if (moduleInfo.isValid()) {
            ModuleInfo stripped = stripModuleInfoFromUnnecessaryData(moduleInfo);
            moduleInfoToRepoPaths.put(stripped, path);
        }
    }

    /** Builds one result per collected module, dropping modules filtered out by permissions. */
    private Set<VersionUnitSearchResult> getVersionUnitResults() {
        Set<VersionUnitSearchResult> searchResults = Sets.newHashSet();
        for (ModuleInfo moduleInfo : moduleInfoToRepoPaths.keySet()) {
            VersionUnitSearchResult result = buildSearchResult(moduleInfo);
            if (result != null) { //Build function might return nulls
                searchResults.add(result);
            }
        }
        return searchResults;
    }

    /**
     * Builds a search result for the module, or returns {@code null} (and flags
     * {@link #resultsWereFiltered}) when the current user may not delete every
     * file of the module.
     */
    private VersionUnitSearchResult buildSearchResult(ModuleInfo moduleInfo) {
        Set<RepoPath> modulePaths = moduleInfoToRepoPaths.get(moduleInfo);
        //User doesn't have permissions to delete some \ all files of this module - don't return a result for it
        if (!modulePaths.stream().allMatch(authorizationService::canDelete)) {
            resultsWereFiltered = true; //warn user
            log.debug("Auth service filtered results for user {}, and module {}",
                    authorizationService.currentUsername(), moduleInfo.getPrettyModuleId());
            return null;
        }
        return new VersionUnitSearchResult(new VersionUnit(moduleInfo, Sets.newHashSet(modulePaths)));
    }

    /**
     * Keeps only organization/module/base-revision (plus normalized integration
     * revisions) so that all files of a version collapse onto one map key.
     */
    private ModuleInfo stripModuleInfoFromUnnecessaryData(ModuleInfo moduleInfo) {
        ModuleInfoBuilder moduleInfoBuilder = new ModuleInfoBuilder().organization(moduleInfo.getOrganization()).
                module(moduleInfo.getModule()).baseRevision(moduleInfo.getBaseRevision());
        if (moduleInfo.isIntegration()) {
            String pathRevision = moduleInfo.getFolderIntegrationRevision();
            String artifactRevision = moduleInfo.getFileIntegrationRevision();
            boolean hasPathRevision = StringUtils.isNotBlank(pathRevision);
            boolean hasArtifactRevision = StringUtils.isNotBlank(artifactRevision);
            if (hasPathRevision && !hasArtifactRevision) {
                // Mirror the folder revision onto the file revision so keys match.
                moduleInfoBuilder.folderIntegrationRevision(pathRevision);
                moduleInfoBuilder.fileIntegrationRevision(pathRevision);
            } else if (!hasPathRevision && hasArtifactRevision) {
                moduleInfoBuilder.fileIntegrationRevision(artifactRevision);
                moduleInfoBuilder.folderIntegrationRevision(artifactRevision);
            } else {
                moduleInfoBuilder.folderIntegrationRevision(pathRevision);
                moduleInfoBuilder.fileIntegrationRevision(artifactRevision);
            }
        }
        return moduleInfoBuilder.build();
    }
}
/* * ==================================================================== * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. 
* */ package org.apache.http.impl.auth; import java.io.IOException; import java.security.Principal; import org.apache.http.HttpException; import org.apache.http.HttpHost; import org.apache.http.HttpRequest; import org.apache.http.HttpResponse; import org.apache.http.HttpStatus; import org.apache.http.auth.AuthScheme; import org.apache.http.auth.AuthScope; import org.apache.http.auth.Credentials; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.params.AuthPolicy; import org.apache.http.client.params.ClientPNames; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.DefaultHttpClient; import org.apache.http.localserver.BasicServerTestBase; import org.apache.http.localserver.LocalTestServer; import org.apache.http.message.BasicHeader; import org.apache.http.params.HttpParams; import org.apache.http.protocol.HttpContext; import org.apache.http.protocol.HttpRequestHandler; import org.apache.http.util.EntityUtils; import org.ietf.jgss.GSSContext; import org.ietf.jgss.GSSCredential; import org.ietf.jgss.GSSManager; import org.ietf.jgss.GSSName; import org.ietf.jgss.Oid; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.mockito.Matchers; import org.mockito.Mockito; /** * Tests for {@link NegotiateScheme}. */ public class TestSPNegoScheme extends BasicServerTestBase { @Before public void setUp() throws Exception { this.localServer = new LocalTestServer(null, null); this.localServer.registerDefaultHandlers(); this.localServer.start(); this.httpclient = new DefaultHttpClient(); } /** * This service will continue to ask for authentication. 
*/ private static class PleaseNegotiateService implements HttpRequestHandler { public void handle( final HttpRequest request, final HttpResponse response, final HttpContext context) throws HttpException, IOException { response.setStatusCode(HttpStatus.SC_UNAUTHORIZED); response.addHeader(new BasicHeader("WWW-Authenticate", "Negotiate blablabla")); response.addHeader(new BasicHeader("Connection", "Keep-Alive")); response.setEntity(new StringEntity("auth required ")); } } /** * NegotatieScheme with a custom GSSManager that does not require any Jaas or * Kerberos configuration. * */ private static class NegotiateSchemeWithMockGssManager extends SPNegoScheme { GSSManager manager = Mockito.mock(GSSManager.class); GSSName name = Mockito.mock(GSSName.class); GSSContext context = Mockito.mock(GSSContext.class); NegotiateSchemeWithMockGssManager() throws Exception { super(true); Mockito.when(context.initSecContext( Matchers.any(byte[].class), Matchers.anyInt(), Matchers.anyInt())) .thenReturn("12345678".getBytes()); Mockito.when(manager.createName( Matchers.any(String.class), Matchers.any(Oid.class))) .thenReturn(name); Mockito.when(manager.createContext( Matchers.any(GSSName.class), Matchers.any(Oid.class), Matchers.any(GSSCredential.class), Matchers.anyInt())) .thenReturn(context); } @Override protected GSSManager getManager() { return manager; } } private static class UseJaasCredentials implements Credentials { public String getPassword() { return null; } public Principal getUserPrincipal() { return null; } } private static class NegotiateSchemeFactoryWithMockGssManager extends SPNegoSchemeFactory { NegotiateSchemeWithMockGssManager scheme; NegotiateSchemeFactoryWithMockGssManager() throws Exception { scheme = new NegotiateSchemeWithMockGssManager(); } @Override public AuthScheme newInstance(HttpParams params) { return scheme; } } /** * Tests that the client will stop connecting to the server if * the server still keep asking for a valid ticket. 
*/ @Test public void testDontTryToAuthenticateEndlessly() throws Exception { int port = this.localServer.getServiceAddress().getPort(); this.localServer.register("*", new PleaseNegotiateService()); HttpHost target = new HttpHost("localhost", port); SPNegoSchemeFactory nsf = new NegotiateSchemeFactoryWithMockGssManager(); this.httpclient.getAuthSchemes().register(AuthPolicy.SPNEGO, nsf); Credentials use_jaas_creds = new UseJaasCredentials(); this.httpclient.getCredentialsProvider().setCredentials( new AuthScope(null, -1, null), use_jaas_creds); this.httpclient.getParams().setParameter(ClientPNames.DEFAULT_HOST, target); String s = "/path"; HttpGet httpget = new HttpGet(s); HttpResponse response = this.httpclient.execute(httpget); EntityUtils.consume(response.getEntity()); Assert.assertEquals(HttpStatus.SC_UNAUTHORIZED, response.getStatusLine().getStatusCode()); } /** * Javadoc specifies that {@link GSSContext#initSecContext(byte[], int, int)} can return null * if no token is generated. Client should be able to deal with this response. */ @Test public void testNoTokenGeneratedError() throws Exception { int port = this.localServer.getServiceAddress().getPort(); this.localServer.register("*", new PleaseNegotiateService()); HttpHost target = new HttpHost("localhost", port); NegotiateSchemeFactoryWithMockGssManager nsf = new NegotiateSchemeFactoryWithMockGssManager(); this.httpclient.getAuthSchemes().register(AuthPolicy.SPNEGO, nsf); Credentials use_jaas_creds = new UseJaasCredentials(); this.httpclient.getCredentialsProvider().setCredentials( new AuthScope(null, -1, null), use_jaas_creds); this.httpclient.getParams().setParameter(ClientPNames.DEFAULT_HOST, target); String s = "/path"; HttpGet httpget = new HttpGet(s); HttpResponse response = this.httpclient.execute(httpget); EntityUtils.consume(response.getEntity()); Assert.assertEquals(HttpStatus.SC_UNAUTHORIZED, response.getStatusLine().getStatusCode()); } }
/* * Copyright (C) 2010 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.developh.misland; import com.developh.misland.CollisionParameters.HitType; import com.developh.misland.GameObject.ActionType; import com.developh.misland.GameObject.Team; import com.developh.misland.GameObjectFactory.GameObjectType; /** * A general-purpose component that responds to dynamic collision notifications. This component * may be configured to produce common responses to hit (taking damage, being knocked back, etc), or * it can be derived for entirely different responses. This component must exist on an object for * that object to respond to dynamic collisions. 
*/ public class HitReactionComponent extends GameComponent { private static final float ATTACK_PAUSE_DELAY = (1.0f / 60) * 4; private final static float DEFAULT_BOUNCE_MAGNITUDE = 200.0f; private final static float EVENT_SEND_DELAY = 5.0f; private boolean mPauseOnAttack; private float mPauseOnAttackTime; private boolean mBounceOnHit; private float mBounceMagnitude; private float mInvincibleAfterHitTime; private float mLastHitTime; private boolean mInvincible; private boolean mDieOnCollect; private boolean mDieOnAttack; private ChangeComponentsComponent mPossessionComponent; private InventoryComponent.UpdateRecord mInventoryUpdate; private LauncherComponent mLauncherComponent; private int mLauncherHitType; private float mInvincibleTime; private int mGameEventHitType; private int mGameEventOnHit; private int mGameEventIndexData; private float mLastGameEventTime; private boolean mForceInvincibility; private SoundSystem.Sound mTakeHitSound; private SoundSystem.Sound mDealHitSound; private int mDealHitSoundHitType; private int mTakeHitSoundHitType; private GameObjectFactory.GameObjectType mSpawnOnDealHitObjectType; private int mSpawnOnDealHitHitType; private boolean mAlignDealHitObjectToVictimX; private boolean mAlignDealHitObjectToVictimY; public HitReactionComponent() { super(); reset(); setPhase(ComponentPhases.PRE_DRAW.ordinal()); } @Override public void reset() { mPauseOnAttack = false; mPauseOnAttackTime = ATTACK_PAUSE_DELAY; mBounceOnHit = false; mBounceMagnitude = DEFAULT_BOUNCE_MAGNITUDE; mInvincibleAfterHitTime = 0.0f; mInvincible = false; mDieOnCollect = false; mDieOnAttack = false; mPossessionComponent = null; mInventoryUpdate = null; mLauncherComponent = null; mLauncherHitType = HitType.LAUNCH; mInvincibleTime = 0.0f; mGameEventOnHit = -1; mGameEventIndexData = 0; mLastGameEventTime = -1.0f; mGameEventHitType = CollisionParameters.HitType.INVALID; mForceInvincibility = false; mTakeHitSound = null; mDealHitSound = null; mSpawnOnDealHitObjectType = 
GameObjectType.INVALID; mSpawnOnDealHitHitType = CollisionParameters.HitType.INVALID; mDealHitSoundHitType = CollisionParameters.HitType.INVALID; mAlignDealHitObjectToVictimX = false; mAlignDealHitObjectToVictimY = false; } /** Called when this object attacks another object. */ public void hitVictim(GameObject parent, GameObject victim, int hitType, boolean hitAccepted) { if (hitAccepted) { if (mPauseOnAttack && hitType == CollisionParameters.HitType.HIT) { TimeSystem time = sSystemRegistry.timeSystem; time.freeze(mPauseOnAttackTime); } if (mDieOnAttack) { parent.life = 0; } if (hitType == mLauncherHitType && mLauncherComponent != null) { mLauncherComponent.prepareToLaunch(victim, parent); } if (mDealHitSound != null && (hitType == mDealHitSoundHitType || mDealHitSoundHitType == CollisionParameters.HitType.INVALID)) { SoundSystem sound = sSystemRegistry.soundSystem; if (sound != null) { sound.play(mDealHitSound, false, SoundSystem.PRIORITY_NORMAL); } } if (mSpawnOnDealHitObjectType != GameObjectType.INVALID && hitType == mSpawnOnDealHitHitType) { final float x = mAlignDealHitObjectToVictimX ? victim.getPosition().x : parent.getPosition().x; final float y = mAlignDealHitObjectToVictimY ? victim.getPosition().y : parent.getPosition().y; GameObjectFactory factory = sSystemRegistry.gameObjectFactory; GameObjectManager manager = sSystemRegistry.gameObjectManager; if (factory != null) { GameObject object = factory.spawn(mSpawnOnDealHitObjectType, x, y, parent.facingDirection.x < 0.0f); if (object != null && manager != null) { manager.add(object); } } } } } /** Called when this object is hit by another object. 
*/ public boolean receivedHit(GameObject parent, GameObject attacker, int hitType) { final TimeSystem time = sSystemRegistry.timeSystem; final float gameTime = time.getGameTime(); if (mGameEventHitType == hitType && mGameEventHitType != CollisionParameters.HitType.INVALID ) { if (mLastGameEventTime < 0.0f || gameTime > mLastGameEventTime + EVENT_SEND_DELAY) { LevelSystem level = sSystemRegistry.levelSystem; level.sendGameEvent(mGameEventOnHit, mGameEventIndexData, true); } else { // special case. If we're waiting for a hit type to spawn an event and // another event has just happened, eat this hit so we don't miss // the chance to send the event. hitType = CollisionParameters.HitType.INVALID; } mLastGameEventTime = gameTime; } switch(hitType) { case CollisionParameters.HitType.INVALID: break; case CollisionParameters.HitType.HIT: // don't hit our friends, if we have friends. final boolean sameTeam = (parent.team == attacker.team && parent.team != Team.NONE); if (!mForceInvincibility && !mInvincible && parent.life > 0 && !sameTeam) { parent.life -= 1; if (mBounceOnHit && parent.life > 0) { VectorPool pool = sSystemRegistry.vectorPool; Vector2 newVelocity = pool.allocate(parent.getPosition()); newVelocity.subtract(attacker.getPosition()); newVelocity.set(0.5f * Utils.sign(newVelocity.x), 0.5f * Utils.sign(newVelocity.y)); newVelocity.multiply(mBounceMagnitude); parent.setVelocity(newVelocity); parent.getTargetVelocity().zero(); pool.release(newVelocity); } if (mInvincibleAfterHitTime > 0.0f) { mInvincible = true; mInvincibleTime = mInvincibleAfterHitTime; } } else { // Ignore this hit. hitType = CollisionParameters.HitType.INVALID; } break; case CollisionParameters.HitType.DEATH: // respect teams? 
parent.life = 0; break; case CollisionParameters.HitType.COLLECT: if (mInventoryUpdate != null && parent.life > 0) { InventoryComponent attackerInventory = attacker.findByClass(InventoryComponent.class); if (attackerInventory != null) { attackerInventory.applyUpdate(mInventoryUpdate); } } if (mDieOnCollect && parent.life > 0) { parent.life = 0; } break; case CollisionParameters.HitType.POSSESS: if (mPossessionComponent != null && parent.life > 0 && attacker.life > 0) { mPossessionComponent.activate(parent); } else { hitType = CollisionParameters.HitType.INVALID; } break; case CollisionParameters.HitType.LAUNCH: break; default: break; } if (hitType != CollisionParameters.HitType.INVALID) { if (mTakeHitSound != null && hitType == mTakeHitSoundHitType) { SoundSystem sound = sSystemRegistry.soundSystem; if (sound != null) { sound.play(mTakeHitSound, false, SoundSystem.PRIORITY_NORMAL); } } mLastHitTime = gameTime; parent.setCurrentAction(ActionType.HIT_REACT); parent.lastReceivedHitType = hitType; } return hitType != CollisionParameters.HitType.INVALID; } @Override public void update(float timeDelta, BaseObject parent) { GameObject parentObject = (GameObject)parent; TimeSystem time = sSystemRegistry.timeSystem; final float gameTime = time.getGameTime(); if (mInvincible && mInvincibleTime > 0) { if (time.getGameTime() > mLastHitTime + mInvincibleTime) { mInvincible = false; } } // This means that the lastReceivedHitType will persist for two frames, giving all systems // a chance to react. 
if (gameTime - mLastHitTime > timeDelta) { parentObject.lastReceivedHitType = CollisionParameters.HitType.INVALID; } } public void setPauseOnAttack(boolean pause) { mPauseOnAttack = pause; } public void setPauseOnAttackTime(float seconds) { mPauseOnAttackTime = seconds; } public void setBounceOnHit(boolean bounce) { mBounceOnHit = bounce; } public void setBounceMagnitude(float magnitude) { mBounceMagnitude = magnitude; } public void setInvincibleTime(float time) { mInvincibleAfterHitTime = time; } public void setDieWhenCollected(boolean die) { mDieOnCollect = true; } public void setDieOnAttack(boolean die) { mDieOnAttack = die; } public void setInvincible(boolean invincible) { mInvincible = invincible; } public void setPossessionComponent(ChangeComponentsComponent component) { mPossessionComponent = component; } public void setInventoryUpdate(InventoryComponent.UpdateRecord update) { mInventoryUpdate = update; } public void setLauncherComponent(LauncherComponent component, int launchHitType) { mLauncherComponent = component; mLauncherHitType = launchHitType; } public void setSpawnGameEventOnHit(int hitType, int gameFlowEventType, int indexData) { mGameEventHitType = hitType; mGameEventOnHit = gameFlowEventType; mGameEventIndexData = indexData; if (hitType == HitType.INVALID) { // The game event has been cleared, so reset the timer blocking a // subsequent event. 
mLastGameEventTime = -1.0f; } } public final void setForceInvincible(boolean force) { mForceInvincibility = force; } public final void setTakeHitSound(int hitType, SoundSystem.Sound sound) { mTakeHitSoundHitType = hitType; mTakeHitSound = sound; } public final void setDealHitSound(int hitType, SoundSystem.Sound sound) { mDealHitSound = sound; mDealHitSoundHitType = hitType; } public final void setSpawnOnDealHit(int hitType, GameObjectType objectType, boolean alignToVictimX, boolean alignToVicitmY) { mSpawnOnDealHitObjectType = objectType; mSpawnOnDealHitHitType = hitType; mAlignDealHitObjectToVictimX = alignToVictimX; mAlignDealHitObjectToVictimY = alignToVicitmY; } }
/* * Copyright 2010 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.core.reteoo; import org.drools.core.RuleBaseConfiguration; import org.drools.core.base.DroolsQuery; import org.drools.core.base.InternalViewChangedEventListener; import org.drools.core.base.extractors.ArrayElementReader; import org.drools.core.beliefsystem.BeliefSet; import org.drools.core.beliefsystem.jtms.JTMSBeliefSetImpl.MODE; import org.drools.core.common.InternalFactHandle; import org.drools.core.common.InternalWorkingMemory; import org.drools.core.common.InternalWorkingMemoryActions; import org.drools.core.common.LeftTupleSets; import org.drools.core.common.LeftTupleSetsImpl; import org.drools.core.common.Memory; import org.drools.core.common.MemoryFactory; import org.drools.core.common.ObjectStore; import org.drools.core.common.QueryElementFactHandle; import org.drools.core.common.UpdateContext; import org.drools.core.definitions.rule.impl.RuleImpl; import org.drools.core.marshalling.impl.PersisterHelper; import org.drools.core.marshalling.impl.ProtobufInputMarshaller.QueryElementContext; import org.drools.core.marshalling.impl.ProtobufInputMarshaller.TupleKey; import org.drools.core.marshalling.impl.ProtobufMessages; import org.drools.core.phreak.StackEntry; import org.drools.core.reteoo.builder.BuildContext; import org.drools.core.rule.AbductiveQuery; import org.drools.core.rule.Declaration; import org.drools.core.rule.QueryElement; import org.drools.core.rule.QueryImpl; 
import org.drools.core.spi.PropagationContext;
import org.drools.core.util.AbstractBaseLinkedListNode;
import org.kie.api.runtime.rule.Variable;

import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.List;
import java.util.Map;

/**
 * Rete node that invokes a query (unification node). It builds the actual argument
 * array from a template (literals, bound declarations, and unbound Variables),
 * creates a {@link DroolsQuery} fact, and collects query results back into the
 * network through the nested {@link UnificationNodeViewChangedEventListener}.
 */
public class QueryElementNode extends LeftTupleSource
        implements
        LeftTupleSinkNode,
        MemoryFactory {

    private LeftTupleSinkNode previousTupleSinkNode;
    private LeftTupleSinkNode nextTupleSinkNode;

    protected QueryElement queryElement;

    private boolean tupleMemoryEnabled;

    protected boolean openQuery;

    private boolean dataDriven;

    // Template for the query arguments: a mix of literals, Declarations (to be
    // resolved per tuple) and Variable.v placeholders. Copied per call.
    private Object[] argsTemplate;

    public QueryElementNode() {
        // for serialization
    }

    public QueryElementNode(final int id,
                            final LeftTupleSource tupleSource,
                            final QueryElement queryElement,
                            final boolean tupleMemoryEnabled,
                            final boolean openQuery,
                            final BuildContext context) {
        super(id, context);
        setLeftTupleSource(tupleSource);
        this.queryElement = queryElement;
        this.tupleMemoryEnabled = tupleMemoryEnabled;
        this.openQuery = openQuery;
        this.dataDriven = context != null && context.getRule().isDataDriven();
        initMasks( context, tupleSource );
        initArgsTemplate( context );
    }

    /**
     * Copies the query element's arg template, replacing any Class literal with the
     * equivalent class loaded by the current kbase's root ClassLoader.
     */
    private void initArgsTemplate(BuildContext context) {
        Object[] originalArgs = this.queryElement.getArgTemplate();
        argsTemplate = new Object[originalArgs.length];
        for (int i = 0; i < originalArgs.length; i++) {
            if (originalArgs[i] instanceof Class) {
                try {
                    // Class literals have to be normalized to the classes loaded from the current kbase's ClassLoader
                    argsTemplate[i] = context.getKnowledgeBase().getRootClassLoader().loadClass(((Class)originalArgs[i]).getName());
                } catch (ClassNotFoundException e) {
                    throw new RuntimeException(e);
                }
            } else {
                argsTemplate[i] = originalArgs[i];
            }
        }
    }

    public void readExternal(ObjectInput in) throws IOException,
                                            ClassNotFoundException {
        super.readExternal( in );
        queryElement = (QueryElement) in.readObject();
        tupleMemoryEnabled = in.readBoolean();
        openQuery = in.readBoolean();
        dataDriven = in.readBoolean();
        this.argsTemplate = (Object[]) in.readObject();
        // Deserialized Variables must be canonicalized back to the Variable.v singleton.
        for ( int i = 0; i < argsTemplate.length; i++ ) {
            if ( argsTemplate[i] instanceof Variable ) {
                argsTemplate[i] = Variable.v; // we need to reset this as we do == checks later in DroolsQuery
            }
        }
    }

    public void writeExternal(ObjectOutput out) throws IOException {
        super.writeExternal( out );
        out.writeObject( queryElement );
        out.writeBoolean( tupleMemoryEnabled );
        out.writeBoolean( openQuery );
        out.writeBoolean( dataDriven );
        out.writeObject( argsTemplate );
    }

    public void networkUpdated(UpdateContext updateContext) {
        this.leftInput.networkUpdated(updateContext);
    }

    public short getType() {
        return NodeTypeEnums.UnificationNode;
    }

    public boolean isLeftTupleMemoryEnabled() {
        return false;
    }

    public QueryElement getQueryElement() {
        return queryElement;
    }

    public boolean isOpenQuery() {
        return openQuery;
    }

    /**
     * Creates the fact handle that will carry the DroolsQuery object. During session
     * de-marshalling the previously persisted handle (id/recency) is restored from the
     * reader context so handle identity survives a round trip.
     */
    @SuppressWarnings("unchecked")
    public InternalFactHandle createFactHandle(final PropagationContext context,
                                               final InternalWorkingMemory workingMemory,
                                               final LeftTuple leftTuple ) {
        ProtobufMessages.FactHandle _handle = null;
        if( context.getReaderContext() != null ) {
            Map<TupleKey, QueryElementContext> map = (Map<TupleKey, QueryElementContext>) context.getReaderContext().nodeMemories.get( getId() );
            if( map != null ) {
                _handle = map.get( PersisterHelper.createTupleKey( leftTuple ) ).handle;
            }
        }
        return _handle != null ?
               workingMemory.getFactHandleFactory().newFactHandle( _handle.getId(),
                                                                   null,
                                                                   _handle.getRecency(),
                                                                   null,
                                                                   workingMemory,
                                                                   workingMemory ) :
               workingMemory.getFactHandleFactory().newFactHandle( null,
                                                                   null,
                                                                   workingMemory,
                                                                   workingMemory );
    }

    /**
     * Materializes the actual DroolsQuery for this tuple: instantiates the args from the
     * template (resolving declarations against the tuple, propagating unbound Variables),
     * attaches a result collector, and links query object, handle and tuple together.
     */
    public DroolsQuery createDroolsQuery(LeftTuple leftTuple,
                                         InternalFactHandle handle,
                                         StackEntry stackEntry,
                                         final List<PathMemory> pmems,
                                         QueryElementNodeMemory qmem,
                                         LeftTupleSets trgLeftTuples,
                                         LeftTupleSink sink,
                                         InternalWorkingMemory workingMemory) {
        Object[] args = new Object[argsTemplate.length]; // the actual args, to be created from the template

        // first copy everything, so that we get the literals. We will rewrite the declarations and variables next
        System.arraycopy( argsTemplate,
                          0,
                          args,
                          0,
                          args.length );

        int[] declIndexes = this.queryElement.getDeclIndexes();

        for ( int declIndexe : declIndexes ) {
            Declaration declr = (Declaration) argsTemplate[declIndexe];

            Object tupleObject = leftTuple.get( declr ).getObject();

            Object o;

            if ( tupleObject instanceof DroolsQuery &&
                 declr.getExtractor() instanceof ArrayElementReader &&
                 ( (DroolsQuery) tupleObject ).getVariables()[declr.getExtractor().getIndex()] != null ) {
                // If the query passed in a Variable, we need to use it
                o = Variable.v;
            } else {
                o = declr.getValue( workingMemory,
                                    tupleObject );
            }
            // NOTE(review): this re-invokes getValue when the first resolution returned
            // null — looks like a retry, but it repeats the identical call. Confirm intent.
            if ( o == null ) {
                o = declr.getValue( workingMemory,
                                    tupleObject );
            }

            args[declIndexe] = o;
        }

        int[] varIndexes = this.queryElement.getVariableIndexes();
        for (int varIndex : varIndexes) {
            if (argsTemplate[varIndex] == Variable.v) {
                // Need to check against the arg template, as the varIndexes also includes re-declared declarations
                args[varIndex] = Variable.v;
            }
        }

        UnificationNodeViewChangedEventListener collector = createCollector( leftTuple, varIndexes, this.tupleMemoryEnabled );

        boolean executeAsOpenQuery = openQuery;
        if ( executeAsOpenQuery ) {
            // There is no point in doing an open query if the caller is a non-open query.
            Object object = leftTuple.get( 0 ).getObject();
            if ( object instanceof DroolsQuery && !((DroolsQuery) object).isOpen() ) {
                executeAsOpenQuery = false;
            }
        }

        DroolsQuery queryObject = new DroolsQuery( this.queryElement.getQueryName(),
                                                   args,
                                                   collector,
                                                   executeAsOpenQuery,
                                                   stackEntry,
                                                   pmems,
                                                   trgLeftTuples,
                                                   qmem,
                                                   sink);

        collector.setFactHandle( handle );

        handle.setObject( queryObject );

        leftTuple.setObject( handle ); // so it can be retracted later and destroyed

        return queryObject;
    }

    protected UnificationNodeViewChangedEventListener createCollector( LeftTuple leftTuple, int[] varIndexes, boolean tupleMemoryEnabled ) {
        return new UnificationNodeViewChangedEventListener( leftTuple,
                                                            varIndexes,
                                                            this,
                                                            tupleMemoryEnabled );
    }

    public LeftTupleSource getLeftTupleSource() {
        return this.leftInput;
    }

    public void setLeftTupleMemoryEnabled(boolean tupleMemoryEnabled) {
        this.tupleMemoryEnabled = tupleMemoryEnabled;
    }

    /**
     * Returns the next node
     * @return
     *      The next TupleSinkNode
     */
    public LeftTupleSinkNode getNextLeftTupleSinkNode() {
        return this.nextTupleSinkNode;
    }

    /**
     * Sets the next node
     * @param next
     *      The next TupleSinkNode
     */
    public void setNextLeftTupleSinkNode(final LeftTupleSinkNode next) {
        this.nextTupleSinkNode = next;
    }

    /**
     * Returns the previous node
     * @return
     *      The previous TupleSinkNode
     */
    public LeftTupleSinkNode getPreviousLeftTupleSinkNode() {
        return this.previousTupleSinkNode;
    }

    /**
     * Sets the previous node
     * @param previous
     *      The previous TupleSinkNode
     */
    public void setPreviousLeftTupleSinkNode(final LeftTupleSinkNode previous) {
        this.previousTupleSinkNode = previous;
    }

    /**
     * Receives query result rows (added/removed/updated) and stages corresponding
     * child tuples into the caller query's result tuple sets.
     */
    public static class UnificationNodeViewChangedEventListener
            implements
            InternalViewChangedEventListener {

        protected LeftTuple leftTuple;

        protected QueryElementNode node;

        protected InternalFactHandle factHandle;

        // Indexes of the unbound (Variable) arguments whose values must be copied
        // out of each result row.
        protected int[] variables;

        protected boolean tupleMemoryEnabled;

        public UnificationNodeViewChangedEventListener(LeftTuple leftTuple,
                                                       int[] variables,
                                                       QueryElementNode node,
                                                       boolean tupleMemoryEnabled) {
            this.leftTuple = leftTuple;
            this.variables = variables;
            this.node = node;
            this.tupleMemoryEnabled = tupleMemoryEnabled;
        }

        public InternalFactHandle getFactHandle() {
            return factHandle;
        }

        public void setFactHandle(InternalFactHandle factHandle) {
            this.factHandle = factHandle;
        }

        public void setVariables(int[] variables) {
            this.variables = variables;
        }

        /**
         * Called for each new query result row. Extracts the bound variable values,
         * handles abductive queries through the truth maintenance system, and stages
         * an insert for the caller query (unless the abduced belief is negative).
         */
        public void rowAdded(final RuleImpl rule,
                             LeftTuple resultLeftTuple,
                             PropagationContext context,
                             InternalWorkingMemory workingMemory) {

            QueryTerminalNode node = (QueryTerminalNode) resultLeftTuple.getLeftTupleSink();
            QueryImpl query = node.getQuery();
            Declaration[] decls = node.getDeclarations();
            DroolsQuery dquery = (DroolsQuery) this.factHandle.getObject();
            Object[] objects = new Object[ determineResultSize( query, dquery ) ];

            Declaration decl;
            for (int variable : this.variables) {
                decl = decls[variable];
                objects[variable] = decl.getValue(workingMemory, resultLeftTuple.get(decl).getObject());
            }

            QueryElementFactHandle resultHandle = createQueryResultHandle(context,
                                                                          workingMemory,
                                                                          objects);

            RightTuple rightTuple = createResultRightTuple(resultHandle, resultLeftTuple, dquery.isOpen());

            boolean pass = true;
            if ( query.isAbductive() ) {
                // Abductive query: construct the abduced fact from the bound args and
                // assert it as a positive belief through the TMS.
                AbductiveQuery aq = (( AbductiveQuery) query );
                int numArgs = aq.getAbducibleArgs().length;
                Object[] constructorArgs = new Object[ aq.getAbducibleArgs().length ];
                for ( int j = 0; j < numArgs; j++ ) {
                    int k = aq.mapArgToParam( j );
                    if ( objects[ k ] != null ) {
                        constructorArgs[ j ] = objects[ k ];
                    } else if ( dquery.getElements()[ k ] != null ) {
                        constructorArgs[ j ] = dquery.getElements()[ k ];
                    }
                }
                Object abduced = aq.abduce( constructorArgs );
                if ( abduced != null ) {
                    boolean firstAssertion = true;
                    ObjectStore store = workingMemory.getObjectStore();
                    InternalFactHandle handle = store.getHandleForObject( abduced );
                    if ( handle != null ) {
                        // Already known: reuse the canonical instance.
                        abduced = handle.getObject();
                        firstAssertion = false;
                    } else {
                        handle = ((InternalWorkingMemoryActions) workingMemory).getTruthMaintenanceSystem().insert( abduced,
                                                                                                                    MODE.POSITIVE.getId(),
                                                                                                                    query,
                                                                                                                    (RuleTerminalNodeLeftTuple) resultLeftTuple );
                    }
                    BeliefSet bs = handle.getEqualityKey() != null ? handle.getEqualityKey().getBeliefSet() : null;
                    if ( bs == null ) {
                        abduced = handle.getObject();
                    } else {
                        if ( ! bs.isPositive() ) {
                            // The belief set rejects the abduced fact: drop this row.
                            pass = false;
                        } else {
                            if ( !firstAssertion ) {
                                // Re-assert to register this justification with the TMS.
                                ( (InternalWorkingMemoryActions) workingMemory ).getTruthMaintenanceSystem().insert( abduced,
                                                                                                                     MODE.POSITIVE.getId(),
                                                                                                                     query,
                                                                                                                     (RuleTerminalNodeLeftTuple) resultLeftTuple );
                            }
                        }
                    }
                }
                objects[ objects.length - 1 ] = abduced;
            }

            if ( pass ) {
                LeftTupleSink sink = dquery.getLeftTupleSink();
                LeftTuple childLeftTuple = sink.createLeftTuple( this.leftTuple, rightTuple, sink );
                boolean stagedInsertWasEmpty = dquery.getResultLeftTupleSets().addInsert(childLeftTuple);
                if ( stagedInsertWasEmpty ) {
                    dquery.getQueryNodeMemory().setNodeDirtyWithoutNotify();
                }
            }
        }

        // Result row width: one slot per query element, plus one extra slot for the
        // abduced object when the abductive query returns its bound value.
        private int determineResultSize( QueryImpl query, DroolsQuery dquery ) {
            int size = dquery.getElements().length;
            if (query.isAbductive() && (( AbductiveQuery ) query ).isReturnBound()) {
                size++;
            }
            return size;
        }

        protected RightTuple createResultRightTuple( QueryElementFactHandle resultHandle, LeftTuple resultLeftTuple, boolean open ) {
            RightTuple rightTuple = new RightTuple( resultHandle );
            if ( open ) {
                // Open queries keep a bidirectional link so later updates/removals can
                // find the staged child tuple.
                rightTuple.setLeftTuple( resultLeftTuple );
                resultLeftTuple.setObject( rightTuple );

            }
            rightTuple.setPropagationContext( resultLeftTuple.getPropagationContext() );
            return rightTuple;
        }

        /**
         * Creates the handle for one result row; during de-marshalling, restores the
         * persisted handle id/recency from the reader context instead of minting new ones.
         */
        @SuppressWarnings("unchecked")
        protected QueryElementFactHandle createQueryResultHandle(PropagationContext context,
                                                                 InternalWorkingMemory workingMemory,
                                                                 Object[] objects) {
            ProtobufMessages.FactHandle _handle = null;
            if( context.getReaderContext() != null ) {
                Map<TupleKey, QueryElementContext> map = (Map<TupleKey, QueryElementContext>) context.getReaderContext().nodeMemories.get( node.getId() );
                if( map != null ) {
                    QueryElementContext _context = map.get( PersisterHelper.createTupleKey( leftTuple ) );
                    if( _context != null ) {
                        _handle = _context.results.removeFirst();
                    }
                }
            }
            return _handle != null ?
                   new QueryElementFactHandle( objects,
                                               _handle.getId(),
                                               _handle.getRecency() ) :
                   new QueryElementFactHandle( objects,
                                               workingMemory.getFactHandleFactory().getAtomicId().incrementAndGet(),
                                               workingMemory.getFactHandleFactory().getAtomicRecency().incrementAndGet() );
        }

        /**
         * Called when a result row is retracted: unstages any clashing insert/update
         * and stages a delete for the corresponding child tuple.
         */
        public void rowRemoved(final RuleImpl rule,
                               final LeftTuple resultLeftTuple,
                               final PropagationContext context,
                               final InternalWorkingMemory workingMemory) {
            RightTuple rightTuple = (RightTuple) resultLeftTuple.getObject();
            rightTuple.setLeftTuple( null );
            resultLeftTuple.setObject( null );

            DroolsQuery query = (DroolsQuery) this.factHandle.getObject();

            LeftTupleSets leftTuples = query.getResultLeftTupleSets();
            LeftTuple childLeftTuple = rightTuple.getFirstChild();

            switch ( childLeftTuple.getStagedType() ) {
                // handle clash with already staged entries
                case LeftTuple.INSERT :
                    leftTuples.removeInsert( childLeftTuple );
                    break;
                case LeftTuple.UPDATE :
                    leftTuples.removeUpdate( childLeftTuple );
                    break;
            }
            leftTuples.addDelete( childLeftTuple );
            childLeftTuple.unlinkFromRightParent();
            childLeftTuple.unlinkFromLeftParent();
        }

        /**
         * Called when a result row changes: re-extracts all bound values into a fresh
         * object array and stages an update for the corresponding child tuple.
         */
        public void rowUpdated(final RuleImpl rule,
                               final LeftTuple resultLeftTuple,
                               final PropagationContext context,
                               final InternalWorkingMemory workingMemory) {
            RightTuple rightTuple = (RightTuple) resultLeftTuple.getObject();
            if ( rightTuple.getMemory() != null ) {
                // Already scheduled as an insert
                return;
            }

            rightTuple.setLeftTuple( null );
            resultLeftTuple.setObject( null );

            // We need to recopy everything back again, as we don't know what has or hasn't changed
            QueryTerminalNode node = (QueryTerminalNode) resultLeftTuple.getLeftTupleSink();
            Declaration[] decls = node.getDeclarations();
            InternalFactHandle rootHandle = resultLeftTuple.get( 0 );
            DroolsQuery dquery = (DroolsQuery) rootHandle.getObject();

            Object[] objects = new Object[dquery.getElements().length];

            Declaration decl;
            for (int variable : this.variables) {
                decl = decls[variable];
                objects[variable] = decl.getValue(workingMemory, resultLeftTuple.get(decl).getObject());
            }

            QueryElementFactHandle handle = (QueryElementFactHandle) rightTuple.getFactHandle();

            handle.setRecency(workingMemory.getFactHandleFactory().getAtomicRecency().incrementAndGet());
            handle.setObject( objects );

            if ( dquery.isOpen() ) {
                rightTuple.setLeftTuple( resultLeftTuple );
                resultLeftTuple.setObject( rightTuple );
            }

            LeftTupleSets leftTuples = dquery.getResultLeftTupleSets();
            LeftTuple childLeftTuple = rightTuple.getFirstChild();
            switch ( childLeftTuple.getStagedType() ) {
                // handle clash with already staged entries
                case LeftTuple.INSERT :
                    leftTuples.removeInsert( childLeftTuple );
                    break;
                case LeftTuple.UPDATE :
                    leftTuples.removeUpdate( childLeftTuple );
                    break;
            }
            leftTuples.addUpdate( childLeftTuple );
        }

        public List<?> getResults() {
            throw new UnsupportedOperationException( getClass().getCanonicalName() + " does not support the getResults() method." );
        }

        public LeftTuple getLeftTuple() {
            return leftTuple;
        }

    }

    public LeftTuple createLeftTuple(InternalFactHandle factHandle,
                                     LeftTupleSink sink,
                                     boolean leftTupleMemoryEnabled) {
        return new QueryElementNodeLeftTuple( factHandle,
                                              sink,
                                              leftTupleMemoryEnabled );
    }

    public LeftTuple createLeftTuple(final InternalFactHandle factHandle,
                                     final LeftTuple leftTuple,
                                     final LeftTupleSink sink) {
        return new QueryElementNodeLeftTuple(factHandle,leftTuple, sink );
    }

    public LeftTuple createLeftTuple(LeftTuple leftTuple,
                                     LeftTupleSink sink,
                                     PropagationContext pctx,
                                     boolean leftTupleMemoryEnabled) {
        return new QueryElementNodeLeftTuple( leftTuple,
                                              sink,
                                              pctx,
                                              leftTupleMemoryEnabled );
    }

    public LeftTuple createLeftTuple(LeftTuple leftTuple,
                                     RightTuple rightTuple,
                                     LeftTupleSink sink) {
        return new QueryElementNodeLeftTuple( leftTuple,
                                              rightTuple,
                                              sink );
    }

    public LeftTuple createLeftTuple(LeftTuple leftTuple,
                                     RightTuple rightTuple,
                                     LeftTuple currentLeftChild,
                                     LeftTuple currentRightChild,
                                     LeftTupleSink sink,
                                     boolean leftTupleMemoryEnabled) {
        return new QueryElementNodeLeftTuple( leftTuple,
                                              rightTuple,
                                              currentLeftChild,
                                              currentRightChild,
                                              sink,
                                              leftTupleMemoryEnabled );
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = super.hashCode();
        result = prime * result + (openQuery ? 1231 : 1237);
        result = prime * result + ((queryElement == null) ? 0 : queryElement.hashCode());
        result = prime * result + ((leftInput == null) ? 0 : leftInput.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if ( this == obj ) return true;
        if ( obj == null ) return false;
        if ( getClass() != obj.getClass() ) return false;
        QueryElementNode other = (QueryElementNode) obj;
        if ( openQuery != other.openQuery ) return false;
        // NOTE(review): dataDriven only participates in equality for non-open queries,
        // and is excluded from hashCode — presumably intentional node-sharing behavior;
        // confirm against the node-sharing rules.
        if ( !openQuery && dataDriven != other.dataDriven ) return false;
        if ( queryElement == null ) {
            if ( other.queryElement != null ) return false;
        } else if ( !queryElement.equals( other.queryElement ) ) return false;
        if ( leftInput == null ) {
            if ( other.leftInput != null ) return false;
        } else if ( !leftInput.equals( other.leftInput ) ) return false;
        return true;
    }

    public Memory createMemory(RuleBaseConfiguration config, InternalWorkingMemory wm) {
        return new QueryElementNodeMemory(this);
    }

    /**
     * Per-working-memory state for a QueryElementNode: segment membership, the query's
     * own segment, and the staged result tuple sets.
     */
    public static class QueryElementNodeMemory extends AbstractBaseLinkedListNode<Memory>
            implements
            Memory {
        private QueryElementNode node;

        private SegmentMemory smem;

        private SegmentMemory querySegmentMemory;

        private LeftTupleSets resultLeftTuples;

        private long nodePosMaskBit;

        public QueryElementNodeMemory(QueryElementNode node) {
            this.node = node;
            // @FIXME I don't think this is thread safe
            this.resultLeftTuples = new LeftTupleSetsImpl();
        }

        public QueryElementNode getNode() {
            return this.node;
        }

        public short getNodeType() {
            return NodeTypeEnums.QueryElementNode;
        }

        public void setSegmentMemory(SegmentMemory smem) {
            this.smem = smem;
        }

        public SegmentMemory getSegmentMemory() {
            return smem;
        }

        public SegmentMemory getQuerySegmentMemory() {
            return querySegmentMemory;
        }

        public void setQuerySegmentMemory(SegmentMemory querySegmentMemory) {
            this.querySegmentMemory = querySegmentMemory;
        }

        public LeftTupleSets getResultLeftTuples() {
            return resultLeftTuples;
        }

        public long getNodePosMaskBit() {
            return nodePosMaskBit;
        }

        public void setNodePosMaskBit(long segmentPos) {
            this.nodePosMaskBit = segmentPos;
        }

        public void setNodeDirtyWithoutNotify() {
            smem.updateDirtyNodeMask( nodePosMaskBit );
        }

        public void setNodeCleanWithoutNotify() {
            smem.updateCleanNodeMask( nodePosMaskBit );
        }

        public void reset() {
            resultLeftTuples.resetAll();
        }
    }

    protected ObjectTypeNode getObjectTypeNode() {
        return leftInput.getObjectTypeNode();
    }

    @Override
    public LeftTuple createPeer(LeftTuple original) {
        QueryElementNodeLeftTuple peer = new QueryElementNodeLeftTuple();
        peer.initPeer((BaseLeftTuple) original, this);
        original.setPeer(peer);
        return peer;
    }

    public String toString() {
        return "[" + this.getClass().getSimpleName() + "(" + this.id + ", " + queryElement.getQueryName() + ")]";
    }

    // The following propagation entry points are unsupported: this node is driven by
    // the phreak engine through createDroolsQuery/collector callbacks, not by direct
    // tuple assertion.
    @Override
    public void assertLeftTuple(LeftTuple leftTuple, PropagationContext context, InternalWorkingMemory workingMemory) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void retractLeftTuple(LeftTuple leftTuple, PropagationContext context, InternalWorkingMemory workingMemory) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void modifyLeftTuple(InternalFactHandle factHandle, ModifyPreviousTuples modifyPreviousTuples, PropagationContext context, InternalWorkingMemory workingMemory) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void updateSink(LeftTupleSink sink, PropagationContext context, InternalWorkingMemory workingMemory) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void modifyLeftTuple(LeftTuple leftTuple, PropagationContext context, InternalWorkingMemory workingMemory) {
        throw new UnsupportedOperationException();
    }

    public void attach( BuildContext context ) {
        this.leftInput.addTupleSink( this, context );
    }

    protected boolean doRemove(RuleRemovalContext context,
                               ReteooBuilder builder,
                               InternalWorkingMemory[] workingMemories) {
        if (!isInUse()) {
            getLeftTupleSource().removeTupleSink(this);
            return true;
        }
        return false;
    }
}
/*
 * $Id$
 */

/*

Copyright (c) 2000-2015 Board of Trustees of Leland Stanford Jr. University,
all rights reserved.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

Except as contained in this notice, the name of Stanford University shall not
be used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from Stanford University.

*/

package org.lockss.plugin.massachusettsmedicalsociety;

import java.util.Iterator;
import java.util.regex.*;

import org.lockss.daemon.*;
import org.lockss.extractor.*;
import org.lockss.plugin.*;
import org.lockss.util.Constants;
import org.lockss.util.Logger;

/*
 * Example URL shapes this iterator works against:
 *
 * HTML Full Text: http://www.nejm.org/doi/full/10.1056/NEJMoa042957
 * PDF Full Text: http://www.nejm.org/doi/pdf/10.1056/NEJMoa042957
 * Citation (containing metadata): www.nejm.org/action/downloadCitation?format=(ris|endnote|bibTex|medlars|procite|referenceManager)&doi=10.1056%2FNEJMoa042957&include=cit&direct=checked
 * Supplemental Materials page: http://www.nejm.org/action/showSupplements?doi=10.1056%2FNEJMc1304053
 */
/**
 * Article iterator factory for the Massachusetts Medical Society (NEJM) plugin.
 *
 * <p>Walks the crawled content under {@code <base_url>doi/} and builds one
 * {@link ArticleFiles} per article, choosing the full-text HTML as the primary
 * content when present and falling back to the PDF otherwise. For each
 * article it also tries to locate the downloadable citation files (one per
 * export format) and the supplementary-materials landing page by constructing
 * the corresponding URLs from the article's DOI.
 */
public class MassachusettsMedicalSocietyArticleIteratorFactory
    implements ArticleIteratorFactory, ArticleMetadataExtractorFactory {

  // Roles for the different citation export formats. The ArticleFiles role key
  // is built from ROLE_CITATION plus a suffix; the second element of each pair
  // in guessCitations() below is the "format=" value used in the download URL.
  protected static final String ROLE_CITATION_RIS = ArticleFiles.ROLE_CITATION + "Ris";
  protected static final String ROLE_CITATION_ENDNOTE = ArticleFiles.ROLE_CITATION + "Endnote";
  protected static final String ROLE_CITATION_BIBTEX = ArticleFiles.ROLE_CITATION + "Bibtex";
  protected static final String ROLE_CITATION_MEDLARS = ArticleFiles.ROLE_CITATION + "Medlars";
  protected static final String ROLE_CITATION_PROCITE = ArticleFiles.ROLE_CITATION + "Procite";
  protected static final String ROLE_CITATION_REFMANAGER = ArticleFiles.ROLE_CITATION + "Refmanager";
  protected static final String ROLE_SUPPLEMENTARY_MATERIALS = ArticleFiles.ROLE_SUPPLEMENTARY_MATERIALS;

  protected static Logger log = Logger.getLogger("MassachusettsMedicalSocietyArticleIteratorFactory");

  // Printf-style templates expanded against the AU's tdb params; the iterator
  // only visits URLs under <base_url>doi/ that match /doi/(full|pdf)/.
  protected static final String ROOT_TEMPLATE = "\"%sdoi\", base_url"; // params from tdb file corresponding to AU
  protected static final String PATTERN_TEMPLATE = "\"^%sdoi/(full|pdf)/\", base_url";

  /**
   * Creates the article iterator for the given AU, rooted at the templates
   * above.
   *
   * @param au the archival unit being iterated
   * @param target the metadata target (passed through to the iterator spec)
   * @return a new {@link MassachusettsMedicalSocietyArticleIterator}
   * @throws PluginException declared by the interface; not thrown here
   */
  @Override
  public Iterator<ArticleFiles> createArticleIterator(ArchivalUnit au,
      MetadataTarget target) throws PluginException {
    return new MassachusettsMedicalSocietyArticleIterator(au,
        new SubTreeArticleIterator.Spec()
            .setTarget(target)
            .setRootTemplate(ROOT_TEMPLATE)
            .setPatternTemplate(PATTERN_TEMPLATE));
  }

  /**
   * The iterator proper: classifies each visited CachedUrl as full-text HTML
   * or full-text PDF and assembles the ArticleFiles roles.
   */
  protected static class MassachusettsMedicalSocietyArticleIterator extends SubTreeArticleIterator {

    // Group 1 of each pattern captures the DOI portion after /doi/full/ or
    // /doi/pdf/, which the guess* methods reuse via Matcher.replaceFirst("...$1").
    protected Pattern HTML_PATTERN = Pattern.compile("/doi/full/(.*)$", Pattern.CASE_INSENSITIVE);
    protected Pattern PDF_PATTERN = Pattern.compile("/doi/pdf/(.*)$", Pattern.CASE_INSENSITIVE);

    public MassachusettsMedicalSocietyArticleIterator(ArchivalUnit au,
        SubTreeArticleIterator.Spec spec) {
      super(au, spec);
    }

    /**
     * Classifies one URL. A /doi/full/ URL is treated as HTML when its MIME
     * type says so, but as the primary PDF when it actually serves a PDF; a
     * /doi/pdf/ URL is processed only when it really is a PDF.
     *
     * @param cu the cached URL to classify
     * @return the assembled ArticleFiles, or null if this URL is not an article
     */
    @Override
    protected ArticleFiles createArticleFiles(CachedUrl cu) {
      String url = cu.getUrl();
      log.debug3("createArticleFiles: cu="+url);
      Matcher mat;
      mat = HTML_PATTERN.matcher(url);
      if (mat.find()) {
        if(isHtml(cu)) {
          return processFullTextHtml(cu, mat);
        }
        //Check the mime-type. Some full article links point to PDFs instead of HTML pages in older volumes
        else if(isPdf(cu)) {
          return processFullTextPdf(cu, mat, true);
        }
      }
      mat = PDF_PATTERN.matcher(url);
      if (mat.find() && isPdf(cu)) {
        return processFullTextPdf(cu, mat);
      }
      //log.warning("Mismatch between article iterator factory and article iterator: " + url);
      return null;
    }

    /**
     * Builds an ArticleFiles with the HTML as the full-text CU, then (unless
     * only article metadata is wanted) guesses the sibling PDF, citations,
     * and supplements from the same DOI match.
     */
    protected ArticleFiles processFullTextHtml(CachedUrl htmlCu, Matcher htmlMat) {
      ArticleFiles af = new ArticleFiles();
      log.debug3("setFullTextCu: "+htmlCu);
      af.setFullTextCu(htmlCu);
      af.setRoleCu(ArticleFiles.ROLE_FULL_TEXT_HTML, htmlCu);
      if (spec.getTarget() != MetadataTarget.Article()) {
        guessFullTextPdf(af, htmlMat);
        guessCitations(af, htmlMat);
        guessSupplements(af, htmlMat);
      }
      return af;
    }

    /** Convenience overload for /doi/pdf/ URLs (isFull=false). */
    protected ArticleFiles processFullTextPdf(CachedUrl pdfCu, Matcher pdfMat) {
      return processFullTextPdf(pdfCu, pdfMat, false);
    }

    /**
     * Builds an ArticleFiles with the PDF as the full-text CU.
     *
     * @param pdfCu the PDF cached URL
     * @param pdfMat matcher whose group 1 is the DOI portion
     * @param isFull true when this PDF was reached via a /doi/full/ URL; when
     *        false and a real HTML version exists for the same DOI, returns
     *        null so the article is emitted once, from the HTML URL
     */
    protected ArticleFiles processFullTextPdf(CachedUrl pdfCu, Matcher pdfMat, boolean isFull) {
      if(!isFull){
        CachedUrl htmlCu = au.makeCachedUrl(pdfMat.replaceFirst("/doi/full/$1"));
        if (htmlCu != null && htmlCu.hasContent() && isHtml(htmlCu)) {
          return null;
        }
      }
      ArticleFiles af = new ArticleFiles();
      af.setFullTextCu(pdfCu);
      af.setRoleCu(ArticleFiles.ROLE_FULL_TEXT_PDF, pdfCu);
      if (spec.getTarget() != MetadataTarget.Article()) {
        guessCitations(af, pdfMat);
        guessSupplements(af, pdfMat);
      }
      return af;
    }

    // MIME-type check; releases the CU in both branches.
    // NOTE(review): getContentType() is assumed non-null here — TODO confirm
    // against the CachedUrl contract for this daemon version.
    protected boolean isPdf(CachedUrl cu){
      if(cu.getContentType().trim().startsWith(Constants.MIME_TYPE_PDF)) {
        cu.release();
        return true;
      }
      cu.release();
      return false;
    }

    // MIME-type check; releases the CU in both branches (see note on isPdf).
    protected boolean isHtml(CachedUrl cu){
      if(cu.getContentType().trim().startsWith(Constants.MIME_TYPE_HTML)) {
        cu.release();
        return true;
      }
      cu.release();
      return false;
    }

    /** Assigns the PDF role when a /doi/pdf/ sibling of the matched URL exists. */
    protected void guessFullTextPdf(ArticleFiles af, Matcher mat) {
      CachedUrl pdfCu = au.makeCachedUrl(mat.replaceFirst("/doi/pdf/$1"));
      if (pdfCu != null && pdfCu.hasContent()) {
        af.setRoleCu(ArticleFiles.ROLE_FULL_TEXT_PDF, pdfCu);
      }
    }

    /**
     * Probes the downloadCitation URL for each export format and assigns the
     * per-format citation roles; the first format found also becomes the
     * generic ROLE_CITATION.
     * NOTE(review): when no citation file is found, ROLE_CITATION is set with
     * a null CU — presumably setRoleCu tolerates null; verify.
     */
    protected void guessCitations(ArticleFiles af, Matcher mat) {
      String citDoi = mat.group(1);
      CachedUrl primCitCu = null;
      // the DOI's slash is percent-encoded in the citation URL's query string
      citDoi = citDoi.replace("/", "%2F");
      String[][] citTypes = {{ROLE_CITATION_RIS, "ris"},
                             {ROLE_CITATION_PROCITE, "procite"},
                             {ROLE_CITATION_ENDNOTE, "endnote"},
                             {ROLE_CITATION_BIBTEX, "bibTex"},
                             {ROLE_CITATION_MEDLARS, "medlars"},
                             {ROLE_CITATION_REFMANAGER, "referenceManager"}};
      for(String[] citType : citTypes){
        CachedUrl citCu = au.makeCachedUrl(mat.replaceFirst("/action/downloadCitation?format=" + citType[1] + "&doi=" + citDoi + "&include=cit&direct=checked"));
        if (citCu != null && citCu.hasContent()) {
          if(primCitCu == null){
            primCitCu = citCu;
          }
          af.setRoleCu(citType[0], citCu);
        }
      }
      af.setRoleCu(ArticleFiles.ROLE_CITATION, primCitCu);
    }

    // Assigning the Supplementary_Materials role to the url (eg):
    // http://www.nejm.org/action/showSupplements?doi=10.1056%2FNEJMc1304053
    // which contain links to the appendix, disclosures and/or protocol page(s)
    // Supplementary_Materials landing page doi case matches that of main article
    // urls (whereas underlying specific supplementary data urls are always
    // lower case
    protected void guessSupplements(ArticleFiles af, Matcher mat) {
      String origdoi = mat.group(1);
      String supDoi = origdoi.replace("/", "%2F");
      log.debug3("guessSupplements: "+ "/action/showSupplements?doi="+supDoi);
      CachedUrl supCu = au.makeCachedUrl(
          mat.replaceFirst("/action/showSupplements?doi="+supDoi));
      if (supCu != null && supCu.hasContent()) {
        af.setRoleCu(ArticleFiles.ROLE_SUPPLEMENTARY_MATERIALS, supCu);
      }
    }
  }

  /**
   * Metadata extraction is driven from the citation file (ROLE_CITATION),
   * which is where this publisher exposes the article metadata.
   */
  @Override
  public ArticleMetadataExtractor createArticleMetadataExtractor(MetadataTarget target)
      throws PluginException {
    return new BaseArticleMetadataExtractor(ArticleFiles.ROLE_CITATION);
  }
}
package org.jboss.arquillian.junit;

import org.jboss.arquillian.junit.event.AfterRules;
import org.jboss.arquillian.junit.event.BeforeRules;
import org.jboss.arquillian.test.spi.LifecycleMethodExecutor;
import org.jboss.arquillian.test.spi.TestMethodExecutor;
import org.jboss.arquillian.test.spi.TestResult;
import org.jboss.arquillian.test.spi.TestRunnerAdaptor;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.Result;
import org.junit.runner.RunWith;
import org.junit.runner.notification.Failure;
import org.junit.runner.notification.RunListener;
import org.mockito.internal.stubbing.answers.ThrowsException;
import org.mockito.junit.MockitoJUnitRunner;

import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.isA;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

/**
 * Verify that the JUnit integration adaptor fires the expected events even when Handlers are failing.
 *
 * <p>Each test mocks a {@link TestRunnerAdaptor}, runs one of the fixture test
 * classes (declared in the base class) through JUnit via {@code run(...)}, and
 * then asserts on the recorded lifecycle cycles and adaptor interactions.
 *
 * @author <a href="mailto:aslak@redhat.com">Aslak Knutsen</a>
 * @version $Revision: $
 */
@RunWith(MockitoJUnitRunner.class)
public class JUnitIntegrationWithRuleTestCase extends JUnitTestBaseClass {

    // With no lifecycle handlers stubbed, only the class-level cycles fire.
    @Test
    public void shouldCallBeforeClassAndAfterClassWithoutLifecycleHandlers() throws Exception {
        TestRunnerAdaptor adaptor = mock(TestRunnerAdaptor.class);
        when(adaptor.test(isA(TestMethodExecutor.class))).thenReturn(TestResult.passed());

        Result result = run(adaptor, ClassWithArquillianClassAndMethodRule.class);
        Assert.assertTrue(result.wasSuccessful());

        assertCycle(1, Cycle.BEFORE_CLASS, Cycle.AFTER_CLASS);
        assertCycle(0, Cycle.BEFORE, Cycle.AFTER, Cycle.AFTER_RULE, Cycle.BEFORE_RULE);

        verify(adaptor, times(1)).beforeSuite();
        verify(adaptor, times(1)).afterSuite();
    }

    // Happy path: every lifecycle phase plus the Before/AfterRules custom events fire once.
    @Test
    public void shouldCallAllMethods() throws Exception {
        TestRunnerAdaptor adaptor = mock(TestRunnerAdaptor.class);
        executeAllLifeCycles(adaptor);

        Result result = run(adaptor, ClassWithArquillianClassAndMethodRule.class);
        Assert.assertTrue(result.wasSuccessful());

        assertCycle(1, Cycle.basics());

        verify(adaptor, times(1)).fireCustomLifecycle(isA(BeforeRules.class));
        verify(adaptor, times(1)).fireCustomLifecycle(isA(AfterRules.class));
        verify(adaptor, times(1)).beforeSuite();
        verify(adaptor, times(1)).afterSuite();
    }

    // A failing @BeforeClass must not prevent @AfterClass from running.
    @Test
    public void shouldCallAfterClassWhenBeforeThrowsException() throws Exception {
        TestRunnerAdaptor adaptor = mock(TestRunnerAdaptor.class);
        executeAllLifeCycles(adaptor);

        throwException(Cycle.BEFORE_CLASS, new Throwable());

        Result result = run(adaptor, ClassWithArquillianClassAndMethodRule.class);
        Assert.assertFalse(result.wasSuccessful());

        assertCycle(1, Cycle.BEFORE_CLASS, Cycle.AFTER_CLASS);
        assertCycle(0, Cycle.BEFORE, Cycle.AFTER, Cycle.TEST);

        verify(adaptor, times(1)).beforeSuite();
        verify(adaptor, times(1)).afterSuite();
    }

    // A failing @Before must not prevent @After from running (the test itself is skipped).
    @Test
    public void shouldCallAfterWhenBeforeThrowsException() throws Exception {
        TestRunnerAdaptor adaptor = mock(TestRunnerAdaptor.class);
        executeAllLifeCycles(adaptor);

        throwException(Cycle.BEFORE, new Throwable());

        Result result = run(adaptor, ClassWithArquillianClassAndMethodRule.class);
        Assert.assertFalse(result.wasSuccessful());

        assertCycle(1, Cycle.BEFORE_CLASS, Cycle.AFTER_CLASS, Cycle.BEFORE, Cycle.AFTER);
        assertCycle(0, Cycle.TEST);

        verify(adaptor, times(1)).beforeSuite();
        verify(adaptor, times(1)).afterSuite();
    }

    // NOTE(review): despite the name, this asserts before/afterSuite fire once
    // per run() invocation (4 runs -> 4 calls) — presumably each run() is its
    // own suite; confirm against JUnitTestBaseClass.run().
    @Test
    public void shouldOnlyCallBeforeAfterSuiteOnce() throws Exception {
        TestRunnerAdaptor adaptor = mock(TestRunnerAdaptor.class);
        executeAllLifeCycles(adaptor);

        Result result = run(adaptor,
            ClassWithArquillianClassAndMethodRule.class,
            ClassWithArquillianClassAndMethodRule.class,
            ClassWithArquillianClassAndMethodRule.class,
            ClassWithArquillianClassAndMethodRule.class);
        Assert.assertTrue(result.wasSuccessful());

        verify(adaptor, times(4)).beforeSuite();
        verify(adaptor, times(4)).afterSuite();
    }

    /*
     * ARQ-391, After not called when Error's are thrown, e.g. AssertionError
     */
    @Test
    public void shouldCallAllWhenTestThrowsException() throws Exception {
        TestRunnerAdaptor adaptor = mock(TestRunnerAdaptor.class);
        executeAllLifeCycles(adaptor);

        throwException(Cycle.TEST, new Throwable());

        Result result = run(adaptor, ClassWithArquillianClassAndMethodRule.class);
        Assert.assertFalse(result.wasSuccessful());

        assertCycle(1, Cycle.basics());

        verify(adaptor, times(1)).beforeSuite();
        verify(adaptor, times(1)).afterSuite();
    }

    // JUnit @Test(timeout=...) interop: the timeout failure is reported and
    // the surrounding lifecycle still completes.
    @Test
    public void shouldWorkWithTimeout() throws Exception {
        TestRunnerAdaptor adaptor = mock(TestRunnerAdaptor.class);
        executeAllLifeCycles(adaptor);

        Result result = run(adaptor, ClassWithArquillianClassAndMethodRuleWithTimeout.class);
        Assert.assertFalse(result.wasSuccessful());
        Assert.assertTrue(result.getFailures().get(0).getMessage().contains("timed out"));

        assertCycle(1, Cycle.BEFORE_CLASS, Cycle.BEFORE, Cycle.AFTER, Cycle.AFTER_CLASS);

        verify(adaptor, times(1)).beforeSuite();
        verify(adaptor, times(1)).afterSuite();
    }

    // ExpectedException rule interop: an expected exception counts as success.
    @Test
    public void shouldWorkWithExpectedExceptionRule() throws Exception {
        TestRunnerAdaptor adaptor = mock(TestRunnerAdaptor.class);
        executeAllLifeCycles(adaptor);

        Result result = run(adaptor, ClassWithArquillianClassAndMethodRuleWithExpectedExceptionRule.class);
        Assert.assertTrue(result.wasSuccessful());

        assertCycle(1, Cycle.BEFORE_CLASS, Cycle.BEFORE, Cycle.TEST, Cycle.AFTER, Cycle.AFTER_CLASS);

        verify(adaptor, times(1)).beforeSuite();
        verify(adaptor, times(1)).afterSuite();
    }

    // @Test(expected=...) interop: an expected exception counts as success.
    @Test
    public void shouldWorkWithExpectedException() throws Exception {
        TestRunnerAdaptor adaptor = mock(TestRunnerAdaptor.class);
        executeAllLifeCycles(adaptor);

        Result result = run(adaptor, ClassWithArquillianClassAndMethodRuleWithExpectedException.class);
        Assert.assertTrue(result.wasSuccessful());

        assertCycle(1, Cycle.BEFORE_CLASS, Cycle.BEFORE, Cycle.TEST, Cycle.AFTER, Cycle.AFTER_CLASS);

        verify(adaptor, times(1)).beforeSuite();
        verify(adaptor, times(1)).afterSuite();
    }

    // Assume interop: an assumption failure is reported to the RunListener,
    // does not count as a failure/ignore, and skips only the test body.
    @Test
    public void shouldWorkWithAssume() throws Exception {
        TestRunnerAdaptor adaptor = mock(TestRunnerAdaptor.class);
        executeAllLifeCycles(adaptor);

        final List<Failure> assumptionFailure = new ArrayList<Failure>();
        Result result = run(adaptor, new RunListener() {
            @Override
            public void testAssumptionFailure(Failure failure) {
                assumptionFailure.add(failure);
            }
        }, ClassWithArquillianClassAndMethodRuleWithAssume.class);

        Assert.assertEquals(1, assumptionFailure.size());
        Assert.assertTrue(result.wasSuccessful());
        Assert.assertEquals(0, result.getFailureCount());
        Assert.assertEquals(0, result.getIgnoreCount());

        assertCycle(1, Cycle.BEFORE_CLASS, Cycle.BEFORE, Cycle.AFTER, Cycle.AFTER_CLASS);

        verify(adaptor, times(1)).beforeSuite();
        verify(adaptor, times(1)).afterSuite();
    }

    // Both the @Before and @After exceptions must surface as separate failures, in order.
    @Test
    public void shouldThrowMultipleExceptionsWhenBeforeAndAfterThrowException() throws Exception {
        TestRunnerAdaptor adaptor = mock(TestRunnerAdaptor.class);
        executeAllLifeCycles(adaptor);

        Result result = run(adaptor, ClassWithArquillianClassAndMethodRuleWithExceptionInBeforeAndAfter.class);
        Assert.assertFalse(result.wasSuccessful());
        Assert.assertEquals(2, result.getFailureCount());
        Assert.assertTrue(result.getFailures().get(0).getMessage().equals("BeforeException"));
        Assert.assertTrue(result.getFailures().get(1).getMessage().equals("AfterException"));

        assertCycle(1, Cycle.BEFORE_CLASS, Cycle.BEFORE, Cycle.AFTER, Cycle.AFTER_CLASS);
        assertCycle(0, Cycle.TEST);

        verify(adaptor, times(1)).beforeSuite();
        verify(adaptor, times(1)).afterSuite();
    }

    // A failure inside the BeforeRules phase still triggers the AfterRules event.
    @Test
    public void shouldCallAfterRuleIfFailureInBeforeRule() throws Exception {
        TestRunnerAdaptor adaptor = mock(TestRunnerAdaptor.class);
        executeAllLifeCycles(adaptor);

        Result result = run(adaptor, ClassWithArquillianClassAndMethodRuleWithExceptionInBeforeRule.class);
        Assert.assertFalse(result.wasSuccessful());
        Assert.assertEquals(1, result.getFailureCount());
        Assert.assertTrue(result.getFailures().get(0).getMessage().equals("BeforeRuleException"));

        assertCycle(1, Cycle.BEFORE_CLASS, Cycle.AFTER_CLASS);
        assertCycle(0, Cycle.BEFORE, Cycle.TEST, Cycle.AFTER);

        verify(adaptor, times(1)).fireCustomLifecycle(isA(BeforeRules.class));
        verify(adaptor, times(1)).fireCustomLifecycle(isA(AfterRules.class));
        verify(adaptor, times(1)).beforeSuite();
        verify(adaptor, times(1)).afterSuite();
    }

    // A failure inside the AfterRules phase is reported while the full cycle still runs.
    @Test
    public void shouldCallAfterRuleIfFailureInAfterRule() throws Exception {
        TestRunnerAdaptor adaptor = mock(TestRunnerAdaptor.class);
        executeAllLifeCycles(adaptor);

        Result result = run(adaptor, ClassWithArquillianClassAndMethodRuleWithExceptionInAfterRule.class);
        Assert.assertFalse(result.wasSuccessful());
        Assert.assertEquals(1, result.getFailureCount());
        Assert.assertTrue(result.getFailures().get(0).getMessage().equals("AfterRuleException"));

        assertCycle(1, Cycle.basics());

        verify(adaptor, times(1)).fireCustomLifecycle(isA(BeforeRules.class));
        verify(adaptor, times(1)).fireCustomLifecycle(isA(AfterRules.class));
        verify(adaptor, times(1)).beforeSuite();
        verify(adaptor, times(1)).afterSuite();
    }

    // Stubs are wired by hand here (instead of executeAllLifeCycles) so that
    // only AfterRules throws; both the @After and AfterRules exceptions must
    // be reported as separate failures.
    @Test
    public void shouldThrowMultipleExceptionIfFailureInBeforeAndAfterRule() throws Exception {
        TestRunnerAdaptor adaptor = mock(TestRunnerAdaptor.class);

        doAnswer(new ThrowsException(new RuntimeException("AfterRuleException"))).when(adaptor)
            .fireCustomLifecycle(isA(AfterRules.class));
        doAnswer(new ExecuteLifecycle()).when(adaptor).fireCustomLifecycle(isA(BeforeRules.class));
        doAnswer(new ExecuteLifecycle()).when(adaptor).beforeClass(any(Class.class), any(LifecycleMethodExecutor.class));
        doAnswer(new ExecuteLifecycle()).when(adaptor).afterClass(any(Class.class), any(LifecycleMethodExecutor.class));
        doAnswer(new ExecuteLifecycle()).when(adaptor)
            .before(any(Object.class), any(Method.class), any(LifecycleMethodExecutor.class));
        doAnswer(new ExecuteLifecycle()).when(adaptor)
            .after(any(Object.class), any(Method.class), any(LifecycleMethodExecutor.class));
        doAnswer(new TestExecuteLifecycle(TestResult.passed())).when(adaptor).test(any(TestMethodExecutor.class));

        Result result = run(adaptor, ClassWithArquillianClassAndMethodRuleWithExceptionInAfterAndAfterRule.class);
        Assert.assertFalse(result.wasSuccessful());
        Assert.assertEquals(2, result.getFailureCount());
        Assert.assertTrue(result.getFailures().get(0).getMessage().equals("AfterException"));
        Assert.assertTrue(result.getFailures().get(1).getMessage().equals("AfterRuleException"));

        assertCycle(1, Cycle.basics());

        verify(adaptor, times(1)).fireCustomLifecycle(isA(BeforeRules.class));
        verify(adaptor, times(1)).fireCustomLifecycle(isA(AfterRules.class));
        verify(adaptor, times(1)).beforeSuite();
        verify(adaptor, times(1)).afterSuite();
    }

    // Mixing @RunWith(Arquillian.class) with the Arquillian rules is rejected
    // with an explanatory error before any lifecycle runs.
    @Test
    public void shouldThrowExceptionIfTestClassContainsArquillianRunnerAndRule() throws Exception {
        TestRunnerAdaptor adaptor = mock(TestRunnerAdaptor.class);
        executeAllLifeCycles(adaptor);

        Result result = run(adaptor, ClassWithArquillianClassAndMethodRuleAndWithArquillianRunner.class);

        Assert.assertFalse(result.wasSuccessful());
        assertCycle(0, Cycle.basics());
        Assert.assertTrue(result.getFailures().get(0).getMessage().equals(
            "TestClass: org.jboss.arquillian.junit.ClassWithArquillianClassAndMethodRuleAndWithArquillianRunner contains "
                + "Arquillian runner and Arquillian Rule. Arquillian doesn't support @RunWith(Arquillian.class) and "
                + "ArquillianTestClass or ArquillianTest to use at the same time. You have to decide whether you want use "
                + "runner: http://arquillian.org/arquillian-core/#how-it-works or "
                + "rules : http://arquillian.org/arquillian-core/#_how_to_use_it"));
    }

    // Using only the method rule (no ArquillianTestClass class rule) fails
    // with a hint that Arquillian was never initialized.
    @Test
    public void shouldThrowExceptionIfTestClassContainsOnlyArquillianTestRule() throws Exception {
        TestRunnerAdaptor adaptor = mock(TestRunnerAdaptor.class);
        executeAllLifeCycles(adaptor);

        Result result = run(adaptor, ClassWithArquillianMethodRule.class);

        Assert.assertFalse(result.wasSuccessful());
        assertCycle(1, Cycle.BEFORE_CLASS, Cycle.AFTER_CLASS);
        assertCycle(0, Cycle.BEFORE, Cycle.BEFORE_RULE, Cycle.AFTER_RULE, Cycle.AFTER);
        Assert.assertTrue(result.getFailures().get(0).getMessage().equals(
            "arquillian not initialized. Please make sure to define `ArquillianTestClass` Rule in your testclass. "
                + "This could be one of the reason for arquillian not to be initialized."));
    }
}
/** * Copyright 2005-2015 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.kns.workflow.attribute; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; import org.kuali.rice.core.api.config.module.RunMode; import org.kuali.rice.core.api.config.property.ConfigContext; import org.kuali.rice.core.api.CoreApiServiceLocator; import org.kuali.rice.core.api.config.property.ConfigurationService; import org.kuali.rice.core.api.uif.RemotableAttributeError; import org.kuali.rice.core.api.uif.RemotableAttributeField; import org.kuali.rice.core.api.util.io.SerializationUtils; import org.kuali.rice.kew.api.KewApiConstants; import org.kuali.rice.kew.api.document.DocumentWithContent; import org.kuali.rice.kew.api.document.attribute.DocumentAttribute; import org.kuali.rice.kew.api.document.attribute.DocumentAttributeFactory; import org.kuali.rice.kew.api.document.attribute.DocumentAttributeString; import org.kuali.rice.kew.api.document.attribute.WorkflowAttributeDefinition; import org.kuali.rice.kew.api.document.search.DocumentSearchCriteria; import org.kuali.rice.kew.api.exception.WorkflowException; import org.kuali.rice.kew.api.extension.ExtensionDefinition; import org.kuali.rice.kew.framework.document.attribute.SearchableAttribute; import org.kuali.rice.kns.datadictionary.MaintenanceDocumentEntry; import org.kuali.rice.kns.document.MaintenanceDocument; 
import org.kuali.rice.kns.lookup.LookupUtils; import org.kuali.rice.kns.maintenance.KualiGlobalMaintainableImpl; import org.kuali.rice.kns.maintenance.Maintainable; import org.kuali.rice.kns.service.DictionaryValidationService; import org.kuali.rice.kns.service.KNSServiceLocator; import org.kuali.rice.kns.service.WorkflowAttributePropertyResolutionService; import org.kuali.rice.kns.util.FieldUtils; import org.kuali.rice.kns.web.ui.Field; import org.kuali.rice.kns.web.ui.Row; import org.kuali.rice.krad.bo.BusinessObject; import org.kuali.rice.krad.bo.DocumentHeader; import org.kuali.rice.kns.bo.GlobalBusinessObject; import org.kuali.rice.krad.bo.PersistableBusinessObject; import org.kuali.rice.krad.datadictionary.BusinessObjectEntry; import org.kuali.rice.krad.datadictionary.DocumentEntry; import org.kuali.rice.krad.datadictionary.SearchingAttribute; import org.kuali.rice.krad.datadictionary.SearchingTypeDefinition; import org.kuali.rice.krad.datadictionary.WorkflowAttributes; import org.kuali.rice.krad.document.Document; import org.kuali.rice.krad.service.DocumentService; import org.kuali.rice.krad.service.KRADServiceLocatorWeb; import org.kuali.rice.krad.util.ErrorMessage; import org.kuali.rice.krad.util.GlobalVariables; import org.kuali.rice.krad.util.KRADPropertyConstants; import org.kuali.rice.krad.util.MessageMap; import org.kuali.rice.krad.util.ObjectUtils; import java.text.MessageFormat; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * @deprecated Only used by KNS classes, no replacement. 
*/ @Deprecated public class DataDictionarySearchableAttribute implements SearchableAttribute { private static final long serialVersionUID = 173059488280366451L; private static final Logger LOG = Logger.getLogger(DataDictionarySearchableAttribute.class); public static final String DATA_TYPE_BOOLEAN = "boolean"; public static final String KEW_RUN_MODE_PROPERTY = "kew.mode"; @Override public String generateSearchContent(ExtensionDefinition extensionDefinition, String documentTypeName, WorkflowAttributeDefinition attributeDefinition) { return ""; } @Override public List<DocumentAttribute> extractDocumentAttributes(ExtensionDefinition extensionDefinition, DocumentWithContent documentWithContent) { List<DocumentAttribute> attributes = new ArrayList<DocumentAttribute>(); String docId = documentWithContent.getDocument().getDocumentId(); DocumentService docService = KRADServiceLocatorWeb.getDocumentService(); Document doc = null; try { doc = docService.getByDocumentHeaderIdSessionless(docId); } catch (WorkflowException we) { LOG.error( "Unable to retrieve document " + docId + " in getSearchStorageValues()", we); } String attributeValue = ""; if ( doc != null ) { if ( doc.getDocumentHeader() != null ) { attributeValue = doc.getDocumentHeader().getDocumentDescription(); } else { attributeValue = "null document header"; } } else { attributeValue = "null document"; } DocumentAttributeString attribute = DocumentAttributeFactory.createStringAttribute("documentDescription", attributeValue); attributes.add(attribute); attributeValue = ""; if ( doc != null ) { if ( doc.getDocumentHeader() != null ) { attributeValue = doc.getDocumentHeader().getOrganizationDocumentNumber(); } else { attributeValue = "null document header"; } } else { attributeValue = "null document"; } attribute = DocumentAttributeFactory.createStringAttribute("organizationDocumentNumber", attributeValue); attributes.add(attribute); if ( doc != null && doc instanceof MaintenanceDocument) { final Class<? 
extends BusinessObject> businessObjectClass = getBusinessObjectClass( documentWithContent.getDocument().getDocumentTypeName()); if (businessObjectClass != null) { if (GlobalBusinessObject.class.isAssignableFrom(businessObjectClass)) { final GlobalBusinessObject globalBO = retrieveGlobalBusinessObject(docId, businessObjectClass); if (globalBO != null) { attributes.addAll(findAllDocumentAttributesForGlobalBusinessObject(globalBO)); } } else { attributes.addAll(parsePrimaryKeyValuesFromDocument(businessObjectClass, (MaintenanceDocument)doc)); } } } if ( doc != null ) { DocumentEntry docEntry = KRADServiceLocatorWeb.getDataDictionaryService().getDataDictionary().getDocumentEntry( documentWithContent.getDocument().getDocumentTypeName()); if ( docEntry != null ) { WorkflowAttributes workflowAttributes = docEntry.getWorkflowAttributes(); WorkflowAttributePropertyResolutionService waprs = KNSServiceLocator .getWorkflowAttributePropertyResolutionService(); attributes.addAll(waprs.resolveSearchableAttributeValues(doc, workflowAttributes)); } else { LOG.error("Unable to find DD document entry for document type: " + documentWithContent.getDocument() .getDocumentTypeName()); } } return attributes; } @Override public List<RemotableAttributeField> getSearchFields(ExtensionDefinition extensionDefinition, String documentTypeName) { List<Row> searchRows = getSearchingRows(documentTypeName); return FieldUtils.convertRowsToAttributeFields(searchRows); } /** * Produces legacy KNS rows to use for search attributes. This method was left intact to help ease conversion * until KNS is replaced with KRAD. 
*/
    /**
     * Builds the list of search criteria {@link Row}s presented for the given document type.
     * Always includes the two standard {@code DocumentHeader} fields (description and
     * organization document number); for maintenance documents it adds one row per primary
     * key of the maintained business object, and finally appends any rows defined by the
     * document's data-dictionary {@code WorkflowAttributes}.
     *
     * @param documentTypeName name of the document type to build search rows for
     * @return the list of search rows (never null; at minimum the two standard rows)
     */
    protected List<Row> getSearchingRows(String documentTypeName) {

        List<Row> docSearchRows = new ArrayList<Row>();

        // Standard header fields, searchable on every document type, both typed as strings.
        Class boClass = DocumentHeader.class;
        Field descriptionField = FieldUtils.getPropertyField(boClass, "documentDescription", true);
        descriptionField.setFieldDataType(KewApiConstants.SearchableAttributeConstants.DATA_TYPE_STRING);

        Field orgDocNumberField = FieldUtils.getPropertyField(boClass, "organizationDocumentNumber", true);
        orgDocNumberField.setFieldDataType(KewApiConstants.SearchableAttributeConstants.DATA_TYPE_STRING);

        // One row per standard field.
        List<Field> fieldList = new ArrayList<Field>();
        fieldList.add(descriptionField);
        docSearchRows.add(new Row(fieldList));

        fieldList = new ArrayList<Field>();
        fieldList.add(orgDocNumberField);
        docSearchRows.add(new Row(fieldList));

        DocumentEntry entry =
            KRADServiceLocatorWeb.getDataDictionaryService().getDataDictionary().getDocumentEntry(documentTypeName);
        if (entry  == null)
            return docSearchRows;
        if (entry instanceof MaintenanceDocumentEntry) {
            Class<? extends BusinessObject> businessObjectClass = getBusinessObjectClass(documentTypeName);
            Class<? extends Maintainable> maintainableClass = getMaintainableClass(documentTypeName);

            // For "global" maintenance documents the primary edited BO class (not the
            // global container class) drives which key fields are searchable.
            KualiGlobalMaintainableImpl globalMaintainable = null;
            try {
                globalMaintainable = (KualiGlobalMaintainableImpl)maintainableClass.newInstance();
                businessObjectClass = globalMaintainable.getPrimaryEditedBusinessObjectClass();
            } catch (Exception ie) {
                // was not a globalMaintainable: deliberately ignored; fall back to the
                // business object class resolved from the document type above.
            }

            if (businessObjectClass != null)
                docSearchRows.addAll(createFieldRowsForBusinessObject(businessObjectClass));
        }

        WorkflowAttributes workflowAttributes = entry.getWorkflowAttributes();
        if (workflowAttributes != null)
            docSearchRows.addAll(createFieldRowsForWorkflowAttributes(workflowAttributes));

        return docSearchRows;
    }

    /**
     * Validates the document attribute values in the given search criteria by running each
     * non-blank value through the dictionary attribute-format validation, then converts any
     * resulting {@code GlobalVariables} message-map errors into {@link RemotableAttributeError}s.
     *
     * @param extensionDefinition the extension definition for this searchable attribute (unused here)
     * @param documentSearchCriteria criteria whose document attribute values are validated
     * @return the (possibly empty) list of validation errors; never null
     */
    @Override
    public List<RemotableAttributeError> validateDocumentAttributeCriteria(ExtensionDefinition extensionDefinition,
            DocumentSearchCriteria documentSearchCriteria) {
        List<RemotableAttributeError> validationErrors = new ArrayList<RemotableAttributeError>();
        DictionaryValidationService validationService = KNSServiceLocator.getKNSDictionaryValidationService();

        // When KEW is not running in LOCAL mode, start from a clean message map so only
        // errors produced by this validation pass are reported back.
        RunMode kewRunMode = RunMode.valueOf(ConfigContext.getCurrentContextConfig().getProperty(KEW_RUN_MODE_PROPERTY));
        if (kewRunMode != RunMode.LOCAL) {
            GlobalVariables.getMessageMap().clearErrorMessages();
        }

        // validate the document attribute values
        Map<String, List<String>> documentAttributeValues = documentSearchCriteria.getDocumentAttributeValues();
        for (String key : documentAttributeValues.keySet()) {
            List<String> values = documentAttributeValues.get(key);
            if (CollectionUtils.isNotEmpty(values)) {
                for (String value : values) {
                    if (StringUtils.isNotBlank(value)) {
                        // errors accumulate in GlobalVariables' message map, harvested below
                        validationService.validateAttributeFormat(documentSearchCriteria.getDocumentTypeName(), key, value, key);
                    }
                }
            }
        }

        retrieveValidationErrorsFromGlobalVariables(validationErrors);

        return validationErrors;
    }

    /**
     * Retrieves validation errors from GlobalVariables MessageMap and appends to the given list
     * of RemotableAttributeError. The message map is cleared afterwards, since the errors are
     * considered "moved" into {@code validationErrors}.
     *
     * @param validationErrors list to append validation errors
     */
    protected void retrieveValidationErrorsFromGlobalVariables(List<RemotableAttributeError> validationErrors) {
        // can we use KualiConfigurationService?  It seemed to be used elsewhere...
        ConfigurationService configurationService = CoreApiServiceLocator.getKualiConfigurationService();

        if(GlobalVariables.getMessageMap().hasErrors()){
            // work on a deep copy so iterating is isolated from the live message map
            MessageMap deepCopy = (MessageMap) SerializationUtils.deepCopy(GlobalVariables.getMessageMap());
            for ( String errorKey : deepCopy.getErrorMessages().keySet() ) {
                List<ErrorMessage> errorMessages = deepCopy.getErrorMessages().get(errorKey);
                if (CollectionUtils.isNotEmpty(errorMessages)) {
                    List<String> errors = new ArrayList<String>();
                    for (ErrorMessage errorMessage : errorMessages) {
                        // need to materialize the message from it's parameters so we can send it back to the framework
                        String error = MessageFormat.format(configurationService.getPropertyValueAsString(errorMessage.getErrorKey()), errorMessage.getMessageParameters());
                        errors.add(error);
                    }
                    RemotableAttributeError remotableAttributeError = RemotableAttributeError.Builder.create(errorKey, errors).build();
                    validationErrors.add(remotableAttributeError);
                }
            }
            // we should now strip the error messages from the map because they have moved to validationErrors
            GlobalVariables.getMessageMap().clearErrorMessages();
        }
    }

    /**
     * Creates one search {@link Row} per searching attribute declared in the document type's
     * workflow attributes. Each field's data type is resolved via the property-resolution
     * service (booleans are searched as strings), range searching is enabled inline for
     * numeric/date types, and a quickfinder is attached where applicable.
     *
     * @param attrs the data-dictionary workflow attributes to build rows from
     * @return the list of generated rows (hidden rows included); never null
     * @throws RuntimeException if a declared business object class cannot be loaded/instantiated
     */
    protected List<Row> createFieldRowsForWorkflowAttributes(WorkflowAttributes attrs) {
        List<Row> searchFields = new ArrayList<Row>();

        List<SearchingTypeDefinition> searchingTypeDefinitions = attrs.getSearchingTypeDefinitions();
        final WorkflowAttributePropertyResolutionService propertyResolutionService = KNSServiceLocator
                .getWorkflowAttributePropertyResolutionService();
        for (SearchingTypeDefinition definition: searchingTypeDefinitions) {
            SearchingAttribute attr = definition.getSearchingAttribute();

            final String attributeName = attr.getAttributeName();
            final String businessObjectClassName = attr.getBusinessObjectClassName();
            Class boClass = null;
            Object businessObject = null;
            try {
                boClass = Class.forName(businessObjectClassName);
                // instance needed below so the quickfinder can be resolved against it
                businessObject = (Object)boClass.newInstance();
            } catch (Exception e) {
                throw new RuntimeException(e);
            }

            Field searchField = FieldUtils.getPropertyField(boClass, attributeName, false);
            // prepend all document attribute field names with "documentAttribute."
            //searchField.setPropertyName(KewApiConstants.DOCUMENT_ATTRIBUTE_FIELD_PREFIX + searchField.getPropertyName());
            searchField.setColumnVisible(attr.isShowAttributeInResultSet());

            //TODO this is a workaround to hide the Field from the search criteria.
            //This should be removed once hiding the entire Row is working
            if (!attr.isShowAttributeInSearchCriteria()){
                searchField.setFieldType(Field.HIDDEN);
            }
            String fieldDataType = propertyResolutionService.determineFieldDataType(boClass, attributeName);
            if (fieldDataType.equals(DataDictionarySearchableAttribute.DATA_TYPE_BOOLEAN)) {
                // booleans are presented/searched as strings
                fieldDataType = KewApiConstants.SearchableAttributeConstants.DATA_TYPE_STRING;
            }

            // Allow inline range searching on dates and numbers
            if (fieldDataType.equals(KewApiConstants.SearchableAttributeConstants.DATA_TYPE_FLOAT) ||
                fieldDataType.equals(KewApiConstants.SearchableAttributeConstants.DATA_TYPE_LONG) ||
                fieldDataType.equals(KewApiConstants.SearchableAttributeConstants.DATA_TYPE_DATE)) {

                searchField.setAllowInlineRange(true);
            }
            searchField.setFieldDataType(fieldDataType);
            List displayedFieldNames = new ArrayList();
            displayedFieldNames.add(attributeName);
            LookupUtils.setFieldQuickfinder(businessObject, attributeName, searchField, displayedFieldNames);

            List<Field> fieldList = new ArrayList<Field>();
            fieldList.add(searchField);

            Row row = new Row(fieldList);
            if (!attr.isShowAttributeInSearchCriteria()) {
                // hide the whole row, in addition to the HIDDEN field type workaround above
                row.setHidden(true);
            }
            searchFields.add(row);
        }
        return searchFields;
    }

    /**
     * Extracts a searchable document attribute for each primary key of the maintained
     * business object from the given maintenance document.
     *
     * @param businessObjectClass the class of the business object being maintained
     * @param document the maintenance document to pull primary key values from
     * @return one DocumentAttribute per non-null primary key value; never null
     */
    protected List<DocumentAttribute> parsePrimaryKeyValuesFromDocument(Class<? extends BusinessObject> businessObjectClass, MaintenanceDocument document) {
        List<DocumentAttribute> values = new ArrayList<DocumentAttribute>();

        final List primaryKeyNames = KRADServiceLocatorWeb.getLegacyDataAdapter().listPrimaryKeyFieldNames(businessObjectClass);

        for (Object primaryKeyNameAsObj : primaryKeyNames) {
            final String primaryKeyName = (String)primaryKeyNameAsObj;
            final DocumentAttribute searchableValue = parseSearchableAttributeValueForPrimaryKey(primaryKeyName, businessObjectClass, document);
            if (searchableValue != null) {
                values.add(searchableValue);
            }
        }
        return values;
    }

    /**
     * Creates a searchable attribute value for the given property name out of the document XML
     * @param propertyName the name of the property to return
     * @param businessObjectClass the class of the business object maintained
     * @param document the document XML
     * @return a generated SearchableAttributeValue, or null if a value could not be created
     */
    protected DocumentAttribute parseSearchableAttributeValueForPrimaryKey(String propertyName, Class<? extends BusinessObject> businessObjectClass, MaintenanceDocument document) {

        Maintainable maintainable = document.getNewMaintainableObject();
        PersistableBusinessObject bo = maintainable.getBusinessObject();

        final Object propertyValue = ObjectUtils.getPropertyValue(bo, propertyName);
        if (propertyValue == null) return null;

        final WorkflowAttributePropertyResolutionService propertyResolutionService = KNSServiceLocator
                .getWorkflowAttributePropertyResolutionService();
        DocumentAttribute value = propertyResolutionService.buildSearchableAttribute(businessObjectClass, propertyName, propertyValue);
        return value;
    }

    /**
     * Returns the class of the object being maintained by the given maintenance document type name
     * @param documentTypeName the name of the document type to look up the maintained business object for
     * @return the class of the maintained business object, or null if the entry is not found
     */
    protected Class<? extends BusinessObject> getBusinessObjectClass(String documentTypeName) {
        MaintenanceDocumentEntry entry = retrieveMaintenanceDocumentEntry(documentTypeName);
        return (entry == null ? null : (Class<? extends BusinessObject>) entry.getDataObjectClass());
    }

    /**
     * Returns the maintainable of the object being maintained by the given maintenance document type name
     * @param documentTypeName the name of the document type to look up the maintained business object for
     * @return the Maintainable of the maintained business object, or null if the entry is not found
     */
    protected Class<? extends Maintainable> getMaintainableClass(String documentTypeName) {
        MaintenanceDocumentEntry entry = retrieveMaintenanceDocumentEntry(documentTypeName);
        return (entry == null ? null : entry.getMaintainableClass());
    }

    /**
     * Retrieves the maintenance document entry for the given document type name
     * @param documentTypeName the document type name to look up the data dictionary document entry for
     * @return the corresponding data dictionary entry for a maintenance document
     */
    protected MaintenanceDocumentEntry retrieveMaintenanceDocumentEntry(String documentTypeName) {
        // cast is safe only for maintenance document types; callers pass maintenance doc names
        return (MaintenanceDocumentEntry) KRADServiceLocatorWeb.getDataDictionaryService().getDataDictionary().getDocumentEntry(documentTypeName);
    }

    /**
     * Looks up the persisted global business object associated with the given document number.
     *
     * @param documentNumber the workflow document number keying the global change record
     * @param businessObjectClass the GlobalBusinessObject implementation class to query
     * @return the first matching global business object, or null if none is found
     */
    protected GlobalBusinessObject retrieveGlobalBusinessObject(String documentNumber, Class<? extends BusinessObject> businessObjectClass) {
        GlobalBusinessObject globalBO = null;

        Map pkMap = new LinkedHashMap();
        pkMap.put(KRADPropertyConstants.DOCUMENT_NUMBER, documentNumber);

        List returnedBOs = (List) KNSServiceLocator.getBusinessObjectService().findMatching(businessObjectClass, pkMap);
        if (returnedBOs.size() > 0) {
            globalBO = (GlobalBusinessObject)returnedBOs.get(0);
        }

        return globalBO;
    }

    /**
     * Builds one searchable document attribute for each change record the given global
     * business object would persist.
     *
     * @param globalBO the global business object whose pending changes are examined
     * @return the collected document attributes (nulls skipped); never null
     */
    protected List<DocumentAttribute> findAllDocumentAttributesForGlobalBusinessObject(GlobalBusinessObject globalBO) {
        List<DocumentAttribute> searchValues = new ArrayList<DocumentAttribute>();

        for (Object bo : globalBO.generateGlobalChangesToPersist()) {
            DocumentAttribute value = generateSearchableAttributeFromChange(bo);
            if (value != null) {
                searchValues.add(value);
            }
        }

        return searchValues;
    }

    /**
     * Builds a searchable attribute from the FIRST primary key of the given change record
     * that has a non-null value; remaining keys are not considered.
     *
     * @param changeToPersist a business object change produced by a global maintenance document
     * @return the searchable attribute for the first populated primary key, or null if none
     */
    protected DocumentAttribute generateSearchableAttributeFromChange(Object changeToPersist) {
        List<String> primaryKeyNames = KRADServiceLocatorWeb.getLegacyDataAdapter().listPrimaryKeyFieldNames(changeToPersist.getClass());

        for (Object primaryKeyNameAsObject : primaryKeyNames) {
            String primaryKeyName = (String)primaryKeyNameAsObject;
            Object value = ObjectUtils.getPropertyValue(changeToPersist, primaryKeyName);

            if (value != null) {

                final WorkflowAttributePropertyResolutionService propertyResolutionService = KNSServiceLocator
                        .getWorkflowAttributePropertyResolutionService();
                DocumentAttribute saValue = propertyResolutionService.buildSearchableAttribute((Class)changeToPersist.getClass(), primaryKeyName, value);
                return saValue;

            }
        }
        return null;
    }

    /**
     * Creates a list of search fields, one for each primary key of the maintained business object
     * @param businessObjectClass the class of the maintained business object
     * @return a List of KEW search fields
     * @throws RuntimeException if the business object class cannot be instantiated
     */
    protected List<Row> createFieldRowsForBusinessObject(Class<? extends BusinessObject> businessObjectClass) {
        List<Row> searchFields = new ArrayList<Row>();

        final List primaryKeyNamesAsObjects = KRADServiceLocatorWeb.getLegacyDataAdapter().listPrimaryKeyFieldNames(businessObjectClass);
        final BusinessObjectEntry boEntry = KRADServiceLocatorWeb.getDataDictionaryService().getDataDictionary().getBusinessObjectEntry(businessObjectClass.getName());
        final WorkflowAttributePropertyResolutionService propertyResolutionService = KNSServiceLocator
                .getWorkflowAttributePropertyResolutionService();
        for (Object primaryKeyNameAsObject : primaryKeyNamesAsObjects) {

            String attributeName =  (String)primaryKeyNameAsObject;
            BusinessObject businessObject = null;
            try {
                // fresh instance per key so the quickfinder resolves against a clean object
                businessObject = businessObjectClass.newInstance();
            } catch (Exception e) {
                throw new RuntimeException(e);
            }

            Field searchField = FieldUtils.getPropertyField(businessObjectClass, attributeName, false);
            String dataType = propertyResolutionService.determineFieldDataType(businessObjectClass, attributeName);
            searchField.setFieldDataType(dataType);

            List<Field> fieldList = new ArrayList<Field>();
            List displayedFieldNames = new ArrayList();
            displayedFieldNames.add(attributeName);
            LookupUtils.setFieldQuickfinder(businessObject, attributeName, searchField, displayedFieldNames);

            fieldList.add(searchField);
            searchFields.add(new Row(fieldList));
        }
        return searchFields;
    }
}
package com.rubber.frames;

import com.rubber.Env;
import com.rubber.Statics;
import java.awt.Frame;
import java.io.File;
import javax.swing.JFileChooser;
import javax.swing.filechooser.FileNameExtensionFilter;

/**
 * Main application window: lets the user pick a SWF file, configure basic
 * (domain/port, xmlsocket policy) and advanced (custom component replacement)
 * injection options, and launch the crack/recompile processing in a background
 * thread. Implements {@link IFrameEvent} so it can be invoked by the app framework.
 *
 * @author Iegor
 */
public class Main extends javax.swing.JFrame implements IFrameEvent {

    // Original "Crack it" button label, saved while processing so it can be restored
    // by finishProcess().
    private String jButtonText = null;

    /**
     * Creates new form Main
     */
    public Main() {
        initComponents();
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        jDialog1 = new javax.swing.JDialog();
        jFileChooser1 = new javax.swing.JFileChooser();
        jDialog2 = new javax.swing.JDialog();
        jSeparator2 = new javax.swing.JSeparator();
        jLabel3 = new javax.swing.JLabel();
        jLabel5 = new javax.swing.JLabel();
        jLabel6 = new javax.swing.JLabel();
        jLabel7 = new javax.swing.JLabel();
        jLabel8 = new javax.swing.JLabel();
        jDialog3 = new javax.swing.JDialog();
        jLabel9 = new javax.swing.JLabel();
        jDialog4 = new javax.swing.JDialog();
        jFileChooser2 = new javax.swing.JFileChooser();
        jButton1 = new javax.swing.JButton();
        jTabbedPane1 = new javax.swing.JTabbedPane();
        jPanel1 = new javax.swing.JPanel();
        jTextField2 = new javax.swing.JTextField();
        jLabel2 = new javax.swing.JLabel();
        jTextField1 = new javax.swing.JTextField();
        jLabel1 = new javax.swing.JLabel();
        jLabel4 = new javax.swing.JLabel();
        jCheckBox1 = new javax.swing.JCheckBox();
        jPanel2 = new javax.swing.JPanel();
        jCheckBox2 = new javax.swing.JCheckBox();
        jButton2 = new javax.swing.JButton();
        jScrollPane1 = new javax.swing.JScrollPane();
        jTextArea1 = new javax.swing.JTextArea();
        jMenuBar1 = new javax.swing.JMenuBar();
        jMenu1 = new javax.swing.JMenu();
        jMenuItem3 = new javax.swing.JMenuItem();
        jSeparator1 = new javax.swing.JPopupMenu.Separator();
        jMenuItem1 = new javax.swing.JMenuItem();
        jMenu2 = new javax.swing.JMenu();
        jMenuItem2 = new javax.swing.JMenuItem();
        jMenuItem4 = new javax.swing.JMenuItem();

        // jFileChooser1 (inside jDialog1): SWF input file picker.
        FileNameExtensionFilter filter = new FileNameExtensionFilter("SWF", "swf");
        jFileChooser1.setAcceptAllFileFilterUsed(false);
        jFileChooser1.setFileFilter(filter);
        jFileChooser1.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jFileChooser1ActionPerformed(evt);
            }
        });

        javax.swing.GroupLayout jDialog1Layout = new javax.swing.GroupLayout(jDialog1.getContentPane());
        jDialog1.getContentPane().setLayout(jDialog1Layout);
        jDialog1Layout.setHorizontalGroup(
            jDialog1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGap(0, 623, Short.MAX_VALUE)
            .addGroup(jDialog1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                .addGroup(jDialog1Layout.createSequentialGroup()
                    .addGap(0, 0, Short.MAX_VALUE)
                    .addComponent(jFileChooser1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addGap(0, 0, Short.MAX_VALUE)))
        );
        jDialog1Layout.setVerticalGroup(
            jDialog1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGap(0, 397, Short.MAX_VALUE)
            .addGroup(jDialog1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                .addGroup(jDialog1Layout.createSequentialGroup()
                    .addGap(0, 0, Short.MAX_VALUE)
                    .addComponent(jFileChooser1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addGap(0, 0, Short.MAX_VALUE)))
        );

        // jDialog2: changelog dialog contents.
        jLabel3.setText("Changelog for the actual version:");

        jLabel5.setText("v" + Statics.VERSION);

        jLabel6.setText("- Injection accuracy improved.");

        jLabel7.setText("- Patched for the latest versions.");

        jLabel8.setText("- Some errors fixed.");

        javax.swing.GroupLayout jDialog2Layout = new javax.swing.GroupLayout(jDialog2.getContentPane());
        jDialog2.getContentPane().setLayout(jDialog2Layout);
        jDialog2Layout.setHorizontalGroup(
            jDialog2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jDialog2Layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(jDialog2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(jSeparator2)
                    .addGroup(jDialog2Layout.createSequentialGroup()
                        .addComponent(jLabel3)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 16, Short.MAX_VALUE)
                        .addComponent(jLabel5))
                    .addGroup(jDialog2Layout.createSequentialGroup()
                        .addGroup(jDialog2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                            .addComponent(jLabel6)
                            .addComponent(jLabel7)
                            .addComponent(jLabel8))
                        .addGap(0, 0, Short.MAX_VALUE)))
                .addContainerGap())
        );
        jDialog2Layout.setVerticalGroup(
            jDialog2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jDialog2Layout.createSequentialGroup()
                .addGap(17, 17, 17)
                .addGroup(jDialog2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(jLabel3)
                    .addComponent(jLabel5))
                .addGap(18, 18, 18)
                .addComponent(jSeparator2, javax.swing.GroupLayout.PREFERRED_SIZE, 22, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addGap(0, 0, 0)
                .addComponent(jLabel6)
                .addGap(6, 6, 6)
                .addComponent(jLabel7)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(jLabel8)
                .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
        );

        // jDialog3: credits dialog contents.
        jLabel9.setText("This tool was developed by ".concat(Statics.AUTHOR).concat(" with the help of Nasty35."));

        javax.swing.GroupLayout jDialog3Layout = new javax.swing.GroupLayout(jDialog3.getContentPane());
        jDialog3.getContentPane().setLayout(jDialog3Layout);
        jDialog3Layout.setHorizontalGroup(
            jDialog3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jDialog3Layout.createSequentialGroup()
                .addContainerGap()
                .addComponent(jLabel9)
                .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
        );
        jDialog3Layout.setVerticalGroup(
            jDialog3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jDialog3Layout.createSequentialGroup()
                .addContainerGap()
                .addComponent(jLabel9)
                .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
        );

        jLabel9.getAccessibleContext().setAccessibleName("");

        // jFileChooser2 (inside jDialog4): ASASM custom-component picker.
        FileNameExtensionFilter filter2 = new FileNameExtensionFilter("ASASM", "asasm");
        jFileChooser2.setAcceptAllFileFilterUsed(false);
        jFileChooser2.setFileFilter(filter2);
        jFileChooser2.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jFileChooser2ActionPerformed(evt);
            }
        });

        javax.swing.GroupLayout jDialog4Layout = new javax.swing.GroupLayout(jDialog4.getContentPane());
        jDialog4.getContentPane().setLayout(jDialog4Layout);
        jDialog4Layout.setHorizontalGroup(
            jDialog4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jDialog4Layout.createSequentialGroup()
                .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                .addComponent(jFileChooser2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addContainerGap())
        );
        jDialog4Layout.setVerticalGroup(
            jDialog4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jDialog4Layout.createSequentialGroup()
                .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                .addComponent(jFileChooser2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addContainerGap())
        );

        setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
        setBackground(new java.awt.Color(45, 45, 48));

        jButton1.setText("Crack it");
        jButton1.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jButton1ActionPerformed(evt);
            }
        });

        // "Basic" tab: domain/port fields plus the xmlsocket policy checkbox.
        jPanel1.setBackground(new java.awt.Color(255, 255, 255));

        jTextField2.setText(Statics.portcrack);

        jLabel2.setText("Port:");

        jTextField1.setText(Statics.domaincrack);
        jTextField1.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jTextField1ActionPerformed(evt);
            }
        });

        jLabel1.setText("Domain:");

        jLabel4.setText("Do you want to disable the xmlsocket policy security?");

        jCheckBox1.setBackground(new java.awt.Color(255, 255, 255));
        jCheckBox1.setText("Yes, i want.");
        jCheckBox1.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jCheckBox1ActionPerformed(evt);
            }
        });

        javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
        jPanel1.setLayout(jPanel1Layout);
        jPanel1Layout.setHorizontalGroup(
            jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGap(0, 358, Short.MAX_VALUE)
            .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                .addGroup(jPanel1Layout.createSequentialGroup()
                    .addContainerGap()
                    .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
                        .addGroup(javax.swing.GroupLayout.Alignment.LEADING, jPanel1Layout.createSequentialGroup()
                            .addComponent(jLabel4)
                            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                            .addComponent(jCheckBox1))
                        .addGroup(javax.swing.GroupLayout.Alignment.LEADING, jPanel1Layout.createSequentialGroup()
                            .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
                                .addComponent(jLabel1)
                                .addComponent(jLabel2))
                            .addGap(14, 14, 14)
                            .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                                .addComponent(jTextField2)
                                .addComponent(jTextField1))))
                    .addContainerGap()))
        );
        jPanel1Layout.setVerticalGroup(
            jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGap(0, 109, Short.MAX_VALUE)
            .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                .addGroup(jPanel1Layout.createSequentialGroup()
                    .addContainerGap()
                    .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                        .addComponent(jLabel1)
                        .addComponent(jTextField1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                    .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                        .addComponent(jLabel2)
                        .addComponent(jTextField2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                    .addGap(18, 18, 18)
                    .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                        .addComponent(jLabel4)
                        .addComponent(jCheckBox1))
                    .addContainerGap(11, Short.MAX_VALUE)))
        );

        jTabbedPane1.addTab("Basic", jPanel1);

        // "Advanced" tab: custom-component injection controls and file list.
        jPanel2.setBackground(new java.awt.Color(255, 255, 255));

        jCheckBox2.setBackground(new java.awt.Color(255, 255, 255));
        jCheckBox2.setText("Enable advanced injection");
        jCheckBox2.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jCheckBox2ActionPerformed(evt);
            }
        });

        jButton2.setText("Add custom component");
        jButton2.setEnabled(jCheckBox2.isSelected());
        jButton2.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jButton2ActionPerformed(evt);
            }
        });

        jTextArea1.setEditable(false);
        jTextArea1.setColumns(20);
        jTextArea1.setRows(5);
        jTextArea1.setText("Nothing found...");
        jScrollPane1.setViewportView(jTextArea1);

        javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2);
        jPanel2.setLayout(jPanel2Layout);
        jPanel2Layout.setHorizontalGroup(
            jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel2Layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(jPanel2Layout.createSequentialGroup()
                        .addComponent(jScrollPane1)
                        .addContainerGap())
                    .addGroup(jPanel2Layout.createSequentialGroup()
                        .addComponent(jCheckBox2)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 46, Short.MAX_VALUE)
                        .addComponent(jButton2)
                        .addGap(6, 6, 6))))
        );
        jPanel2Layout.setVerticalGroup(
            jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel2Layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(jCheckBox2)
                    .addComponent(jButton2))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 56, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
        );

        jTabbedPane1.addTab("Advanced", jPanel2);

        // Menu bar: File (open swf / exit) and Help (changelog / credits).
        jMenu1.setText("File");

        jMenuItem3.setText("Open swf...");
        jMenuItem3.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jMenuItem3ActionPerformed(evt);
            }
        });
        jMenu1.add(jMenuItem3);
        jMenu1.add(jSeparator1);

        jMenuItem1.setText("Exit...");
        jMenuItem1.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jMenuItem1ActionPerformed(evt);
            }
        });
        jMenu1.add(jMenuItem1);

        jMenuBar1.add(jMenu1);

        jMenu2.setText("Help");

        jMenuItem2.setText("Changelog...");
        jMenuItem2.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jMenuItem2ActionPerformed(evt);
            }
        });
        jMenu2.add(jMenuItem2);

        jMenuItem4.setText("Credits...");
        jMenuItem4.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jMenuItem4ActionPerformed(evt);
            }
        });
        jMenu2.add(jMenuItem4);

        jMenuBar1.add(jMenu2);

        setJMenuBar(jMenuBar1);

        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(jTabbedPane1)
                    .addComponent(jButton1, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
                .addContainerGap())
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addGap(10, 10, 10)
                .addComponent(jTabbedPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 134, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addGap(11, 11, 11)
                .addComponent(jButton1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                .addContainerGap())
        );

        pack();
    }// </editor-fold>//GEN-END:initComponents

    // File > Exit...: terminates the application.
    private void jMenuItem1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jMenuItem1ActionPerformed
        // TODO add your handling code here:
        System.exit(0);
    }//GEN-LAST:event_jMenuItem1ActionPerformed

    // Domain text field action: intentionally unused (generated stub).
    private void jTextField1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jTextField1ActionPerformed
        // TODO add your handling code here:
    }//GEN-LAST:event_jTextField1ActionPerformed

    // xmlsocket policy checkbox action: intentionally unused (generated stub);
    // its state is read in jButton1ActionPerformed instead.
    private void jCheckBox1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jCheckBox1ActionPerformed
        // TODO add your handling code here:
    }//GEN-LAST:event_jCheckBox1ActionPerformed

    // "Crack it": validates a file was chosen, copies UI state into Statics and
    // hands processing to a background thread; finishProcess() re-enables the UI.
    private void jButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton1ActionPerformed
        // TODO add your handling code here:
        if (Statics.fileChoosed == null) {
            javax.swing.JOptionPane.showMessageDialog(this, "Please select a swf file to start. Go to File > Open swf...", "Select a file", javax.swing.JOptionPane.INFORMATION_MESSAGE);
        } else {
            jButtonText = jButton1.getText();
            jButton1.setText("Please wait, this can take a while...");
            jButton1.setEnabled(false);
            jMenuItem3.setEnabled(false); //Disable the open file option
            Statics.xmlcrack = jCheckBox1.isSelected();
            Statics.advancedCrack = jCheckBox2.isSelected();
            Statics.domaincrack = jTextField1.getText();
            Statics.portcrack = jTextField2.getText();
            Env.getManager().getThreads().executeOthers(new com.rubber.core.Processor(this));
        }
    }//GEN-LAST:event_jButton1ActionPerformed

    // Help > Changelog...: shows jDialog2 via the Prettify helper.
    private void jMenuItem2ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jMenuItem2ActionPerformed
        // TODO add your handling code here:
        com.rubber.utils.Prettify.Dialog(jDialog2, "Changelog", false);
    }//GEN-LAST:event_jMenuItem2ActionPerformed

    // File > Open swf...: shows the SWF file chooser dialog.
    private void jMenuItem3ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jMenuItem3ActionPerformed
        // TODO add your handling code here:
        com.rubber.utils.Prettify.Dialog(jDialog1, "Select the file you want to crack...", false);
    }//GEN-LAST:event_jMenuItem3ActionPerformed

    // SWF chooser: stores the selection in Statics.fileChoosed on approve,
    // then closes the hosting dialog either way.
    private void jFileChooser1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jFileChooser1ActionPerformed
        // TODO add your handling code here:
        JFileChooser fileChooser = (JFileChooser) evt.getSource();
        String command = evt.getActionCommand();
        switch (command) {
            case JFileChooser.APPROVE_SELECTION:
                Statics.fileChoosed = fileChooser.getSelectedFile();
                jDialog1.dispose();
                break;
            case JFileChooser.CANCEL_SELECTION:
                jDialog1.dispose();
                break;
        }
    }//GEN-LAST:event_jFileChooser1ActionPerformed

    // Help > Credits...: shows jDialog3 via the Prettify helper.
    private void jMenuItem4ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jMenuItem4ActionPerformed
        // TODO add your handling code here:
        com.rubber.utils.Prettify.Dialog(jDialog3, "Credits", false);
    }//GEN-LAST:event_jMenuItem4ActionPerformed

    // "Add custom component": requires advanced injection to be enabled, then
    // opens the ASASM component chooser dialog.
    private void jButton2ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton2ActionPerformed
        // TODO add your handling code here:
        if(!jCheckBox2.isSelected()) {
            javax.swing.JOptionPane.showMessageDialog(jDialog4, "Please, enable the advanced injection checkbox to add custom components!", "Information", javax.swing.JOptionPane.INFORMATION_MESSAGE);
            return;
        }
        com.rubber.utils.Prettify.Dialog(jDialog4, "Select the file with the name of the component you want to replace.", false);
    }//GEN-LAST:event_jFileChooser2ActionPerformed and jButton2 share GEN markers below
    // NOTE(review): GEN marker above belongs to jButton2ActionPerformed; kept as generated.

    // ASASM chooser: adds the selected file to Statics.advancedCrackFilesArr
    // (rejecting duplicates), refreshes the list display, and closes the dialog.
    private void jFileChooser2ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jFileChooser2ActionPerformed
        // TODO add your handling code here:
        JFileChooser fileChooser = (JFileChooser) evt.getSource();
        String command = evt.getActionCommand();
        switch (command) {
            case JFileChooser.APPROVE_SELECTION:
                if(Statics.advancedCrackFilesArr.contains(fileChooser.getSelectedFile())) {
                    javax.swing.JOptionPane.showMessageDialog(jDialog4, "The file you want to add is already added, select other!", "Warning", javax.swing.JOptionPane.WARNING_MESSAGE);
                    break;
                }
                Statics.advancedCrackFilesArr.add(fileChooser.getSelectedFile());
                // rebuild the visible list from scratch after each addition
                jTextArea1.setText("");
                Statics.advancedCrackFilesArr.stream().forEach((fileName) -> {
                    jTextArea1.append(fileName.toString().concat("\n"));
                });
                jDialog4.dispose();
                break;
            case JFileChooser.CANCEL_SELECTION:
                jDialog4.dispose();
                break;
        }
    }//GEN-LAST:event_jFileChooser2ActionPerformed

    // Advanced-injection checkbox: the add-component button is only usable when checked.
    private void jCheckBox2ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jCheckBox2ActionPerformed
        // TODO add your handling code here:
        jButton2.setEnabled(jCheckBox2.isSelected());
    }//GEN-LAST:event_jCheckBox2ActionPerformed

    /**
     * Called when background processing completes: restores the "Crack it" button
     * and the open-file menu item, and notifies the user of success.
     * NOTE(review): presumably invoked from the Processor thread — confirm this is
     * marshalled onto the EDT by the caller.
     */
    public void finishProcess() {
        jButton1.setEnabled(true);
        jButton1.setText(jButtonText);
        jMenuItem3.setEnabled(true);
        javax.swing.JOptionPane.showMessageDialog(this, "The file was cracked and compiled successfully!", "Success", javax.swing.JOptionPane.INFORMATION_MESSAGE);
    }

    /**
     * IFrameEvent entry point: applies the system look and feel, then creates and
     * displays the main window on the event dispatch thread.
     */
    @Override
    public void invoke() {
        /* Set windows look and feel */
        //<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
        /* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel.
         * For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html
         */
        try {
            javax.swing.UIManager.setLookAndFeel(javax.swing.UIManager.getSystemLookAndFeelClassName());
        } catch (ClassNotFoundException ex) {
            java.util.logging.Logger.getLogger(Main.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (InstantiationException ex) {
            java.util.logging.Logger.getLogger(Main.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (IllegalAccessException ex) {
            java.util.logging.Logger.getLogger(Main.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (javax.swing.UnsupportedLookAndFeelException ex) {
            java.util.logging.Logger.getLogger(Main.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        }
        //</editor-fold>

        /* Create and display the form */
        java.awt.EventQueue.invokeLater(new Runnable() {
            public void run() {
                Frame Main = new Main();
                com.rubber.utils.Prettify.Windows(Main, "Xat decompiler by Returns();", false);
            }
        });
    }

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton jButton1;
    private javax.swing.JButton jButton2;
    private javax.swing.JCheckBox jCheckBox1;
    private javax.swing.JCheckBox jCheckBox2;
    private javax.swing.JDialog jDialog1;
    private javax.swing.JDialog jDialog2;
    private javax.swing.JDialog jDialog3;
    private javax.swing.JDialog jDialog4;
    private javax.swing.JFileChooser jFileChooser1;
    private javax.swing.JFileChooser jFileChooser2;
    private javax.swing.JLabel jLabel1;
    private javax.swing.JLabel jLabel2;
    private javax.swing.JLabel jLabel3;
    private javax.swing.JLabel jLabel4;
    private javax.swing.JLabel jLabel5;
    private javax.swing.JLabel jLabel6;
    private javax.swing.JLabel jLabel7;
    private javax.swing.JLabel jLabel8;
    private javax.swing.JLabel jLabel9;
    private javax.swing.JMenu jMenu1;
    private javax.swing.JMenu jMenu2;
    private javax.swing.JMenuBar jMenuBar1;
    private javax.swing.JMenuItem jMenuItem1;
    private javax.swing.JMenuItem jMenuItem2;
    private javax.swing.JMenuItem jMenuItem3;
    private javax.swing.JMenuItem jMenuItem4;
    private javax.swing.JPanel jPanel1;
    private javax.swing.JPanel jPanel2;
    private javax.swing.JScrollPane jScrollPane1;
    private javax.swing.JPopupMenu.Separator jSeparator1;
    private javax.swing.JSeparator jSeparator2;
    private javax.swing.JTabbedPane jTabbedPane1;
    private javax.swing.JTextArea jTextArea1;
    private javax.swing.JTextField jTextField1;
    private javax.swing.JTextField jTextField2;
    // End of variables declaration//GEN-END:variables
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.manifoldcf.crawler.connectors.webcrawler.tests;

import org.apache.manifoldcf.agents.interfaces.IOutputConnection;
import org.apache.manifoldcf.agents.interfaces.IOutputConnectionManager;
import org.apache.manifoldcf.agents.interfaces.OutputConnectionManagerFactory;
import org.apache.manifoldcf.core.interfaces.ConfigParams;
import org.apache.manifoldcf.core.interfaces.IResultRow;
import org.apache.manifoldcf.core.interfaces.IResultSet;
import org.apache.manifoldcf.core.interfaces.IThreadContext;
import org.apache.manifoldcf.core.interfaces.ManifoldCFException;
import org.apache.manifoldcf.core.interfaces.Specification;
import org.apache.manifoldcf.core.interfaces.SpecificationNode;
import org.apache.manifoldcf.core.interfaces.ThreadContextFactory;
import org.apache.manifoldcf.crawler.interfaces.*;
import org.apache.manifoldcf.crawler.connectors.webcrawler.WebcrawlerConfig;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.util.thread.QueuedThreadPool;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;

import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsNull.nullValue;
import static org.junit.Assert.assertThat;

/**
 * Integration test (HSQLDB) for the web crawler's document content exclusion
 * rules.  An embedded Jetty server serves {@value #MAX_DOC_COUNT} small pages;
 * only page 1 contains the keyword "expired".  The test crawls the pages once
 * without any exclusion rule (all pages OK) and once with an exclusion rule on
 * ".*expired.*" (page 1 must be rejected with EXCLUDEDCONTENT).
 *
 * Review fixes applied: lowercase long literal {@code 0l} -> {@code 0L};
 * static interface constants accessed via the interface instead of an
 * instance; servlet fallback label typo "unkown" -> "unknown"; emitted HTML
 * now closes the {@code <html>} element; the "Web Connection" literal now
 * reuses {@link #WEB_CONNECTION}.
 */
public class DocumentContentExclusionHSQLDBIT extends BaseITHSQLDB {

    /** Number of seeded test pages. */
    private static final int MAX_DOC_COUNT = 3;
    public static final String CONTENTFILTER_SERVLET_PATH = "/contentexclusiontest";
    private static final int PORT = 8191;
    /** Maximum time (ms) to wait for a job to finish or be deleted. */
    public static final long MAX_WAIT_TIME = 60 * 1000L;
    public static final String WEB_CONNECTION = "Web Connection";
    static String baseUrl = "http://127.0.0.1:" + PORT + CONTENTFILTER_SERVLET_PATH + "?page=";

    private Server server = null;
    private IJobManager jobManager;
    private IOutputConnectionManager outputConnectionManager;
    private IRepositoryConnectionManager repoConnectionManager;

    /**
     * Starts the embedded Jetty server hosting the test servlet and creates
     * the repository and output connections used by the job.
     */
    @Before
    public void beforeDocumentContentFilterTest() throws Exception {
        server = new Server(new QueuedThreadPool(20));
        ServerConnector connector = new ServerConnector(server);
        connector.setPort(PORT);
        connector.setIdleTimeout(60000);// important for Http KeepAlive
        server.addConnector(connector);
        ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
        context.addServlet(ContentFilterTestServlet.class, CONTENTFILTER_SERVLET_PATH);
        server.setHandler(context);
        server.start();

        IThreadContext tc = ThreadContextFactory.make();
        repoConnectionManager = RepositoryConnectionManagerFactory.make(tc);
        outputConnectionManager = OutputConnectionManagerFactory.make(tc);
        jobManager = JobManagerFactory.make(tc);
        createRepoConnector();
        createOutputConnector();
    }

    /**
     * Runs the crawl twice: first without a content exclusion rule (all
     * documents indexed), then with the ".*expired.*" rule (page 1 excluded).
     */
    @Test
    public void testDocumentContentExclusion() throws Exception {
        //No content exclusion rule
        IJobDescription job = setupContentFilterJob();
        runContentFilterJob(job);
        checkContentFilterHistory(false);
        cleanupContentFilterJobs(job);

        //With exclusion rule
        job = setupContentFilterJob();
        //add content exclusion rule
        addContentExclusionRule(job);
        runContentFilterJob(job);
        checkContentFilterHistory(true);
        cleanupContentFilterJobs(job);
    }

    /**
     * Verifies the crawl history: every seeded page must appear exactly once;
     * when the exclusion rule was active, page 1 (the only page containing the
     * keyword "expired") must have been rejected with EXCLUDEDCONTENT.
     *
     * @param hasContentExcluded whether the exclusion rule was active
     */
    private void checkContentFilterHistory(boolean hasContentExcluded) throws Exception {
        FilterCriteria filter = new FilterCriteria(new String[]{"process"}, 0L, Long.MAX_VALUE,
                new RegExpCriteria(".*\\" + CONTENTFILTER_SERVLET_PATH + ".*", true), null);
        SortOrder sortOrderValue = new SortOrder();
        sortOrderValue.addCriteria("entityid", SortOrder.SORT_ASCENDING);
        IResultSet result = repoConnectionManager.genHistorySimple(WEB_CONNECTION, filter, sortOrderValue, 0, 20);
        assertThat(result.getRowCount(), is(MAX_DOC_COUNT));
        for (int i = 0; i < MAX_DOC_COUNT; i++) {
            IResultRow row = result.getRow(i);
            assertThat((String) row.getValue("identifier"), is(baseUrl + i));
            if (hasContentExcluded && i == 1) {
                //if excluding, only page 1 will be excluded
                assertThat((String) row.getValue("resultcode"), is("EXCLUDEDCONTENT"));
                assertThat((String) row.getValue("resultdesc"), is("Rejected due to content exclusion rule"));
            } else {
                assertThat((String) row.getValue("resultcode"), is("OK"));
                assertThat(row.getValue("resultdesc"), is(nullValue()));
            }
        }
    }

    /** Stops the embedded Jetty server, if it was started. */
    @After
    public void tearDownDocumentContentFilterTest() throws Exception {
        if (server != null) {
            server.stop();
        }
    }

    /**
     * Creates and saves a crawl job seeded with the {@value #MAX_DOC_COUNT}
     * test pages and including everything (fetch and index).
     *
     * @return the saved job description
     */
    private IJobDescription setupContentFilterJob() throws Exception {
        // Create a job.
        IJobDescription job = jobManager.createJob();
        job.setDescription("Test Job");
        job.setConnectionName(WEB_CONNECTION);
        job.addPipelineStage(-1, true, "Null Connection", "");
        // Interface constants: access statically rather than via the instance.
        job.setType(IJobDescription.TYPE_SPECIFIED);
        job.setStartMethod(IJobDescription.START_DISABLE);
        job.setHopcountMode(IJobDescription.HOPCOUNT_NEVERDELETE);

        Specification jobSpec = job.getSpecification();
        // 3 seeds only
        SpecificationNode sn = new SpecificationNode(WebcrawlerConfig.NODE_SEEDS);
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < MAX_DOC_COUNT; i++) {
            sb.append(baseUrl).append(i).append('\n');
        }
        sn.setValue(sb.toString());
        jobSpec.addChild(jobSpec.getChildCount(), sn);

        sn = new SpecificationNode(WebcrawlerConfig.NODE_INCLUDES);
        sn.setValue(".*\n");
        jobSpec.addChild(jobSpec.getChildCount(), sn);

        sn = new SpecificationNode(WebcrawlerConfig.NODE_INCLUDESINDEX);
        sn.setValue(".*\n");
        jobSpec.addChild(jobSpec.getChildCount(), sn);

        // Save the job.
        jobManager.save(job);
        return job;
    }

    /**
     * Adds the ".*expired.*" content-exclusion rule to the job and saves it.
     */
    private void addContentExclusionRule(IJobDescription job) throws ManifoldCFException {
        Specification jobSpec = job.getSpecification();
        SpecificationNode sn;
        sn = new SpecificationNode(WebcrawlerConfig.NODE_EXCLUDESCONTENTINDEX);
        sn.setValue(".*expired.*\n");
        jobSpec.addChild(jobSpec.getChildCount(), sn);
        jobManager.save(job);
    }

    /**
     * Creates and saves a basic null output connection.
     *
     * @return the saved output connection
     */
    private IOutputConnection createOutputConnector() throws ManifoldCFException {
        // Create a basic null output connection, and save it.
        IOutputConnection outputConn = outputConnectionManager.create();
        outputConn.setName("Null Connection");
        outputConn.setDescription("Null Connection");
        outputConn.setClassName("org.apache.manifoldcf.agents.tests.TestingOutputConnector");
        outputConn.setMaxConnections(10);
        // Now, save
        outputConnectionManager.save(outputConn);
        return outputConn;
    }

    /**
     * Creates and saves the web crawler repository connection, configured to
     * ignore robots.txt for the local test server.
     */
    private IRepositoryConnection createRepoConnector() throws ManifoldCFException {
        //TODO: This is a copy/paste: Could we have common method for creating test jobs???
        IRepositoryConnection repoConnection = repoConnectionManager.create();
        repoConnection.setName(WEB_CONNECTION);
        repoConnection.setDescription(WEB_CONNECTION);
        repoConnection.setClassName("org.apache.manifoldcf.crawler.connectors.webcrawler.WebcrawlerConnector");
        repoConnection.setMaxConnections(50);
        ConfigParams cp = repoConnection.getConfigParams();
        cp.setParameter(WebcrawlerConfig.PARAMETER_EMAIL, "someone@somewhere.com");
        cp.setParameter(WebcrawlerConfig.PARAMETER_ROBOTSUSAGE, "none");
        repoConnectionManager.save(repoConnection);
        return repoConnection;
    }

    /**
     * Clears the crawl history and deletes the job, waiting until the
     * deletion has completed.
     */
    private void cleanupContentFilterJobs(IJobDescription job) throws ManifoldCFException, InterruptedException {
        repoConnectionManager.cleanUpHistoryData(WEB_CONNECTION);
        jobManager.deleteJob(job.getID());
        mcfInstance.waitJobDeletedNative(jobManager, job.getID(), MAX_WAIT_TIME);
    }

    /**
     * Starts the job manually and waits (bounded) for it to go inactive,
     * printing the processed-document count for diagnostics.
     */
    private void runContentFilterJob(IJobDescription job) throws ManifoldCFException, InterruptedException {
        jobManager.manualStart(job.getID());
        try {
            mcfInstance.waitJobInactiveNative(jobManager, job.getID(), MAX_WAIT_TIME);
        } catch (ManifoldCFException e) {
            System.err.println("Halting for inspection");
            Thread.sleep(1000L);
            throw e;
        }
        // Check to be sure we actually processed the right number of documents.
        JobStatus status = jobManager.getStatus(job.getID());
        System.err.println("doc processed: " + status.getDocumentsProcessed() + " Job status: " + status.getStatus());
    }

    /**
     * Test servlet: emits a tiny HTML page echoing the "page" parameter.
     * Only page 1 contains the keyword "expired" targeted by the exclusion
     * rule.
     */
    public static class ContentFilterTestServlet extends HttpServlet {
        @Override
        protected void doGet(HttpServletRequest request, HttpServletResponse response)
                throws ServletException, IOException {
            response.setContentType("text/html; charset=utf-8");
            //response.setHeader("Keep-Alive", "timeout=5, max=100");
            response.setStatus(HttpServletResponse.SC_OK);
            String page = request.getParameter("page");
            // Fix: fallback label was misspelled "unkown".
            page = (page == null) ? "unknown" : page;
            response.getWriter().println("<html><head><title></title></head><body><h1>You are now on page " + page + " </h1>");
            if ("1".equals(page)) {
                //Only page 1 will contain the keyword "expired"
                response.getWriter().println("<h1>Page 1 has expired. bye bye</h1>");
            }
            // Fix: close the html element so the emitted page is well-formed.
            response.getWriter().println("</body></html>");
            response.getWriter().flush();
        }
    }
}
/** * Copyright (C) 2015 The Gravitee team (http://gravitee.io) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.gravitee.rest.api.spec.converter.wsdl.utils; import java.math.BigDecimal; import java.math.BigInteger; import java.util.ArrayList; import java.util.Arrays; import java.util.Calendar; import java.util.Date; import java.util.HashSet; import java.util.Random; import java.util.Set; import javax.xml.namespace.QName; import org.apache.xmlbeans.GDate; import org.apache.xmlbeans.GDateBuilder; import org.apache.xmlbeans.GDuration; import org.apache.xmlbeans.GDurationBuilder; import org.apache.xmlbeans.SchemaLocalElement; import org.apache.xmlbeans.SchemaParticle; import org.apache.xmlbeans.SchemaProperty; /* Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
 */
import org.apache.xmlbeans.SchemaType;
import org.apache.xmlbeans.SimpleValue;
import org.apache.xmlbeans.XmlAnySimpleType;
import org.apache.xmlbeans.XmlCursor;
import org.apache.xmlbeans.XmlDate;
import org.apache.xmlbeans.XmlDateTime;
import org.apache.xmlbeans.XmlDecimal;
import org.apache.xmlbeans.XmlDuration;
import org.apache.xmlbeans.XmlGDay;
import org.apache.xmlbeans.XmlGMonth;
import org.apache.xmlbeans.XmlGMonthDay;
import org.apache.xmlbeans.XmlGYear;
import org.apache.xmlbeans.XmlGYearMonth;
import org.apache.xmlbeans.XmlInteger;
import org.apache.xmlbeans.XmlObject;
import org.apache.xmlbeans.XmlTime;
import org.apache.xmlbeans.impl.util.Base64;
import org.apache.xmlbeans.impl.util.HexBin;
import org.apache.xmlbeans.soap.SOAPArrayType;
import org.apache.xmlbeans.soap.SchemaWSDLArrayType;

/**
 * fork of https://raw.githubusercontent.com/apache/xmlbeans/trunk/src/main/java/org/apache/xmlbeans/impl/xsd2inst/SampleXmlUtil.java
 * We want to create sample using existing XmlCursor
 */
public class SampleXmlUtil {
    // When true, produce SOAP-encoding flavoured samples.
    private boolean _soapEnc;
    // Hard cap on generated elements so repetition cannot explode.
    private static final int MAX_ELEMENTS = 1000;
    private int _nElements;

    public SampleXmlUtil() {
        this(false);
    }

    public SampleXmlUtil(boolean soapEnc) {
        _soapEnc = soapEnc;
    }

    // Fixed seed: sample generation is deterministic across runs.
    Random _picker = new Random(1);

    /**
     * Cursor position
     * Before:
     * <theElement>^</theElement>
     * After:
     * <theElement><lots of stuff/>^</theElement>
     */
    public void createSampleForType(SchemaType stype, XmlCursor xmlc) {
        // NOTE(review): _typeStack is declared elsewhere in this class (outside
        // this view); it appears to guard against infinite recursion on
        // recursive type definitions - confirm against the full file.
        if (_typeStack.contains(stype))
            return;
        _typeStack.add(stype);
        try {
            if (stype.isSimpleType() || stype.isURType()) {
                processSimpleType(stype, xmlc);
                return;
            }
            // complex Type
            // <theElement>^</theElement>
            processAttributes(stype, xmlc);
            // <theElement attri1="string">^</theElement>
            switch (stype.getContentType()) {
                case SchemaType.NOT_COMPLEX_TYPE:
                case SchemaType.EMPTY_CONTENT:
                    // noop
                    break;
                case SchemaType.SIMPLE_CONTENT: {
                    processSimpleType(stype, xmlc);
                }
                break;
                case SchemaType.MIXED_CONTENT:
                    // Mixed content: surround the particle with random words.
                    xmlc.insertChars(pick(WORDS) + " ");
                    if (stype.getContentModel() != null) {
                        processParticle(stype.getContentModel(), xmlc, true);
                    }
                    xmlc.insertChars(pick(WORDS));
                    break;
                case SchemaType.ELEMENT_CONTENT:
                    if (stype.getContentModel() != null) {
                        processParticle(stype.getContentModel(), xmlc, false);
                    }
                    break;
            }
        } finally {
            _typeStack.remove(_typeStack.size() - 1);
        }
    }

    // Inserts a sample text value for a simple type at the cursor.
    private void processSimpleType(SchemaType stype, XmlCursor xmlc) {
        String sample = sampleDataForSimpleType(stype);
        xmlc.insertChars(sample);
    }

    // Produces a sample lexical value for the given simple type, honouring
    // list/union varieties, enumerations and the primitive builtin kind.
    private String sampleDataForSimpleType(SchemaType sType) {
        if (XmlObject.type.equals(sType))
            return "anyType";
        if (XmlAnySimpleType.type.equals(sType))
            return "anySimpleType";
        if (sType.getSimpleVariety() == SchemaType.LIST) {
            // xs:list - emit a space-separated run of item samples.
            SchemaType itemType = sType.getListItemType();
            StringBuilder sb = new StringBuilder();
            int length = pickLength(sType);
            if (length > 0)
                sb.append(sampleDataForSimpleType(itemType));
            for (int i = 1; i < length; i += 1) {
                sb.append(' ');
                sb.append(sampleDataForSimpleType(itemType));
            }
            return sb.toString();
        }
        if (sType.getSimpleVariety() == SchemaType.UNION) {
            // xs:union - sample a randomly chosen member type.
            SchemaType[] possibleTypes = sType.getUnionConstituentTypes();
            if (possibleTypes.length == 0)
                return "";
            return sampleDataForSimpleType(possibleTypes[pick(possibleTypes.length)]);
        }
        XmlAnySimpleType[] enumValues = sType.getEnumerationValues();
        if (enumValues != null && enumValues.length > 0) {
            // Enumerated type - any declared value is valid.
            return enumValues[pick(enumValues.length)].getStringValue();
        }
        switch (sType.getPrimitiveType().getBuiltinTypeCode()) {
            default:
            case SchemaType.BTC_NOT_BUILTIN:
                return "";
            case SchemaType.BTC_ANY_TYPE:
            case SchemaType.BTC_ANY_SIMPLE:
                return "anything";
            case SchemaType.BTC_BOOLEAN:
                return pick(2) == 0 ?
                    "true" : "false";
            case SchemaType.BTC_BASE_64_BINARY: {
                String result = null;
                try {
                    result = new String(Base64.encode(formatToLength(pick(WORDS), sType).getBytes("utf-8")));
                } catch (java.io.UnsupportedEncodingException e) { /* Can't possibly happen */ }
                return result;
            }
            case SchemaType.BTC_HEX_BINARY:
                return HexBin.encode(formatToLength(pick(WORDS), sType));
            case SchemaType.BTC_ANY_URI:
                return formatToLength("http://www." + pick(DNS1) + "." + pick(DNS2) + "/" + pick(WORDS) + "/" + pick(WORDS), sType);
            case SchemaType.BTC_QNAME:
                return formatToLength("qname", sType);
            case SchemaType.BTC_NOTATION:
                return formatToLength("notation", sType);
            case SchemaType.BTC_FLOAT:
                return "1.5E2";
            case SchemaType.BTC_DOUBLE:
                return "1.051732E7";
            case SchemaType.BTC_DECIMAL:
                // Pick a distinct representative value per closest builtin
                // numeric kind, then clamp/format against the type's facets.
                switch (closestBuiltin(sType).getBuiltinTypeCode()) {
                    case SchemaType.BTC_SHORT:
                        return formatDecimal("1", sType);
                    case SchemaType.BTC_UNSIGNED_SHORT:
                        return formatDecimal("5", sType);
                    case SchemaType.BTC_BYTE:
                        return formatDecimal("2", sType);
                    case SchemaType.BTC_UNSIGNED_BYTE:
                        return formatDecimal("6", sType);
                    case SchemaType.BTC_INT:
                        return formatDecimal("3", sType);
                    case SchemaType.BTC_UNSIGNED_INT:
                        return formatDecimal("7", sType);
                    case SchemaType.BTC_LONG:
                        return formatDecimal("10", sType);
                    case SchemaType.BTC_UNSIGNED_LONG:
                        return formatDecimal("11", sType);
                    case SchemaType.BTC_INTEGER:
                        return formatDecimal("100", sType);
                    case SchemaType.BTC_NON_POSITIVE_INTEGER:
                        return formatDecimal("-200", sType);
                    case SchemaType.BTC_NEGATIVE_INTEGER:
                        return formatDecimal("-201", sType);
                    case SchemaType.BTC_NON_NEGATIVE_INTEGER:
                        return formatDecimal("200", sType);
                    case SchemaType.BTC_POSITIVE_INTEGER:
                        return formatDecimal("201", sType);
                    default:
                    case SchemaType.BTC_DECIMAL:
                        return formatDecimal("1000.00", sType);
                }
            case SchemaType.BTC_STRING: {
                String result;
                switch (closestBuiltin(sType).getBuiltinTypeCode()) {
                    case SchemaType.BTC_STRING:
                    case SchemaType.BTC_NORMALIZED_STRING:
                        result = "string";
                        break;
                    case SchemaType.BTC_TOKEN:
                        result = "token";
                        break;
                    default:
                        result = "string";
                        break;
                }
                return formatToLength(result, sType);
            }
            case SchemaType.BTC_DURATION:
                return formatDuration(sType);
            case SchemaType.BTC_DATE_TIME:
            case SchemaType.BTC_TIME:
            case SchemaType.BTC_DATE:
            case SchemaType.BTC_G_YEAR_MONTH:
            case SchemaType.BTC_G_YEAR:
            case SchemaType.BTC_G_MONTH_DAY:
            case SchemaType.BTC_G_DAY:
            case SchemaType.BTC_G_MONTH:
                return formatDate(sType);
        }
    }

    // a bit from the Aeneid
    public static final String[] WORDS = new String[] {
        "ipsa", "iovis", "rapidum", "iaculata", "e", "nubibus", "ignem", "disiecitque", "rates", "evertitque",
        "aequora", "ventis", "illum", "exspirantem", "transfixo", "pectore", "flammas", "turbine", "corripuit", "scopuloque",
        "infixit", "acuto", "ast", "ego", "quae", "divum", "incedo", "regina", "iovisque", "et",
        "soror", "et", "coniunx", "una", "cum", "gente", "tot", "annos", "bella", "gero",
        "et", "quisquam", "numen", "iunonis", "adorat", "praeterea", "aut", "supplex", "aris", "imponet",
        "honorem", "talia", "flammato", "secum", "dea", "corde", "volutans", "nimborum", "in", "patriam",
        "loca", "feta", "furentibus", "austris", "aeoliam", "venit", "hic", "vasto", "rex", "aeolus",
        "antro", "luctantis", "ventos", "tempestatesque", "sonoras", "imperio", "premit", "ac", "vinclis", "et",
        "carcere", "frenat", "illi", "indignantes", "magno", "cum", "murmure", "montis", "circum", "claustra",
        "fremunt", "celsa", "sedet", "aeolus", "arce", "sceptra", "tenens", "mollitque", "animos", "et",
        "temperat", "iras", "ni", "faciat", "maria", "ac", "terras", "caelumque", "profundum", "quippe",
        "ferant", "rapidi", "secum", "verrantque", "per", "auras", "sed", "pater", "omnipotens", "speluncis",
        "abdidit", "atris", "hoc", "metuens", "molemque", "et", "montis", "insuper", "altos", "imposuit",
        "regemque", "dedit", "qui", "foedere", "certo", "et", "premere", "et", "laxas", "sciret",
        "dare", "iussus", "habenas",
    };

    // Building blocks for sample URI hostnames.
    private static final String[] DNS1 = new String[] { "corp", "your", "my", "sample", "company", "test", "any" };
    private static final String[] DNS2 = new String[] { "com", "org", "com", "gov", "org", "com", "org", "com", "edu" };

    // Uniform pick in [0, n).
    private int pick(int n) {
        return _picker.nextInt(n);
    }

    // Picks a random entry from the array.
    private String pick(String[] a) {
        return a[pick(a.length)];
    }

    // Picks count+1 consecutive entries (wrapping) from a random start index,
    // joined with single spaces; empty string when count <= 0.
    private String pick(String[] a, int count) {
        if (count <= 0)
            return "";
        int i = pick(a.length);
        StringBuilder sb = new StringBuilder(a[i]);
        while (count-- > 0) {
            i += 1;
            if (i >= a.length)
                i = 0;
            sb.append(' ');
            sb.append(a[i]);
        }
        return sb.toString();
    }

    // Random digit string of the requested length.
    private String pickDigits(int digits) {
        StringBuilder sb = new StringBuilder();
        while (digits-- > 0)
            sb.append(Integer.toString(pick(10)));
        return sb.toString();
    }

    // Chooses a list length honouring the type's length facets, biased
    // towards small lists (1..3 items) for readable samples.
    private int pickLength(SchemaType sType) {
        XmlInteger length = (XmlInteger) sType.getFacet(SchemaType.FACET_LENGTH);
        if (length != null)
            return length.getBigIntegerValue().intValue();
        XmlInteger min = (XmlInteger) sType.getFacet(SchemaType.FACET_MIN_LENGTH);
        XmlInteger max = (XmlInteger) sType.getFacet(SchemaType.FACET_MAX_LENGTH);
        int minInt, maxInt;
        if (min == null)
            minInt = 0;
        else
            minInt = min.getBigIntegerValue().intValue();
        if (max == null)
            maxInt = Integer.MAX_VALUE;
        else
            maxInt = max.getBigIntegerValue().intValue();
        // We try to keep the length of the array within reasonable limits,
        // at least 1 item and at most 3 if possible
        if (minInt == 0 && maxInt >= 1)
            minInt = 1;
        if (maxInt > minInt + 2)
            maxInt = minInt + 2;
        if (maxInt < minInt)
            maxInt = minInt;
        return minInt + pick(maxInt - minInt);
    }

    /**
     * Formats a given string to the required length, using the following operations:
     * - append the source string to itself as necessary to pass the minLength;
     * - truncate the result of previous step, if necessary, to keep it within minLength.
     */
    private String formatToLength(String s, SchemaType sType) {
        String result = s;
        try {
            SimpleValue min = (SimpleValue) sType.getFacet(SchemaType.FACET_LENGTH);
            if (min == null)
                min = (SimpleValue) sType.getFacet(SchemaType.FACET_MIN_LENGTH);
            if (min != null) {
                int len = min.getIntValue();
                // Double the string until it reaches the minimum length.
                while (result.length() < len)
                    result = result + result;
            }
            SimpleValue max = (SimpleValue) sType.getFacet(SchemaType.FACET_LENGTH);
            if (max == null)
                max = (SimpleValue) sType.getFacet(SchemaType.FACET_MAX_LENGTH);
            if (max != null) {
                int len = max.getIntValue();
                if (result.length() > len)
                    result = result.substring(0, len);
            }
        } catch (Exception e) {} // intValue can be out of range
        return result;
    }

    // Formats a sample decimal starting from `start`, clamped to the type's
    // min/max (inclusive and exclusive) facets and adjusted to its
    // totalDigits/fractionDigits facets.
    private String formatDecimal(String start, SchemaType sType) {
        BigDecimal result = new BigDecimal(start);
        XmlDecimal xmlD;
        xmlD = (XmlDecimal) sType.getFacet(SchemaType.FACET_MIN_INCLUSIVE);
        BigDecimal min = xmlD != null ? xmlD.getBigDecimalValue() : null;
        xmlD = (XmlDecimal) sType.getFacet(SchemaType.FACET_MAX_INCLUSIVE);
        BigDecimal max = xmlD != null ? xmlD.getBigDecimalValue() : null;
        boolean minInclusive = true, maxInclusive = true;
        xmlD = (XmlDecimal) sType.getFacet(SchemaType.FACET_MIN_EXCLUSIVE);
        if (xmlD != null) {
            BigDecimal minExcl = xmlD.getBigDecimalValue();
            if (min == null || min.compareTo(minExcl) < 0) {
                min = minExcl;
                minInclusive = false;
            }
        }
        xmlD = (XmlDecimal) sType.getFacet(SchemaType.FACET_MAX_EXCLUSIVE);
        if (xmlD != null) {
            BigDecimal maxExcl = xmlD.getBigDecimalValue();
            if (max == null || max.compareTo(maxExcl) > 0) {
                max = maxExcl;
                maxInclusive = false;
            }
        }
        xmlD = (XmlDecimal) sType.getFacet(SchemaType.FACET_TOTAL_DIGITS);
        int totalDigits = -1;
        if (xmlD != null) {
            totalDigits = xmlD.getBigDecimalValue().intValue();
            // Largest magnitude representable with totalDigits nines.
            StringBuilder sb = new StringBuilder(totalDigits);
            for (int i = 0; i < totalDigits; i++)
                sb.append('9');
            BigDecimal digitsLimit = new BigDecimal(sb.toString());
            if (max != null && max.compareTo(digitsLimit) > 0) {
                max = digitsLimit;
                maxInclusive = true;
            }
            digitsLimit = digitsLimit.negate();
            if (min != null && min.compareTo(digitsLimit) < 0) {
                min = digitsLimit;
                minInclusive = true;
            }
        }
        int sigMin = min == null ? 1 : result.compareTo(min);
        int sigMax = max == null ? -1 : result.compareTo(max);
        boolean minOk = sigMin > 0 || sigMin == 0 && minInclusive;
        boolean maxOk = sigMax < 0 || sigMax == 0 && maxInclusive;
        // Compute the minimum increment
        xmlD = (XmlDecimal) sType.getFacet(SchemaType.FACET_FRACTION_DIGITS);
        int fractionDigits = -1;
        BigDecimal increment;
        if (xmlD == null)
            increment = new BigDecimal(1);
        else {
            fractionDigits = xmlD.getBigDecimalValue().intValue();
            if (fractionDigits > 0) {
                // Smallest step expressible with that many fraction digits.
                StringBuilder sb = new StringBuilder("0.");
                for (int i = 1; i < fractionDigits; i++)
                    sb.append('0');
                sb.append('1');
                increment = new BigDecimal(sb.toString());
            } else
                increment = new BigDecimal(1.0);
        }
        if (minOk && maxOk) {
            // OK
        } else if (minOk && !maxOk) {
            // TOO BIG
            if (maxInclusive)
                result = max;
            else
                result = max.subtract(increment);
        } else if (!minOk && maxOk) {
            // TOO SMALL
            if (minInclusive)
                result = min;
            else
                result = min.add(increment);
        } else {
            // MIN > MAX!!
        }
        // We have the number
        // Adjust the scale according to the totalDigits and fractionDigits
        int digits = 0;
        BigDecimal ONE = new BigDecimal(BigInteger.ONE);
        for (BigDecimal n = result; n.abs().compareTo(ONE) >= 0; digits++)
            n = n.movePointLeft(1);
        if (fractionDigits > 0)
            if (totalDigits >= 0)
                result = result.setScale(Math.max(fractionDigits, totalDigits - digits));
            else
                result = result.setScale(fractionDigits);
        else if (fractionDigits == 0)
            result = result.setScale(0);
        return result.toString();
    }

    // Builds a sample xs:duration constrained by the type's min/max
    // inclusive/exclusive duration facets.
    private String formatDuration(SchemaType sType) {
        XmlDuration d = (XmlDuration) sType.getFacet(SchemaType.FACET_MIN_INCLUSIVE);
        GDuration minInclusive = null;
        if (d != null)
            minInclusive = d.getGDurationValue();
        d = (XmlDuration) sType.getFacet(SchemaType.FACET_MAX_INCLUSIVE);
        GDuration maxInclusive = null;
        if (d != null)
            maxInclusive = d.getGDurationValue();
        d = (XmlDuration) sType.getFacet(SchemaType.FACET_MIN_EXCLUSIVE);
        GDuration minExclusive = null;
        if (d != null)
            minExclusive = d.getGDurationValue();
        d = (XmlDuration) sType.getFacet(SchemaType.FACET_MAX_EXCLUSIVE);
        GDuration
            maxExclusive = null;
        if (d != null)
            maxExclusive = d.getGDurationValue();
        GDurationBuilder gdurb = new GDurationBuilder();
        // NOTE(review): min/max are declared but never used here (upstream artifact).
        BigInteger min, max;
        gdurb.setSecond(pick(800000));
        gdurb.setMonth(pick(20));
        // Years // Months // Days // Hours // Minutes // Seconds // Fractions
        // Clamp each field up to the inclusive minimum.
        if (minInclusive != null) {
            if (gdurb.getYear() < minInclusive.getYear())
                gdurb.setYear(minInclusive.getYear());
            if (gdurb.getMonth() < minInclusive.getMonth())
                gdurb.setMonth(minInclusive.getMonth());
            if (gdurb.getDay() < minInclusive.getDay())
                gdurb.setDay(minInclusive.getDay());
            if (gdurb.getHour() < minInclusive.getHour())
                gdurb.setHour(minInclusive.getHour());
            if (gdurb.getMinute() < minInclusive.getMinute())
                gdurb.setMinute(minInclusive.getMinute());
            if (gdurb.getSecond() < minInclusive.getSecond())
                gdurb.setSecond(minInclusive.getSecond());
            if (gdurb.getFraction().compareTo(minInclusive.getFraction()) < 0)
                gdurb.setFraction(minInclusive.getFraction());
        }
        // Clamp each field down to the inclusive maximum.
        if (maxInclusive != null) {
            if (gdurb.getYear() > maxInclusive.getYear())
                gdurb.setYear(maxInclusive.getYear());
            if (gdurb.getMonth() > maxInclusive.getMonth())
                gdurb.setMonth(maxInclusive.getMonth());
            if (gdurb.getDay() > maxInclusive.getDay())
                gdurb.setDay(maxInclusive.getDay());
            if (gdurb.getHour() > maxInclusive.getHour())
                gdurb.setHour(maxInclusive.getHour());
            if (gdurb.getMinute() > maxInclusive.getMinute())
                gdurb.setMinute(maxInclusive.getMinute());
            if (gdurb.getSecond() > maxInclusive.getSecond())
                gdurb.setSecond(maxInclusive.getSecond());
            if (gdurb.getFraction().compareTo(maxInclusive.getFraction()) > 0)
                gdurb.setFraction(maxInclusive.getFraction());
        }
        // Push strictly above the exclusive minimum, field by field.
        if (minExclusive != null) {
            if (gdurb.getYear() <= minExclusive.getYear())
                gdurb.setYear(minExclusive.getYear() + 1);
            if (gdurb.getMonth() <= minExclusive.getMonth())
                gdurb.setMonth(minExclusive.getMonth() + 1);
            if (gdurb.getDay() <= minExclusive.getDay())
                gdurb.setDay(minExclusive.getDay() + 1);
            if (gdurb.getHour() <= minExclusive.getHour())
                gdurb.setHour(minExclusive.getHour() + 1);
            if (gdurb.getMinute() <= minExclusive.getMinute())
                gdurb.setMinute(minExclusive.getMinute() + 1);
            if (gdurb.getSecond() <= minExclusive.getSecond())
                gdurb.setSecond(minExclusive.getSecond() + 1);
            if (gdurb.getFraction().compareTo(minExclusive.getFraction()) <= 0)
                gdurb.setFraction( minExclusive.getFraction().add(new BigDecimal(0.001)) );
        }
        // NOTE(review): the exclusive-maximum clamp below uses the same
        // comparisons as the inclusive one (no -1 step) - upstream behaviour
        // preserved as-is.
        if (maxExclusive != null) {
            if (gdurb.getYear() > maxExclusive.getYear())
                gdurb.setYear(maxExclusive.getYear());
            if (gdurb.getMonth() > maxExclusive.getMonth())
                gdurb.setMonth(maxExclusive.getMonth());
            if (gdurb.getDay() > maxExclusive.getDay())
                gdurb.setDay(maxExclusive.getDay());
            if (gdurb.getHour() > maxExclusive.getHour())
                gdurb.setHour(maxExclusive.getHour());
            if (gdurb.getMinute() > maxExclusive.getMinute())
                gdurb.setMinute(maxExclusive.getMinute());
            if (gdurb.getSecond() > maxExclusive.getSecond())
                gdurb.setSecond(maxExclusive.getSecond());
            if (gdurb.getFraction().compareTo(maxExclusive.getFraction()) > 0)
                gdurb.setFraction(maxExclusive.getFraction());
        }
        gdurb.normalize();
        return gdurb.toString();
    }

    // Builds a sample date/time value of the type's primitive kind, kept
    // within the type's min/max facets when present.
    private String formatDate(SchemaType sType) {
        GDateBuilder gdateb = new GDateBuilder(new Date(1000L * pick(365 * 24 * 60 * 60) + (30L + pick(20)) * 365 * 24 * 60 * 60 * 1000));
        GDate min = null, max = null;
        // NOTE(review): temp is declared but never used here (upstream artifact).
        GDate temp;
        // Find the min and the max according to the type
        switch (sType.getPrimitiveType().getBuiltinTypeCode()) {
            case SchemaType.BTC_DATE_TIME: {
                XmlDateTime x = (XmlDateTime) sType.getFacet(SchemaType.FACET_MIN_INCLUSIVE);
                if (x != null)
                    min = x.getGDateValue();
                x = (XmlDateTime) sType.getFacet(SchemaType.FACET_MIN_EXCLUSIVE);
                if (x != null)
                    if (min == null || min.compareToGDate(x.getGDateValue()) <= 0)
                        min = x.getGDateValue();
                x = (XmlDateTime) sType.getFacet(SchemaType.FACET_MAX_INCLUSIVE);
                if (x != null)
                    max = x.getGDateValue();
                x = (XmlDateTime) sType.getFacet(SchemaType.FACET_MAX_EXCLUSIVE);
                if (x != null)
                    if (max == null || max.compareToGDate(x.getGDateValue()) >= 0)
                        max = x.getGDateValue();
                break;
            }
            case
                SchemaType.BTC_TIME: {
                XmlTime x = (XmlTime) sType.getFacet(SchemaType.FACET_MIN_INCLUSIVE);
                if (x != null)
                    min = x.getGDateValue();
                x = (XmlTime) sType.getFacet(SchemaType.FACET_MIN_EXCLUSIVE);
                if (x != null)
                    if (min == null || min.compareToGDate(x.getGDateValue()) <= 0)
                        min = x.getGDateValue();
                x = (XmlTime) sType.getFacet(SchemaType.FACET_MAX_INCLUSIVE);
                if (x != null)
                    max = x.getGDateValue();
                x = (XmlTime) sType.getFacet(SchemaType.FACET_MAX_EXCLUSIVE);
                if (x != null)
                    if (max == null || max.compareToGDate(x.getGDateValue()) >= 0)
                        max = x.getGDateValue();
                break;
            }
            case SchemaType.BTC_DATE: {
                XmlDate x = (XmlDate) sType.getFacet(SchemaType.FACET_MIN_INCLUSIVE);
                if (x != null)
                    min = x.getGDateValue();
                x = (XmlDate) sType.getFacet(SchemaType.FACET_MIN_EXCLUSIVE);
                if (x != null)
                    if (min == null || min.compareToGDate(x.getGDateValue()) <= 0)
                        min = x.getGDateValue();
                x = (XmlDate) sType.getFacet(SchemaType.FACET_MAX_INCLUSIVE);
                if (x != null)
                    max = x.getGDateValue();
                x = (XmlDate) sType.getFacet(SchemaType.FACET_MAX_EXCLUSIVE);
                if (x != null)
                    if (max == null || max.compareToGDate(x.getGDateValue()) >= 0)
                        max = x.getGDateValue();
                break;
            }
            case SchemaType.BTC_G_YEAR_MONTH: {
                XmlGYearMonth x = (XmlGYearMonth) sType.getFacet(SchemaType.FACET_MIN_INCLUSIVE);
                if (x != null)
                    min = x.getGDateValue();
                x = (XmlGYearMonth) sType.getFacet(SchemaType.FACET_MIN_EXCLUSIVE);
                if (x != null)
                    if (min == null || min.compareToGDate(x.getGDateValue()) <= 0)
                        min = x.getGDateValue();
                x = (XmlGYearMonth) sType.getFacet(SchemaType.FACET_MAX_INCLUSIVE);
                if (x != null)
                    max = x.getGDateValue();
                x = (XmlGYearMonth) sType.getFacet(SchemaType.FACET_MAX_EXCLUSIVE);
                if (x != null)
                    if (max == null || max.compareToGDate(x.getGDateValue()) >= 0)
                        max = x.getGDateValue();
                break;
            }
            case SchemaType.BTC_G_YEAR: {
                XmlGYear x = (XmlGYear) sType.getFacet(SchemaType.FACET_MIN_INCLUSIVE);
                if (x != null)
                    min = x.getGDateValue();
                x = (XmlGYear) sType.getFacet(SchemaType.FACET_MIN_EXCLUSIVE);
                if (x != null)
                    if (min == null || min.compareToGDate(x.getGDateValue()) <= 0)
                        min = x.getGDateValue();
                x = (XmlGYear) sType.getFacet(SchemaType.FACET_MAX_INCLUSIVE);
                if (x != null)
                    max = x.getGDateValue();
                x = (XmlGYear) sType.getFacet(SchemaType.FACET_MAX_EXCLUSIVE);
                if (x != null)
                    if (max == null || max.compareToGDate(x.getGDateValue()) >= 0)
                        max = x.getGDateValue();
                break;
            }
            case SchemaType.BTC_G_MONTH_DAY: {
                XmlGMonthDay x = (XmlGMonthDay) sType.getFacet(SchemaType.FACET_MIN_INCLUSIVE);
                if (x != null)
                    min = x.getGDateValue();
                x = (XmlGMonthDay) sType.getFacet(SchemaType.FACET_MIN_EXCLUSIVE);
                if (x != null)
                    if (min == null || min.compareToGDate(x.getGDateValue()) <= 0)
                        min = x.getGDateValue();
                x = (XmlGMonthDay) sType.getFacet(SchemaType.FACET_MAX_INCLUSIVE);
                if (x != null)
                    max = x.getGDateValue();
                x = (XmlGMonthDay) sType.getFacet(SchemaType.FACET_MAX_EXCLUSIVE);
                if (x != null)
                    if (max == null || max.compareToGDate(x.getGDateValue()) >= 0)
                        max = x.getGDateValue();
                break;
            }
            case SchemaType.BTC_G_DAY: {
                XmlGDay x = (XmlGDay) sType.getFacet(SchemaType.FACET_MIN_INCLUSIVE);
                if (x != null)
                    min = x.getGDateValue();
                x = (XmlGDay) sType.getFacet(SchemaType.FACET_MIN_EXCLUSIVE);
                if (x != null)
                    if (min == null || min.compareToGDate(x.getGDateValue()) <= 0)
                        min = x.getGDateValue();
                x = (XmlGDay) sType.getFacet(SchemaType.FACET_MAX_INCLUSIVE);
                if (x != null)
                    max = x.getGDateValue();
                x = (XmlGDay) sType.getFacet(SchemaType.FACET_MAX_EXCLUSIVE);
                if (x != null)
                    if (max == null || max.compareToGDate(x.getGDateValue()) >= 0)
                        max = x.getGDateValue();
                break;
            }
            case SchemaType.BTC_G_MONTH: {
                XmlGMonth x = (XmlGMonth) sType.getFacet(SchemaType.FACET_MIN_INCLUSIVE);
                if (x != null)
                    min = x.getGDateValue();
                x = (XmlGMonth) sType.getFacet(SchemaType.FACET_MIN_EXCLUSIVE);
                if (x != null)
                    if (min == null || min.compareToGDate(x.getGDateValue()) <= 0)
                        min = x.getGDateValue();
                x = (XmlGMonth) sType.getFacet(SchemaType.FACET_MAX_INCLUSIVE);
                if (x != null)
                    max = x.getGDateValue();
                x = (XmlGMonth) sType.getFacet(SchemaType.FACET_MAX_EXCLUSIVE);
                if (x != null)
                    if (max == null || max.compareToGDate(x.getGDateValue()) >= 0)
                        max = x.getGDateValue();
                break;
            }
        }
        if (min != null && max == null) {
            if (min.compareToGDate(gdateb) >= 0) {
                // Reset the date to min + (1-8) hours
                Calendar c = gdateb.getCalendar();
                c.add(Calendar.HOUR_OF_DAY, pick(8));
                gdateb = new GDateBuilder(c);
            }
        } else if (min == null && max != null) {
            if (max.compareToGDate(gdateb) <= 0) {
                // Reset the date to max - (1-8) hours
                Calendar c = gdateb.getCalendar();
                c.add(Calendar.HOUR_OF_DAY, 0 - pick(8));
                gdateb = new GDateBuilder(c);
            }
        } else if (min != null && max != null) {
            if (min.compareToGDate(gdateb) >= 0 || max.compareToGDate(gdateb) <= 0) {
                // Find a date between the two: step min up by progressively
                // finer units until it no longer passes max.
                Calendar c = min.getCalendar();
                Calendar cmax = max.getCalendar();
                c.add(Calendar.HOUR_OF_DAY, 1);
                if (c.after(cmax)) {
                    c.add(Calendar.HOUR_OF_DAY, -1);
                    c.add(Calendar.MINUTE, 1);
                    if (c.after(cmax)) {
                        c.add(Calendar.MINUTE, -1);
                        c.add(Calendar.SECOND, 1);
                        if (c.after(cmax)) {
                            c.add(Calendar.SECOND, -1);
                            c.add(Calendar.MILLISECOND, 1);
                            if (c.after(cmax))
                                c.add(Calendar.MILLISECOND, -1);
                        }
                    }
                }
                gdateb = new GDateBuilder(c);
            }
        }
        gdateb.setBuiltinTypeCode(sType.getPrimitiveType().getBuiltinTypeCode());
        // Randomly drop the timezone to vary the samples.
        if (pick(2) == 0)
            gdateb.clearTimeZone();
        return gdateb.toString();
    }

    // Walks up the base-type chain to the nearest builtin schema type.
    private SchemaType closestBuiltin(SchemaType sType) {
        while (!sType.isBuiltinType())
            sType = sType.getBaseType();
        return sType;
    }

    /**
     * Cracks a combined QName of the form URL:localname
     */
    public static QName crackQName(String qName) {
        String ns;
        String name;
        int index = qName.lastIndexOf(':');
        if (index >= 0) {
            ns = qName.substring(0, index);
            name = qName.substring(index + 1);
        } else {
            ns = "";
            name = qName;
        }
        return new QName(ns, name);
    }

    /**
     * Cursor position:
     * Before this call:
     * <outer><foo/>^</outer> (cursor at the ^)
     * After this call:
     * <outer><foo/><bar/>some text<etc/>^</outer>
     */
    private void processParticle(SchemaParticle sp, XmlCursor xmlc, boolean
mixed) { int loop = determineMinMaxForSample(sp, xmlc); while (loop-- > 0) { switch (sp.getParticleType()) { case (SchemaParticle.ELEMENT): processElement(sp, xmlc, mixed); break; case (SchemaParticle.SEQUENCE): processSequence(sp, xmlc, mixed); break; case (SchemaParticle.CHOICE): processChoice(sp, xmlc, mixed); break; case (SchemaParticle.ALL): processAll(sp, xmlc, mixed); break; case (SchemaParticle.WILDCARD): processWildCard(sp, xmlc, mixed); break; default: // throw new Exception("No Match on Schema Particle Type: " + String.valueOf(sp.getParticleType())); } } } private int determineMinMaxForSample(SchemaParticle sp, XmlCursor xmlc) { int minOccurs = sp.getIntMinOccurs(); int maxOccurs = sp.getIntMaxOccurs(); if (minOccurs == maxOccurs) return minOccurs; int result = minOccurs; if (result == 0 && _nElements < MAX_ELEMENTS) result = 1; if (sp.getParticleType() != SchemaParticle.ELEMENT) return result; // it probably only makes sense to put comments in front of individual elements that repeat if (sp.getMaxOccurs() == null) { // xmlc.insertComment("The next " + getItemNameOrType(sp, xmlc) + " may be repeated " + minOccurs + " or more times"); if (minOccurs == 0) xmlc.insertComment("Zero or more repetitions:"); else xmlc.insertComment( minOccurs + " or more repetitions:" ); } else if (sp.getIntMaxOccurs() > 1) { xmlc.insertComment(minOccurs + " to " + String.valueOf(sp.getMaxOccurs()) + " repetitions:"); } else { xmlc.insertComment("Optional:"); } return result; } /* Return a name for the element or the particle type to use in the comment for minoccurs, max occurs */ private String getItemNameOrType(SchemaParticle sp, XmlCursor xmlc) { String elementOrTypeName = null; if (sp.getParticleType() == SchemaParticle.ELEMENT) { elementOrTypeName = "Element (" + sp.getName().getLocalPart() + ")"; } else { elementOrTypeName = printParticleType(sp.getParticleType()); } return elementOrTypeName; } private void processElement(SchemaParticle sp, XmlCursor xmlc, boolean mixed) 
{ // cast as schema local element SchemaLocalElement element = (SchemaLocalElement) sp; /// ^ -> <elemenname></elem>^ if (_soapEnc) xmlc.insertElement(element.getName().getLocalPart()); // soap encoded? drop namespaces. else xmlc.insertElement(element.getName().getLocalPart(), element.getName().getNamespaceURI()); _nElements++; /// -> <elem>^</elem> xmlc.toPrevToken(); // -> <elem>stuff^</elem> createSampleForType(element.getType(), xmlc); // -> <elem>stuff</elem>^ xmlc.toNextToken(); } private void moveToken(int numToMove, XmlCursor xmlc) { for (int i = 0; i < Math.abs(numToMove); i++) { if (numToMove < 0) { xmlc.toPrevToken(); } else { xmlc.toNextToken(); } } } private static final String formatQName(XmlCursor xmlc, QName qName) { XmlCursor parent = xmlc.newCursor(); parent.toParent(); String prefix = parent.prefixForNamespace(qName.getNamespaceURI()); parent.dispose(); String name; if (prefix == null || prefix.length() == 0) name = qName.getLocalPart(); else name = prefix + ":" + qName.getLocalPart(); return name; } private static final QName HREF = new QName("href"); private static final QName ID = new QName("id"); public static final QName XSI_TYPE = new QName("http://www.w3.org/2001/XMLSchema-instance", "type", "xsi"); private static final QName ENC_ARRAYTYPE = new QName("http://schemas.xmlsoap.org/soap/encoding/", "arrayType"); private static final QName ENC_OFFSET = new QName("http://schemas.xmlsoap.org/soap/encoding/", "offset"); private static final Set SKIPPED_SOAP_ATTRS = new HashSet(Arrays.asList(new QName[] { HREF, ID, ENC_OFFSET })); private void processAttributes(SchemaType stype, XmlCursor xmlc) { if (_soapEnc) { QName typeName = stype.getName(); if (typeName != null) { xmlc.insertAttributeWithValue(XSI_TYPE, formatQName(xmlc, typeName)); } } SchemaProperty[] attrProps = stype.getAttributeProperties(); for (int i = 0; i < attrProps.length; i++) { SchemaProperty attr = attrProps[i]; if (_soapEnc) { if (SKIPPED_SOAP_ATTRS.contains(attr.getName())) 
continue; if (ENC_ARRAYTYPE.equals(attr.getName())) { SOAPArrayType arrayType = ((SchemaWSDLArrayType) stype.getAttributeModel().getAttribute(attr.getName())).getWSDLArrayType(); if (arrayType != null) xmlc.insertAttributeWithValue( attr.getName(), formatQName(xmlc, arrayType.getQName()) + arrayType.soap11DimensionString() ); continue; } } String defaultValue = attr.getDefaultText(); xmlc.insertAttributeWithValue(attr.getName(), defaultValue == null ? sampleDataForSimpleType(attr.getType()) : defaultValue); } } private void processSequence(SchemaParticle sp, XmlCursor xmlc, boolean mixed) { SchemaParticle[] spc = sp.getParticleChildren(); for (int i = 0; i < spc.length; i++) { /// <parent>maybestuff^</parent> processParticle(spc[i], xmlc, mixed); //<parent>maybestuff...morestuff^</parent> if (mixed && i < spc.length - 1) xmlc.insertChars(pick(WORDS)); } } private void processChoice(SchemaParticle sp, XmlCursor xmlc, boolean mixed) { SchemaParticle[] spc = sp.getParticleChildren(); xmlc.insertComment("You have a CHOICE of the next " + String.valueOf(spc.length) + " items at this level"); for (int i = 0; i < spc.length; i++) { processParticle(spc[i], xmlc, mixed); } } private void processAll(SchemaParticle sp, XmlCursor xmlc, boolean mixed) { SchemaParticle[] spc = sp.getParticleChildren(); // xmlc.insertComment("You may enter the following " + String.valueOf(spc.length) + " items in any order"); for (int i = 0; i < spc.length; i++) { processParticle(spc[i], xmlc, mixed); if (mixed && i < spc.length - 1) xmlc.insertChars(pick(WORDS)); } } private void processWildCard(SchemaParticle sp, XmlCursor xmlc, boolean mixed) { xmlc.insertComment("You may enter ANY elements at this point"); xmlc.insertElement("AnyElement"); } /** * This method will get the base type for the schema type */ private static QName getClosestName(SchemaType sType) { while (sType.getName() == null) sType = sType.getBaseType(); return sType.getName(); } private String printParticleType(int 
particleType) { StringBuilder returnParticleType = new StringBuilder(); returnParticleType.append("Schema Particle Type: "); switch (particleType) { case SchemaParticle.ALL: returnParticleType.append("ALL\n"); break; case SchemaParticle.CHOICE: returnParticleType.append("CHOICE\n"); break; case SchemaParticle.ELEMENT: returnParticleType.append("ELEMENT\n"); break; case SchemaParticle.SEQUENCE: returnParticleType.append("SEQUENCE\n"); break; case SchemaParticle.WILDCARD: returnParticleType.append("WILDCARD\n"); break; default: returnParticleType.append("Schema Particle Type Unknown"); break; } return returnParticleType.toString(); } private ArrayList _typeStack = new ArrayList(); }
package org.spongycastle.asn1.test;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.text.ParseException;
import java.util.Date;
import java.util.Hashtable;
import java.util.Vector;

import org.spongycastle.asn1.ASN1EncodableVector;
import org.spongycastle.asn1.ASN1GeneralizedTime;
import org.spongycastle.asn1.ASN1InputStream;
import org.spongycastle.asn1.ASN1Integer;
import org.spongycastle.asn1.ASN1OutputStream;
import org.spongycastle.asn1.ASN1Primitive;
import org.spongycastle.asn1.DERNull;
import org.spongycastle.asn1.DEROctetString;
import org.spongycastle.asn1.DERSequence;
import org.spongycastle.asn1.oiw.ElGamalParameter;
import org.spongycastle.asn1.oiw.OIWObjectIdentifiers;
import org.spongycastle.asn1.pkcs.PKCSObjectIdentifiers;
import org.spongycastle.asn1.pkcs.RSAPublicKey;
import org.spongycastle.asn1.x500.X500Name;
import org.spongycastle.asn1.x509.AlgorithmIdentifier;
import org.spongycastle.asn1.x509.AuthorityKeyIdentifier;
import org.spongycastle.asn1.x509.CRLReason;
import org.spongycastle.asn1.x509.Extension;
import org.spongycastle.asn1.x509.Extensions;
import org.spongycastle.asn1.x509.ExtensionsGenerator;
import org.spongycastle.asn1.x509.GeneralName;
import org.spongycastle.asn1.x509.GeneralNames;
import org.spongycastle.asn1.x509.IssuingDistributionPoint;
import org.spongycastle.asn1.x509.KeyUsage;
import org.spongycastle.asn1.x509.SubjectKeyIdentifier;
import org.spongycastle.asn1.x509.SubjectPublicKeyInfo;
import org.spongycastle.asn1.x509.TBSCertList;
import org.spongycastle.asn1.x509.TBSCertificate;
import org.spongycastle.asn1.x509.Time;
import org.spongycastle.asn1.x509.V1TBSCertificateGenerator;
import org.spongycastle.asn1.x509.V2TBSCertListGenerator;
import org.spongycastle.asn1.x509.V3TBSCertificateGenerator;
import org.spongycastle.asn1.x509.X509Extension;
import org.spongycastle.asn1.x509.X509Extensions;
import org.spongycastle.crypto.Digest;
import org.spongycastle.crypto.digests.SHA1Digest;
import org.spongycastle.util.Arrays;
import org.spongycastle.util.encoders.Base64;
import org.spongycastle.util.test.SimpleTest;

/**
 * Regression test for TBS certificate / CRL structure generation. Each test
 * builds a structure, DER-encodes it, and compares the bytes against a
 * known-good Base64 vector, then decodes the vector and re-encodes it to check
 * the round trip.
 */
public class GenerationTest
    extends SimpleTest
{
    // Expected DER encoding for the v1 TBS certificate built in tbsV1CertGen().
    private byte[] v1Cert = Base64.decode(
        "MIGtAgEBMA0GCSqGSIb3DQEBBAUAMCUxCzAJBgNVBAMMAkFVMRYwFAYDVQQKDA1Cb"
            + "3VuY3kgQ2FzdGxlMB4XDTcwMDEwMTAwMDAwMVoXDTcwMDEwMTAwMDAxMlowNjELMA"
            + "kGA1UEAwwCQVUxFjAUBgNVBAoMDUJvdW5jeSBDYXN0bGUxDzANBgNVBAsMBlRlc3Q"
            + "gMTAaMA0GCSqGSIb3DQEBAQUAAwkAMAYCAQECAQI=");

    // Expected DER encoding for the v3 TBS certificate built in tbsV3CertGen().
    private byte[] v3Cert = Base64.decode(
        "MIIBSKADAgECAgECMA0GCSqGSIb3DQEBBAUAMCUxCzAJBgNVBAMMAkFVMRYwFAYD"
            + "VQQKDA1Cb3VuY3kgQ2FzdGxlMB4XDTcwMDEwMTAwMDAwMVoXDTcwMDEwMTAwMDAw"
            + "MlowNjELMAkGA1UEAwwCQVUxFjAUBgNVBAoMDUJvdW5jeSBDYXN0bGUxDzANBgNV"
            + "BAsMBlRlc3QgMjAYMBAGBisOBwIBATAGAgEBAgECAwQAAgEDo4GVMIGSMGEGA1Ud"
            + "IwEB/wRXMFWAFDZPdpHPzKi7o8EJokkQU2uqCHRRoTqkODA2MQswCQYDVQQDDAJB"
            + "VTEWMBQGA1UECgwNQm91bmN5IENhc3RsZTEPMA0GA1UECwwGVGVzdCAyggECMCAG"
            + "A1UdDgEB/wQWBBQ2T3aRz8you6PBCaJJEFNrqgh0UTALBgNVHQ8EBAMCBBA=");

    // Expected DER encoding for a v3 TBS certificate with an empty subject
    // (subject carried in subjectAlternativeName instead).
    private byte[] v3CertNullSubject = Base64.decode(
        "MIHGoAMCAQICAQIwDQYJKoZIhvcNAQEEBQAwJTELMAkGA1UEAwwCQVUxFjAUBgNVB"
            + "AoMDUJvdW5jeSBDYXN0bGUwHhcNNzAwMTAxMDAwMDAxWhcNNzAwMTAxMDAwMDAyWj"
            + "AAMBgwEAYGKw4HAgEBMAYCAQECAQIDBAACAQOjSjBIMEYGA1UdEQEB/wQ8MDqkODA"
            + "2MQswCQYDVQQDDAJBVTEWMBQGA1UECgwNQm91bmN5IENhc3RsZTEPMA0GA1UECwwG"
            + "VGVzdCAy");

    // Expected DER encoding for the v2 TBS certificate list built in tbsV2CertListGen().
    private byte[] v2CertList = Base64.decode(
        "MIIBQwIBATANBgkqhkiG9w0BAQUFADAlMQswCQYDVQQDDAJBVTEWMBQGA1UECgwN"
            + "Qm91bmN5IENhc3RsZRcNNzAwMTAxMDAwMDAwWhcNNzAwMTAxMDAwMDAyWjAiMCAC"
            + "AQEXDTcwMDEwMTAwMDAwMVowDDAKBgNVHRUEAwoBCqCBxTCBwjBhBgNVHSMBAf8E"
            + "VzBVgBQ2T3aRz8you6PBCaJJEFNrqgh0UaE6pDgwNjELMAkGA1UEAwwCQVUxFjAU"
            + "BgNVBAoMDUJvdW5jeSBDYXN0bGUxDzANBgNVBAsMBlRlc3QgMoIBAjBDBgNVHRIE"
            + "PDA6pDgwNjELMAkGA1UEAwwCQVUxFjAUBgNVBAoMDUJvdW5jeSBDYXN0bGUxDzAN"
            + "BgNVBAsMBlRlc3QgMzAKBgNVHRQEAwIBATAMBgNVHRwBAf8EAjAA");

    /**
     * Builds a v1 TBS certificate and checks its DER encoding against v1Cert,
     * then round-trips the vector through decode/encode.
     */
    private void tbsV1CertGen()
        throws IOException
    {
        V1TBSCertificateGenerator gen = new V1TBSCertificateGenerator();
        Date startDate = new Date(1000);
        Date endDate = new Date(12000);

        gen.setSerialNumber(new ASN1Integer(1));

        gen.setStartDate(new Time(startDate));
        gen.setEndDate(new Time(endDate));

        gen.setIssuer(new X500Name("CN=AU,O=Bouncy Castle"));
        gen.setSubject(new X500Name("CN=AU,O=Bouncy Castle,OU=Test 1"));

        gen.setSignature(new AlgorithmIdentifier(PKCSObjectIdentifiers.md5WithRSAEncryption, DERNull.INSTANCE));

        // Tiny throwaway RSA key (modulus 1, exponent 2) — the test only cares about encoding.
        SubjectPublicKeyInfo info = new SubjectPublicKeyInfo(new AlgorithmIdentifier(PKCSObjectIdentifiers.rsaEncryption, DERNull.INSTANCE),
            new RSAPublicKey(BigInteger.valueOf(1), BigInteger.valueOf(2)));

        gen.setSubjectPublicKeyInfo(info);

        TBSCertificate tbs = gen.generateTBSCertificate();
        ByteArrayOutputStream bOut = new ByteArrayOutputStream();
        ASN1OutputStream aOut = new ASN1OutputStream(bOut);

        aOut.writeObject(tbs);

        if (!Arrays.areEqual(bOut.toByteArray(), v1Cert))
        {
            fail("failed v1 cert generation");
        }

        //
        // read back test
        //
        ASN1InputStream aIn = new ASN1InputStream(new ByteArrayInputStream(v1Cert));
        ASN1Primitive o = aIn.readObject();

        bOut = new ByteArrayOutputStream();
        aOut = new ASN1OutputStream(bOut);

        aOut.writeObject(o);

        if (!Arrays.areEqual(bOut.toByteArray(), v1Cert))
        {
            fail("failed v1 cert read back test");
        }
    }

    /**
     * Builds an AuthorityKeyIdentifier from the issuer's key info, name,
     * and serial number.
     */
    private AuthorityKeyIdentifier createAuthorityKeyId(
        SubjectPublicKeyInfo info,
        X500Name name,
        int sNumber)
    {
        GeneralName genName = new GeneralName(name);

        ASN1EncodableVector v = new ASN1EncodableVector();

        v.add(genName);

        return new AuthorityKeyIdentifier(
            info, GeneralNames.getInstance(new DERSequence(v)), BigInteger.valueOf(sNumber));
    }

    /**
     * Builds a v3 TBS certificate with authorityKeyIdentifier,
     * subjectKeyIdentifier, and keyUsage extensions; checks the DER encoding
     * against v3Cert and round-trips the vector.
     */
    private void tbsV3CertGen()
        throws IOException
    {
        V3TBSCertificateGenerator gen = new V3TBSCertificateGenerator();
        Date startDate = new Date(1000);
        Date endDate = new Date(2000);

        gen.setSerialNumber(new ASN1Integer(2));

        gen.setStartDate(new Time(startDate));
        gen.setEndDate(new Time(endDate));

        gen.setIssuer(new X500Name("CN=AU,O=Bouncy Castle"));
        gen.setSubject(new X500Name("CN=AU,O=Bouncy Castle,OU=Test 2"));

        gen.setSignature(new AlgorithmIdentifier(PKCSObjectIdentifiers.md5WithRSAEncryption, DERNull.INSTANCE));

        // ElGamal key with toy parameters — only the encoding matters here.
        SubjectPublicKeyInfo info = new SubjectPublicKeyInfo(new AlgorithmIdentifier(OIWObjectIdentifiers.elGamalAlgorithm, new ElGamalParameter(BigInteger.valueOf(1), BigInteger.valueOf(2))), new ASN1Integer(3));

        gen.setSubjectPublicKeyInfo(info);

        //
        // add extensions
        //
        Vector order = new Vector();

        Hashtable extensions = new Hashtable();

        order.addElement(X509Extension.authorityKeyIdentifier);
        order.addElement(X509Extension.subjectKeyIdentifier);
        order.addElement(X509Extension.keyUsage);

        extensions.put(X509Extension.authorityKeyIdentifier, new X509Extension(true, new DEROctetString(createAuthorityKeyId(info, new X500Name("CN=AU,O=Bouncy Castle,OU=Test 2"), 2))));
        extensions.put(X509Extension.subjectKeyIdentifier, new X509Extension(true, new DEROctetString(new SubjectKeyIdentifier(getDigest(info)))));
        extensions.put(X509Extension.keyUsage, new X509Extension(false, new DEROctetString(new KeyUsage(KeyUsage.dataEncipherment))));

        X509Extensions ex = new X509Extensions(order, extensions);

        gen.setExtensions(ex);

        TBSCertificate tbs = gen.generateTBSCertificate();
        ByteArrayOutputStream bOut = new ByteArrayOutputStream();
        ASN1OutputStream aOut = new ASN1OutputStream(bOut);

        aOut.writeObject(tbs);

        if (!Arrays.areEqual(bOut.toByteArray(), v3Cert))
        {
            fail("failed v3 cert generation");
        }

        //
        // read back test
        //
        ASN1InputStream aIn = new ASN1InputStream(new ByteArrayInputStream(v3Cert));
        ASN1Primitive o = aIn.readObject();

        bOut = new ByteArrayOutputStream();
        aOut = new ASN1OutputStream(bOut);

        aOut.writeObject(o);

        if (!Arrays.areEqual(bOut.toByteArray(), v3Cert))
        {
            fail("failed v3 cert read back test");
        }
    }

    /**
     * Checks that generating a v3 TBS certificate without a subject fails
     * until a subjectAlternativeName extension is supplied, then verifies the
     * resulting encoding against v3CertNullSubject.
     */
    private void tbsV3CertGenWithNullSubject()
        throws IOException
    {
        V3TBSCertificateGenerator gen = new V3TBSCertificateGenerator();
        Date startDate = new Date(1000);
        Date endDate = new Date(2000);

        gen.setSerialNumber(new ASN1Integer(2));

        gen.setStartDate(new Time(startDate));
        gen.setEndDate(new Time(endDate));

        gen.setIssuer(new X500Name("CN=AU,O=Bouncy Castle"));

        gen.setSignature(new AlgorithmIdentifier(PKCSObjectIdentifiers.md5WithRSAEncryption, DERNull.INSTANCE));

        SubjectPublicKeyInfo info = new SubjectPublicKeyInfo(new AlgorithmIdentifier(OIWObjectIdentifiers.elGamalAlgorithm, new ElGamalParameter(BigInteger.valueOf(1), BigInteger.valueOf(2))), new ASN1Integer(3));

        gen.setSubjectPublicKeyInfo(info);

        // Without a subject (and no subjectAlternativeName) generation must fail.
        try
        {
            gen.generateTBSCertificate();
            fail("null subject not caught!");
        }
        catch (IllegalStateException e)
        {
            if (!e.getMessage().equals("not all mandatory fields set in V3 TBScertificate generator"))
            {
                fail("unexpected exception", e);
            }
        }

        //
        // add extensions
        //
        Vector order = new Vector();

        Hashtable extensions = new Hashtable();

        order.addElement(X509Extension.subjectAlternativeName);

        extensions.put(X509Extension.subjectAlternativeName, new X509Extension(true, new DEROctetString(new GeneralNames(new GeneralName(new X500Name("CN=AU,O=Bouncy Castle,OU=Test 2"))))));

        X509Extensions ex = new X509Extensions(order, extensions);

        gen.setExtensions(ex);

        TBSCertificate tbs = gen.generateTBSCertificate();
        ByteArrayOutputStream bOut = new ByteArrayOutputStream();
        ASN1OutputStream aOut = new ASN1OutputStream(bOut);

        aOut.writeObject(tbs);

        if (!Arrays.areEqual(bOut.toByteArray(), v3CertNullSubject))
        {
            fail("failed v3 null sub cert generation");
        }

        //
        // read back test
        //
        ASN1InputStream aIn = new ASN1InputStream(new ByteArrayInputStream(v3CertNullSubject));
        ASN1Primitive o = aIn.readObject();

        bOut = new ByteArrayOutputStream();
        aOut = new ASN1OutputStream(bOut);

        aOut.writeObject(o);

        if (!Arrays.areEqual(bOut.toByteArray(), v3CertNullSubject))
        {
            fail("failed v3 null sub cert read back test");
        }
    }

    /**
     * Builds a v2 TBS certificate list with entries and extensions; checks the
     * DER encoding against v2CertList, round-trips the vector, and verifies
     * per-entry reason codes and the invalidity date extension.
     */
    private void tbsV2CertListGen()
        throws IOException
    {
        V2TBSCertListGenerator gen = new V2TBSCertListGenerator();

        gen.setIssuer(new X500Name("CN=AU,O=Bouncy Castle"));

        gen.addCRLEntry(new ASN1Integer(1), new Time(new Date(1000)), CRLReason.aACompromise);

        gen.setNextUpdate(new Time(new Date(2000)));

        gen.setThisUpdate(new Time(new Date(500)));

        gen.setSignature(new AlgorithmIdentifier(PKCSObjectIdentifiers.sha1WithRSAEncryption, DERNull.INSTANCE));

        //
        // extensions
        //
        SubjectPublicKeyInfo info = new SubjectPublicKeyInfo(new AlgorithmIdentifier(OIWObjectIdentifiers.elGamalAlgorithm, new ElGamalParameter(BigInteger.valueOf(1), BigInteger.valueOf(2))), new ASN1Integer(3));

        ExtensionsGenerator extGen = new ExtensionsGenerator();

        extGen.addExtension(Extension.authorityKeyIdentifier, true, createAuthorityKeyId(info, new X500Name("CN=AU,O=Bouncy Castle,OU=Test 2"), 2));
        extGen.addExtension(Extension.issuerAlternativeName, false, new GeneralNames(new GeneralName(new X500Name("CN=AU,O=Bouncy Castle,OU=Test 3"))));
        extGen.addExtension(Extension.cRLNumber, false, new ASN1Integer(1));
        extGen.addExtension(Extension.issuingDistributionPoint, true, IssuingDistributionPoint.getInstance(new DERSequence()));

        Extensions ex = extGen.generate();

        gen.setExtensions(ex);

        TBSCertList tbs = gen.generateTBSCertList();
        ByteArrayOutputStream bOut = new ByteArrayOutputStream();
        ASN1OutputStream aOut = new ASN1OutputStream(bOut);

        aOut.writeObject(tbs);

        if (!Arrays.areEqual(bOut.toByteArray(), v2CertList))
        {
            // Dump the actual encoding so a failing vector can be inspected.
            System.out.println(new String(Base64.encode(bOut.toByteArray())));
            fail("failed v2 cert list generation");
        }

        //
        // read back test
        //
        ASN1InputStream aIn = new ASN1InputStream(new ByteArrayInputStream(v2CertList));
        ASN1Primitive o = aIn.readObject();

        bOut = new ByteArrayOutputStream();
        aOut = new ASN1OutputStream(bOut);

        aOut.writeObject(o);

        if (!Arrays.areEqual(bOut.toByteArray(), v2CertList))
        {
            fail("failed v2 cert list read back test");
        }

        //
        // check we can add a custom reason
        //
        gen.addCRLEntry(new ASN1Integer(1), new Time(new Date(1000)), CRLReason.aACompromise);

        //
        // check invalidity date
        gen.addCRLEntry(new ASN1Integer(2), new Time(new Date(1000)), CRLReason.affiliationChanged, new ASN1GeneralizedTime(new Date(2000)));

        TBSCertList crl = gen.generateTBSCertList();

        TBSCertList.CRLEntry[] entries = crl.getRevokedCertificates();
        for (int i = 0; i != entries.length; i++)
        {
            TBSCertList.CRLEntry entry = entries[i];

            if (entry.getUserCertificate().equals(new ASN1Integer(1)))
            {
                Extensions extensions = entry.getExtensions();
                Extension ext = extensions.getExtension(Extension.reasonCode);

                CRLReason r = CRLReason.getInstance(ext.getParsedValue());

                if (r.getValue().intValue() != CRLReason.aACompromise)
                {
                    fail("reason code mismatch");
                }
            }
            else if (entry.getUserCertificate().equals(new ASN1Integer(2)))
            {
                Extensions extensions = entry.getExtensions();
                Extension ext = extensions.getExtension(Extension.reasonCode);

                CRLReason r = CRLReason.getInstance(ext.getParsedValue());

                if (r.getValue().intValue() != CRLReason.affiliationChanged)
                {
                    fail("reason code mismatch");
                }

                ext = extensions.getExtension(Extension.invalidityDate);

                ASN1GeneralizedTime t = ASN1GeneralizedTime.getInstance(ext.getParsedValue());

                try
                {
                    if (!t.getDate().equals(new Date(2000)))
                    {
                        fail("invalidity date mismatch");
                    }
                }
                catch (ParseException e)
                {
                    fail("can't parse date", e);
                }
            }
        }
    }

    // Entry point invoked by the SimpleTest harness; runs all generation checks.
    public void performTest()
        throws Exception
    {
        tbsV1CertGen();
        tbsV3CertGen();
        tbsV3CertGenWithNullSubject();
        tbsV2CertListGen();
    }

    public String getName()
    {
        return "Generation";
    }

    // SHA-1 digest of the raw public key bits, used to build SubjectKeyIdentifier.
    private static byte[] getDigest(SubjectPublicKeyInfo spki)
    {
        Digest digest = new SHA1Digest();
        byte[] resBuf = new byte[digest.getDigestSize()];

        byte[] bytes = spki.getPublicKeyData().getBytes();
        digest.update(bytes, 0, bytes.length);

        digest.doFinal(resBuf, 0);

        return resBuf;
    }

    public static void main(
        String[] args)
    {
        runTest(new GenerationTest());
    }
}
/*
 * Copyright 2014 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp.newtypes;

import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import com.google.javascript.rhino.JSTypeExpression;

import java.util.Collection;

/**
 *
 * @author blickly@google.com (Ben Lickly)
 * @author dimvar@google.com (Dimitris Vardoulakis)
 *
 * Represents an enumerated type. An enum declaration yields two related types:
 * the object literal that declared the enum (an ObjectType), and the type of
 * the enum's elements, which embeds this class inside a JSType.
 *
 * Resolution is a three-state process (NOT_RESOLVED -> DURING_RESOLUTION ->
 * RESOLVED); the DURING_RESOLUTION state doubles as a cycle detector.
 */
public class EnumType extends TypeWithProperties {

  private enum State {
    NOT_RESOLVED,
    DURING_RESOLUTION,
    RESOLVED
  }

  private State state;
  private JSTypeExpression typeExpr;
  private String name;
  // The type that accompanies the enum declaration
  private JSType declaredType;
  // The type of the enum's properties, a subtype of the previous field.
  private JSType enumPropType;
  // The type of the object literal that defines the enum
  private JSType enumObjType;
  // All properties share one type, so a set of names suffices (no map needed).
  private ImmutableSet<String> props;

  private EnumType(
      String name, JSTypeExpression typeExpr, Collection<String> props) {
    Preconditions.checkNotNull(typeExpr);
    this.state = State.NOT_RESOLVED;
    this.name = name;
    // typeExpr is non-null iff the enum is not resolved
    this.typeExpr = typeExpr;
    this.props = ImmutableSet.copyOf(props);
  }

  /** Factory method; starts the enum in the NOT_RESOLVED state. */
  public static EnumType make(
      String name, JSTypeExpression typeExpr, Collection<String> props) {
    return new EnumType(name, typeExpr, props);
  }

  public boolean isResolved() {
    return state == State.RESOLVED;
  }

  /** The type named in the @enum annotation. Only valid once resolved. */
  public JSType getEnumeratedType() {
    Preconditions.checkState(state == State.RESOLVED);
    return declaredType;
  }

  /** The type of the enum's elements. Only valid once resolved. */
  public JSType getPropType() {
    Preconditions.checkState(state == State.RESOLVED);
    return enumPropType;
  }

  /** The type of the defining object literal. Only valid once resolved. */
  public JSType getObjLitType() {
    Preconditions.checkState(state == State.RESOLVED);
    return enumObjType;
  }

  // Returns null iff there is a type cycle
  public JSTypeExpression getTypeExpr() {
    Preconditions.checkState(state != State.RESOLVED);
    if (state == State.NOT_RESOLVED) {
      // First request: mark resolution as in progress and hand out the expression.
      state = State.DURING_RESOLUTION;
      return typeExpr;
    }
    // Already mid-resolution: a cycle was hit.
    return null;
  }

  public JSTypeExpression getTypeExprForErrorReporting() {
    Preconditions.checkState(state == State.DURING_RESOLUTION);
    return typeExpr;
  }

  /** Completes resolution, computing the element and object-literal types. */
  void resolveEnum(JSType t) {
    Preconditions.checkNotNull(t);
    if (state == State.RESOLVED) {
      return;
    }
    Preconditions.checkState(state == State.DURING_RESOLUTION,
        "Expected state DURING_RESOLUTION but found %s", state.toString());
    state = State.RESOLVED;
    typeExpr = null;
    declaredType = t;
    enumPropType = JSType.fromEnum(this);
    enumObjType = computeObjType();
  }

  /**
   * Builds the type of the defining object literal. For an enum such as
   * {@code var X = { ONE: 1, TWO: 2 };} declared with an enum annotation,
   * every property of the literal is a constant of the element type.
   */
  private JSType computeObjType() {
    Preconditions.checkState(enumPropType != null);
    PersistentMap<String, Property> properties = PersistentMap.create();
    for (String propName : props) {
      properties = properties.with(
          propName, Property.makeConstant(enumPropType, enumPropType));
    }
    return JSType.fromObjectType(ObjectType.makeObjectType(
        null, properties, null, false, ObjectKind.UNRESTRICTED));
  }

  // The property lookups below all delegate to the enumerated type.

  @Override
  protected JSType getProp(QualifiedName qname) {
    return declaredType.getProp(qname);
  }

  @Override
  protected JSType getDeclaredProp(QualifiedName qname) {
    return declaredType.getDeclaredProp(qname);
  }

  @Override
  protected boolean mayHaveProp(QualifiedName qname) {
    return declaredType.mayHaveProp(qname);
  }

  @Override
  protected boolean hasProp(QualifiedName qname) {
    return declaredType.hasProp(qname);
  }

  @Override
  protected boolean hasConstantProp(QualifiedName qname) {
    return declaredType.hasConstantProp(qname);
  }

  /** True iff any enum in the set has a non-scalar enumerated type. */
  static boolean hasNonScalar(ImmutableSet<EnumType> enums) {
    if (enums == null) {
      return false;
    }
    for (EnumType enumType : enums) {
      if (enumType.declaredType.hasNonScalar()) {
        return true;
      }
    }
    return false;
  }

  /** Null-tolerant union of two enum sets; reuses an input set when possible. */
  static ImmutableSet<EnumType> union(
      ImmutableSet<EnumType> left, ImmutableSet<EnumType> right) {
    if (left == null) {
      return right;
    }
    if (right == null || left.equals(right)) {
      return left;
    }
    return Sets.union(left, right).immutableCopy();
  }

  // We normalize the type so that it doesn't contain both enum {T1} and T1.
  static ImmutableSet<EnumType> normalizeForJoin(
      ImmutableSet<EnumType> newEnums, JSType joinWithoutEnums) {
    ImmutableSet.Builder<EnumType> kept = ImmutableSet.builder();
    int keptCount = 0;
    for (EnumType enumType : newEnums) {
      // Drop enums whose enumerated type is already covered by the join.
      if (!enumType.declaredType.isSubtypeOf(joinWithoutEnums)) {
        kept.add(enumType);
        keptCount++;
      }
    }
    // Nothing was dropped: hand back the original set unchanged.
    return keptCount == newEnums.size() ? newEnums : kept.build();
  }

  /** Checks that every enum of t1 is either shared with t2 or a subtype of it. */
  static boolean areSubtypes(JSType t1, JSType t2) {
    ImmutableSet<EnumType> enums1 = t1.getEnums();
    if (enums1 == null) {
      return true;
    }
    ImmutableSet<EnumType> enums2 = t2.getEnums();
    for (EnumType enumType : enums1) {
      boolean sharedEnum = enums2 != null && enums2.contains(enumType);
      if (!sharedEnum && !enumType.declaredType.isSubtypeOf(t2)) {
        return false;
      }
    }
    return true;
  }

  @Override
  public String toString() {
    return name;
  }
}
/*
 * Copyright 2016 LinkedIn Corp. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 */
package com.github.ambry.router;

import com.github.ambry.utils.TestUtils;
import java.util.Arrays;
import java.util.List;
import java.util.function.BiFunction;
import java.util.function.LongFunction;
import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Test the {@link ByteRange} class.
 */
public class ByteRangeTest {

  /**
   * Test that we can create valid ranges and read their offsets correctly.
   * @throws Exception
   */
  @Test
  public void testValidRange() throws Exception {
    testByteRangeCreationOffsetRange(0, 0, true);
    testByteRangeCreationFromStartOffset(0, true);
    testByteRangeCreationFromStartOffset(15, true);
    testByteRangeCreationLastNBytes(20, true);
    testByteRangeCreationLastNBytes(0, true);
    testByteRangeCreationOffsetRange(22, 44, true);
    testByteRangeCreationFromStartOffset(Long.MAX_VALUE, true);
  }

  /**
   * Ensure that we cannot create invalid ranges.
   * @throws Exception
   */
  @Test
  public void testInvalidRanges() throws Exception {
    // negative indices
    testByteRangeCreationOffsetRange(-2, 1, false);
    testByteRangeCreationOffsetRange(5, -1, false);
    testByteRangeCreationOffsetRange(0, -1, false);
    testByteRangeCreationOffsetRange(-3, -2, false);
    testByteRangeCreationFromStartOffset(-1, false);
    testByteRangeCreationLastNBytes(-2, false);
    // start greater than end offset
    testByteRangeCreationOffsetRange(32, 4, false);
    testByteRangeCreationOffsetRange(1, 0, false);
    testByteRangeCreationOffsetRange(Long.MAX_VALUE, Long.MAX_VALUE - 1, false);
  }

  /**
   * Test that resolving {@link ByteRange}s with a blob size to generate ranges with defined start/end offsets works as
   * expected.
   * @throws Exception
   */
  @Test
  public void testResolvedByteRange() throws Exception {
    // 0-0 (0th byte)
    ByteRange range = ByteRanges.fromOffsetRange(0, 0);
    // Resolution must fail when the blob is too small to contain the range.
    assertRangeResolutionFailure(range, 0);
    assertRangeResolutionFailure(range, -1);
    assertRangeResolutionSuccess(range, 2, 0, 0);

    // 0- (bytes after/including 0)
    range = ByteRanges.fromStartOffset(0);
    assertRangeResolutionFailure(range, 0);
    assertRangeResolutionFailure(range, -1);
    assertRangeResolutionSuccess(range, 20, 0, 19);

    // 15- (bytes after/including 15)
    range = ByteRanges.fromStartOffset(15);
    assertRangeResolutionFailure(range, 15);
    assertRangeResolutionFailure(range, -1);
    assertRangeResolutionSuccess(range, 20, 15, 19);
    assertRangeResolutionSuccess(range, 16, 15, 15);

    // -20 (last 20 bytes)
    range = ByteRanges.fromLastNBytes(20);
    assertRangeResolutionFailure(range, -1);
    // Blob smaller than N resolves to the whole blob (empty blob -> empty range).
    assertRangeResolutionSuccess(range, 0, 0, -1);
    assertRangeResolutionSuccess(range, 19, 0, 18);
    assertRangeResolutionSuccess(range, 20, 0, 19);
    assertRangeResolutionSuccess(range, 30, 10, 29);

    // 22-44 (bytes 22 through 44, inclusive)
    range = ByteRanges.fromOffsetRange(22, 44);
    // A too-short blob clamps the end offset to the last byte.
    assertRangeResolutionSuccess(range, 44, 22, 43);
    assertRangeResolutionSuccess(range, 45, 22, 44);

    // {MAX_LONG-50}- (bytes after/including MAX_LONG-50)
    range = ByteRanges.fromStartOffset(Long.MAX_VALUE - 50);
    assertRangeResolutionFailure(range, 0);
    assertRangeResolutionFailure(range, -1);
    assertRangeResolutionFailure(range, 20);
    assertRangeResolutionSuccess(range, Long.MAX_VALUE, Long.MAX_VALUE - 50, Long.MAX_VALUE - 1);

    // Last 0 bytes
    range = ByteRanges.fromLastNBytes(0);
    assertRangeResolutionSuccess(range, 0, 0, -1);
    assertRangeResolutionSuccess(range, 20, 20, 19);
  }

  /**
   * Test toString, equals, and hashCode methods.
   */
  @Test
  public void testToStringEqualsAndHashcode() {
    ByteRange a = ByteRanges.fromLastNBytes(4);
    ByteRange b = ByteRanges.fromLastNBytes(4);
    assertEquals("ByteRanges should be equal", a, b);
    assertEquals("ByteRange hashcodes should be equal", a.hashCode(), b.hashCode());
    assertEquals("toString output not as expected", "ByteRange{lastNBytes=4}", a.toString());

    a = ByteRanges.fromOffsetRange(2, 5);
    assertFalse("ByteRanges should not be equal", a.equals(b));
    b = ByteRanges.fromOffsetRange(2, 5);
    assertEquals("ByteRanges should be equal", a, b);
    assertEquals("ByteRange hashcodes should be equal", a.hashCode(), b.hashCode());
    assertEquals("toString output not as expected", "ByteRange{startOffset=2, endOffset=5}", a.toString());

    a = ByteRanges.fromStartOffset(7);
    assertFalse("ByteRanges should not be equal", a.equals(b));
    b = ByteRanges.fromStartOffset(7);
    assertEquals("ByteRanges should be equal", a, b);
    assertEquals("ByteRange hashcodes should be equal", a.hashCode(), b.hashCode());
    assertEquals("toString output not as expected", "ByteRange{startOffset=7}", a.toString());
  }

  /**
   * Test that {@link ByteRange} works as expected for byte ranges with a defined start and end offset.
   * Exercises both the deprecated {@code ByteRange} factory and the {@code ByteRanges} factory.
   * @param startOffset the (inclusive) start byte offset to test.
   * @param endOffset the (inclusive) end byte offset to test.
   * @param expectSuccess {@code true} if the {@link ByteRange} creation should succeed.
   * @throws Exception
   */
  private void testByteRangeCreationOffsetRange(long startOffset, long endOffset, boolean expectSuccess)
      throws Exception {
    List<BiFunction<Long, Long, ByteRange>> factories =
        Arrays.asList(ByteRange::fromOffsetRange, ByteRanges::fromOffsetRange);
    for (BiFunction<Long, Long, ByteRange> factory : factories) {
      if (expectSuccess) {
        ByteRange byteRange = factory.apply(startOffset, endOffset);
        assertEquals("Wrong range type", ByteRange.ByteRangeType.OFFSET_RANGE, byteRange.getType());
        assertEquals("Wrong startOffset", startOffset, byteRange.getStartOffset());
        assertEquals("Wrong endOffset", endOffset, byteRange.getEndOffset());
        assertEquals("Wrong range size", endOffset - startOffset + 1, byteRange.getRangeSize());
        // getLastNBytes is undefined for this range type and must throw.
        TestUtils.assertException(UnsupportedOperationException.class, byteRange::getLastNBytes, null);
      } else {
        TestUtils.assertException(IllegalArgumentException.class, () -> factory.apply(startOffset, endOffset), null);
      }
    }
  }

  /**
   * Test that {@link ByteRange} works as expected for byte ranges with only a defined start offset.
   * @param startOffset the (inclusive) start byte offset to test.
   * @param expectSuccess {@code true} if the {@link ByteRange} creation should succeed.
   * @throws Exception
   */
  private void testByteRangeCreationFromStartOffset(long startOffset, boolean expectSuccess) throws Exception {
    List<LongFunction<ByteRange>> factories = Arrays.asList(ByteRange::fromStartOffset, ByteRanges::fromStartOffset);
    for (LongFunction<ByteRange> factory : factories) {
      if (expectSuccess) {
        ByteRange byteRange = factory.apply(startOffset);
        assertEquals("Wrong range type", ByteRange.ByteRangeType.FROM_START_OFFSET, byteRange.getType());
        assertEquals("Wrong startOffset", startOffset, byteRange.getStartOffset());
        // End offset, lastNBytes, and size are undefined until the range is resolved.
        TestUtils.assertException(UnsupportedOperationException.class, byteRange::getEndOffset, null);
        TestUtils.assertException(UnsupportedOperationException.class, byteRange::getLastNBytes, null);
        TestUtils.assertException(UnsupportedOperationException.class, byteRange::getRangeSize, null);
      } else {
        TestUtils.assertException(IllegalArgumentException.class, () -> factory.apply(startOffset), null);
      }
    }
  }

  /**
   * Test that {@link ByteRange} works as expected for byte ranges encoding the number of bytes to read from the end
   * of an object.
   * @param lastNBytes the number of bytes to read from the end of an object.
   * @param expectSuccess {@code true} if the {@link ByteRange} creation should succeed.
* @throws Exception */ private void testByteRangeCreationLastNBytes(long lastNBytes, boolean expectSuccess) throws Exception { List<LongFunction<ByteRange>> factories = Arrays.asList(ByteRange::fromLastNBytes, ByteRanges::fromLastNBytes); for (LongFunction<ByteRange> factory : factories) { if (expectSuccess) { ByteRange byteRange = factory.apply(lastNBytes); assertEquals("Wrong range type", ByteRange.ByteRangeType.LAST_N_BYTES, byteRange.getType()); assertEquals("Wrong lastNBytes", lastNBytes, byteRange.getLastNBytes()); assertEquals("Wrong range size", lastNBytes, byteRange.getRangeSize()); TestUtils.assertException(UnsupportedOperationException.class, byteRange::getStartOffset, null); TestUtils.assertException(UnsupportedOperationException.class, byteRange::getEndOffset, null); } else { TestUtils.assertException(IllegalArgumentException.class, () -> factory.apply(lastNBytes), null); } } } /** * Test and assert that a {@link ByteRange} fails validation with a specified total blob size. * @param byteRange the {@link ByteRange} to resolve with a total blob size. * @param totalSize the total size of a blob. */ private void assertRangeResolutionFailure(ByteRange byteRange, long totalSize) { try { byteRange.toResolvedByteRange(totalSize); fail("Should have failed to resolve range: " + byteRange + " with total size: " + totalSize); } catch (IllegalArgumentException expected) { } } /** * Test and assert that a {@link ByteRange} passes validation with a specified total blob size. Ensure that * the defined (wrt the total blob size) start and end offsets are set correctly in the resolved {@link ByteRange}. * @param byteRange the {@link ByteRange} to resolve with a total blob size. * @param totalSize the total size of a blob. 
* @param startOffset the expected start offset for the resolved {@link ByteRange} * @param endOffset the expected end offset for the resolved {@link ByteRange} * @throws Exception */ private void assertRangeResolutionSuccess(ByteRange byteRange, long totalSize, long startOffset, long endOffset) throws Exception { ByteRange resolvedByteRange = byteRange.toResolvedByteRange(totalSize); assertEquals("Wrong startOffset with raw range: " + byteRange + " and total size: " + totalSize, startOffset, resolvedByteRange.getStartOffset()); assertEquals("Wrong endOffset with raw range: " + byteRange + " and total size: " + totalSize, endOffset, resolvedByteRange.getEndOffset()); } }
/* * Copyright LWJGL. All rights reserved. * License terms: https://www.lwjgl.org/license * MACHINE GENERATED FILE, DO NOT EDIT */ package org.lwjgl.vulkan; import javax.annotation.*; import java.nio.*; import org.lwjgl.*; import org.lwjgl.system.*; import static org.lwjgl.system.MemoryUtil.*; import static org.lwjgl.system.MemoryStack.*; /** * Structure specifying a three-dimensional extent. * * <h5>See Also</h5> * * <p>{@link VkBufferImageCopy}, {@link VkBufferImageCopy2KHR}, {@link VkImageCopy}, {@link VkImageCopy2KHR}, {@link VkImageCreateInfo}, {@link VkImageFormatProperties}, {@link VkImageResolve}, {@link VkImageResolve2KHR}, {@link VkQueueFamilyProperties}, {@link VkSparseImageFormatProperties}, {@link VkSparseImageMemoryBind}</p> * * <h3>Layout</h3> * * <pre><code> * struct VkExtent3D { * uint32_t {@link #width}; * uint32_t {@link #height}; * uint32_t {@link #depth}; * }</code></pre> */ public class VkExtent3D extends Struct implements NativeResource { /** The struct size in bytes. */ public static final int SIZEOF; /** The struct alignment in bytes. */ public static final int ALIGNOF; /** The struct member offsets. */ public static final int WIDTH, HEIGHT, DEPTH; static { Layout layout = __struct( __member(4), __member(4), __member(4) ); SIZEOF = layout.getSize(); ALIGNOF = layout.getAlignment(); WIDTH = layout.offsetof(0); HEIGHT = layout.offsetof(1); DEPTH = layout.offsetof(2); } /** * Creates a {@code VkExtent3D} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be * visible to the struct instance and vice versa. * * <p>The created instance holds a strong reference to the container object.</p> */ public VkExtent3D(ByteBuffer container) { super(memAddress(container), __checkContainer(container, SIZEOF)); } @Override public int sizeof() { return SIZEOF; } /** the width of the extent. */ @NativeType("uint32_t") public int width() { return nwidth(address()); } /** the height of the extent. 
*/ @NativeType("uint32_t") public int height() { return nheight(address()); } /** the depth of the extent. */ @NativeType("uint32_t") public int depth() { return ndepth(address()); } /** Sets the specified value to the {@link #width} field. */ public VkExtent3D width(@NativeType("uint32_t") int value) { nwidth(address(), value); return this; } /** Sets the specified value to the {@link #height} field. */ public VkExtent3D height(@NativeType("uint32_t") int value) { nheight(address(), value); return this; } /** Sets the specified value to the {@link #depth} field. */ public VkExtent3D depth(@NativeType("uint32_t") int value) { ndepth(address(), value); return this; } /** Initializes this struct with the specified values. */ public VkExtent3D set( int width, int height, int depth ) { width(width); height(height); depth(depth); return this; } /** * Copies the specified struct data to this struct. * * @param src the source struct * * @return this struct */ public VkExtent3D set(VkExtent3D src) { memCopy(src.address(), address(), SIZEOF); return this; } // ----------------------------------- /** Returns a new {@code VkExtent3D} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */ public static VkExtent3D malloc() { return wrap(VkExtent3D.class, nmemAllocChecked(SIZEOF)); } /** Returns a new {@code VkExtent3D} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */ public static VkExtent3D calloc() { return wrap(VkExtent3D.class, nmemCallocChecked(1, SIZEOF)); } /** Returns a new {@code VkExtent3D} instance allocated with {@link BufferUtils}. */ public static VkExtent3D create() { ByteBuffer container = BufferUtils.createByteBuffer(SIZEOF); return wrap(VkExtent3D.class, memAddress(container), container); } /** Returns a new {@code VkExtent3D} instance for the specified memory address. 
*/ public static VkExtent3D create(long address) { return wrap(VkExtent3D.class, address); } /** Like {@link #create(long) create}, but returns {@code null} if {@code address} is {@code NULL}. */ @Nullable public static VkExtent3D createSafe(long address) { return address == NULL ? null : wrap(VkExtent3D.class, address); } /** * Returns a new {@link VkExtent3D.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. * * @param capacity the buffer capacity */ public static VkExtent3D.Buffer malloc(int capacity) { return wrap(Buffer.class, nmemAllocChecked(__checkMalloc(capacity, SIZEOF)), capacity); } /** * Returns a new {@link VkExtent3D.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. * * @param capacity the buffer capacity */ public static VkExtent3D.Buffer calloc(int capacity) { return wrap(Buffer.class, nmemCallocChecked(capacity, SIZEOF), capacity); } /** * Returns a new {@link VkExtent3D.Buffer} instance allocated with {@link BufferUtils}. * * @param capacity the buffer capacity */ public static VkExtent3D.Buffer create(int capacity) { ByteBuffer container = __create(capacity, SIZEOF); return wrap(Buffer.class, memAddress(container), capacity, container); } /** * Create a {@link VkExtent3D.Buffer} instance at the specified memory. * * @param address the memory address * @param capacity the buffer capacity */ public static VkExtent3D.Buffer create(long address, int capacity) { return wrap(Buffer.class, address, capacity); } /** Like {@link #create(long, int) create}, but returns {@code null} if {@code address} is {@code NULL}. */ @Nullable public static VkExtent3D.Buffer createSafe(long address, int capacity) { return address == NULL ? null : wrap(Buffer.class, address, capacity); } // ----------------------------------- /** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. 
*/ @Deprecated public static VkExtent3D mallocStack() { return malloc(stackGet()); } /** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */ @Deprecated public static VkExtent3D callocStack() { return calloc(stackGet()); } /** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */ @Deprecated public static VkExtent3D mallocStack(MemoryStack stack) { return malloc(stack); } /** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */ @Deprecated public static VkExtent3D callocStack(MemoryStack stack) { return calloc(stack); } /** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */ @Deprecated public static VkExtent3D.Buffer mallocStack(int capacity) { return malloc(capacity, stackGet()); } /** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */ @Deprecated public static VkExtent3D.Buffer callocStack(int capacity) { return calloc(capacity, stackGet()); } /** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */ @Deprecated public static VkExtent3D.Buffer mallocStack(int capacity, MemoryStack stack) { return malloc(capacity, stack); } /** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */ @Deprecated public static VkExtent3D.Buffer callocStack(int capacity, MemoryStack stack) { return calloc(capacity, stack); } /** * Returns a new {@code VkExtent3D} instance allocated on the specified {@link MemoryStack}. * * @param stack the stack from which to allocate */ public static VkExtent3D malloc(MemoryStack stack) { return wrap(VkExtent3D.class, stack.nmalloc(ALIGNOF, SIZEOF)); } /** * Returns a new {@code VkExtent3D} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero. 
* * @param stack the stack from which to allocate */ public static VkExtent3D calloc(MemoryStack stack) { return wrap(VkExtent3D.class, stack.ncalloc(ALIGNOF, 1, SIZEOF)); } /** * Returns a new {@link VkExtent3D.Buffer} instance allocated on the specified {@link MemoryStack}. * * @param stack the stack from which to allocate * @param capacity the buffer capacity */ public static VkExtent3D.Buffer malloc(int capacity, MemoryStack stack) { return wrap(Buffer.class, stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity); } /** * Returns a new {@link VkExtent3D.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero. * * @param stack the stack from which to allocate * @param capacity the buffer capacity */ public static VkExtent3D.Buffer calloc(int capacity, MemoryStack stack) { return wrap(Buffer.class, stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity); } // ----------------------------------- /** Unsafe version of {@link #width}. */ public static int nwidth(long struct) { return UNSAFE.getInt(null, struct + VkExtent3D.WIDTH); } /** Unsafe version of {@link #height}. */ public static int nheight(long struct) { return UNSAFE.getInt(null, struct + VkExtent3D.HEIGHT); } /** Unsafe version of {@link #depth}. */ public static int ndepth(long struct) { return UNSAFE.getInt(null, struct + VkExtent3D.DEPTH); } /** Unsafe version of {@link #width(int) width}. */ public static void nwidth(long struct, int value) { UNSAFE.putInt(null, struct + VkExtent3D.WIDTH, value); } /** Unsafe version of {@link #height(int) height}. */ public static void nheight(long struct, int value) { UNSAFE.putInt(null, struct + VkExtent3D.HEIGHT, value); } /** Unsafe version of {@link #depth(int) depth}. */ public static void ndepth(long struct, int value) { UNSAFE.putInt(null, struct + VkExtent3D.DEPTH, value); } // ----------------------------------- /** An array of {@link VkExtent3D} structs. 
*/ public static class Buffer extends StructBuffer<VkExtent3D, Buffer> implements NativeResource { private static final VkExtent3D ELEMENT_FACTORY = VkExtent3D.create(-1L); /** * Creates a new {@code VkExtent3D.Buffer} instance backed by the specified container. * * Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values * will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided * by {@link VkExtent3D#SIZEOF}, and its mark will be undefined. * * <p>The created buffer instance holds a strong reference to the container object.</p> */ public Buffer(ByteBuffer container) { super(container, container.remaining() / SIZEOF); } public Buffer(long address, int cap) { super(address, null, -1, 0, cap, cap); } Buffer(long address, @Nullable ByteBuffer container, int mark, int pos, int lim, int cap) { super(address, container, mark, pos, lim, cap); } @Override protected Buffer self() { return this; } @Override protected VkExtent3D getElementFactory() { return ELEMENT_FACTORY; } /** @return the value of the {@link VkExtent3D#width} field. */ @NativeType("uint32_t") public int width() { return VkExtent3D.nwidth(address()); } /** @return the value of the {@link VkExtent3D#height} field. */ @NativeType("uint32_t") public int height() { return VkExtent3D.nheight(address()); } /** @return the value of the {@link VkExtent3D#depth} field. */ @NativeType("uint32_t") public int depth() { return VkExtent3D.ndepth(address()); } /** Sets the specified value to the {@link VkExtent3D#width} field. */ public VkExtent3D.Buffer width(@NativeType("uint32_t") int value) { VkExtent3D.nwidth(address(), value); return this; } /** Sets the specified value to the {@link VkExtent3D#height} field. 
*/ public VkExtent3D.Buffer height(@NativeType("uint32_t") int value) { VkExtent3D.nheight(address(), value); return this; } /** Sets the specified value to the {@link VkExtent3D#depth} field. */ public VkExtent3D.Buffer depth(@NativeType("uint32_t") int value) { VkExtent3D.ndepth(address(), value); return this; } } }
/*L * Copyright SAIC and Capability Plus solutions * * Distributed under the OSI-approved BSD 3-Clause License. * See http://ncip.github.com/cagrid-iphone-app/LICENSE.txt for details. */ package gov.nih.nci.gss.grid; import gov.nih.nci.cagrid.discovery.client.DiscoveryClient; import gov.nih.nci.cagrid.metadata.MetadataUtils; import gov.nih.nci.cagrid.metadata.ServiceMetadata; import gov.nih.nci.cagrid.metadata.common.Address; import gov.nih.nci.cagrid.metadata.common.ResearchCenter; import gov.nih.nci.cagrid.metadata.common.ResearchCenterPointOfContactCollection; import gov.nih.nci.cagrid.metadata.common.UMLAttribute; import gov.nih.nci.cagrid.metadata.dataservice.DomainModel; import gov.nih.nci.cagrid.metadata.dataservice.UMLClass; import gov.nih.nci.cagrid.metadata.exceptions.InvalidResourcePropertyException; import gov.nih.nci.cagrid.metadata.exceptions.RemoteResourcePropertyRetrievalException; import gov.nih.nci.cagrid.metadata.exceptions.ResourcePropertyRetrievalException; import gov.nih.nci.cagrid.metadata.service.Service; import gov.nih.nci.cagrid.metadata.service.ServicePointOfContactCollection; import gov.nih.nci.gss.domain.AnalyticalService; import gov.nih.nci.gss.domain.DataService; import gov.nih.nci.gss.domain.DomainAttribute; import gov.nih.nci.gss.domain.DomainClass; import gov.nih.nci.gss.domain.GridService; import gov.nih.nci.gss.domain.HostingCenter; import gov.nih.nci.gss.util.GSSProperties; import gov.nih.nci.gss.util.GSSUtil; import gov.nih.nci.gss.util.StringUtil; import java.net.SocketException; import java.net.SocketTimeoutException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; import java.util.Set; import org.apache.axis.message.addressing.AttributedURI; import org.apache.axis.message.addressing.EndpointReferenceType; import org.apache.log4j.Logger; /** * Borrowed 
from caNanoLab project Grid service utils for grid node discovery * and grid node URL lookup. Subsequently reborrowed from LSDB. * * @author sahnih, pansu, piepenbringc * */ public class GridIndexService { private static Logger logger = Logger.getLogger(GridIndexService.class); /** * Query the grid index service by domain model name and return a list of * EndpointReferenceType. * * @param indexServiceURL * @param application * @param extantURLs * @param appOwner * @return * @throws GridAutoDiscoveryException */ public static List<GridService> discoverGridServices() throws GridAutoDiscoveryException { EndpointReferenceType[] returnedServices = null; List<EndpointReferenceType> services = null; String indexServiceURL = GSSProperties.getGridIndexURL(); try { DiscoveryClient discoveryClient = new DiscoveryClient( indexServiceURL); returnedServices = discoveryClient.getAllServices(true); } catch (Exception e) { throw new GridAutoDiscoveryException("Error discovering services",e); } if (returnedServices != null) { services = new ArrayList<EndpointReferenceType>(Arrays .asList(returnedServices)); } return populateServiceMetadata(services); } public static List<GridService> populateServiceMetadata( List<EndpointReferenceType> services) { if (services == null) return new ArrayList<GridService>(); Set<GridService> gridNodeSet = new HashSet<GridService>(); for (EndpointReferenceType service : services) { String url = service.getAddress().toString(); // TODO: parallelize this try { ServiceMetadata serviceMetaData = MetadataUtils.getServiceMetadata(service); if (serviceMetaData != null) { DomainModel domainModel = null; try { domainModel = MetadataUtils.getDomainModel(service); } catch (InvalidResourcePropertyException e) { logger.debug("No domain model for: " + url); } catch (RemoteResourcePropertyRetrievalException e) { logger.warn("Could not retrieve domain model for: " + url); } catch (ResourcePropertyRetrievalException e) { logger.warn("Could not parse domain model for: " 
+ url); } GridService gridNode = populateGridService( service,serviceMetaData,domainModel); if (gridNode != null) { logger.info("Discovered service: " + gridNode.getUrl() + " ("+gridNode.getName()+")"); gridNodeSet.add(gridNode); } } } catch (Exception e) { String err = "Can't retrieve service metadata for: " + service.getAddress().toString(); Throwable root = GSSUtil.getRootException(e); if (root instanceof SocketException || root instanceof SocketTimeoutException) { logger.warn(err); } else { logger.error(err,e); } } } return new ArrayList<GridService>(gridNodeSet); } public static String getServiceVersion(EndpointReferenceType service) { String version = null; if (service != null) { try { DomainModel model = gov.nih.nci.cagrid.metadata.MetadataUtils .getDomainModel(service); version = model.getProjectVersion(); String desp = model.getProjectDescription(); logger.debug("Model Version: " + desp); } catch (InvalidResourcePropertyException e) { logger.warn(e.getMessage(), e); } catch (RemoteResourcePropertyRetrievalException e) { logger.warn(e.getMessage(), e); } catch (ResourcePropertyRetrievalException e) { logger.warn(e.getMessage(), e); } } return version; } public static GridService populateGridService(EndpointReferenceType serviceER, ServiceMetadata metadata, DomainModel model) { GridService newService = model != null ? 
new DataService() : new AnalyticalService(); // Build GridService object newService.setPointOfContacts(new HashSet<gov.nih.nci.gss.domain.PointOfContact>()); if (metadata != null) { // Get the buried service description from the metadata Service serviceData = metadata.getServiceDescription().getService(); if (serviceData != null) { newService.setName(serviceData.getName()); newService.setDescription(serviceData.getDescription()); newService.setVersion(serviceData.getVersion()); // Get POC objects ServicePointOfContactCollection pocs = serviceData.getPointOfContactCollection(); if (pocs != null) { newService.setPointOfContacts(populatePOCList(pocs.getPointOfContact())); } } // Build Hosting Center object newService.setHostingCenter(populateHostingCenter(metadata)); } // Set the service URL (unique key) newService.setUrl(serviceER.getAddress().toString()); // Deferred to caller, not available in metadata newService.setIdentifier(null); newService.setSimpleName(null); newService.setPublishDate(null); // Build Domain Model object for data services if (newService instanceof DataService) { DataService dataService = ((DataService)newService); dataService.setDomainModel(populateDomainModel(model)); // Deferred to caller, not available in metadata dataService.setGroup(null); dataService.setSearchDefault(false); dataService.setAccessible(true); } return newService; } private static gov.nih.nci.gss.domain.DomainModel populateDomainModel(DomainModel model) { gov.nih.nci.gss.domain.DomainModel newModel = new gov.nih.nci.gss.domain.DomainModel(); if (model == null) return newModel; newModel.setDescription(model.getProjectDescription()); newModel.setLongName(model.getProjectLongName()); newModel.setVersion(model.getProjectVersion()); List<DomainClass> classList = new ArrayList<DomainClass>(); for (UMLClass umlClass : model.getExposedUMLClassCollection().getUMLClass()) { DomainClass newClass = new DomainClass(); newClass.setClassName(umlClass.getClassName()); 
newClass.setDescription(umlClass.getDescription()); newClass.setDomainPackage(umlClass.getPackageName()); newClass.setModel(newModel); newClass.setCount(null); newClass.setCountDate(null); newClass.setCountError(null); newClass.setCountStacktrace(null); List<DomainAttribute> attrList = new ArrayList<DomainAttribute>(); if ((umlClass.getUmlAttributeCollection() != null) && (umlClass.getUmlAttributeCollection().getUMLAttribute() != null)) { for(UMLAttribute umlAttr : umlClass.getUmlAttributeCollection().getUMLAttribute()) { gov.nih.nci.gss.domain.DomainAttribute newAttr = new gov.nih.nci.gss.domain.DomainAttribute(); newAttr.setAttributeName(umlAttr.getName()); newAttr.setDataTypeName(umlAttr.getDataTypeName()); newAttr.setCdePublicId(umlAttr.getPublicID()); newAttr.setDomainClass(newClass); attrList.add(newAttr); } Collections.sort(attrList, new Comparator<DomainAttribute>() { public int compare(DomainAttribute da0, DomainAttribute da1) { return da0.getAttributeName().compareTo(da1.getAttributeName()); } }); newClass.setAttributes(new LinkedHashSet<DomainAttribute>(attrList)); } classList.add(newClass); } Collections.sort(classList, new Comparator<DomainClass>() { public int compare(DomainClass dc0, DomainClass dc1) { int dpc = dc0.getDomainPackage().compareTo(dc1.getDomainPackage()); if (dpc == 0) { return dc0.getClassName().compareTo(dc1.getClassName()); } return dpc; } }); newModel.setClasses(new LinkedHashSet<DomainClass>(classList)); return newModel; } private static Collection<gov.nih.nci.gss.domain.PointOfContact> populatePOCList(gov.nih.nci.cagrid.metadata.common.PointOfContact[] POCs) { Collection<gov.nih.nci.gss.domain.PointOfContact> POClist = new HashSet<gov.nih.nci.gss.domain.PointOfContact>(); if (POCs == null) return POClist; for (gov.nih.nci.cagrid.metadata.common.PointOfContact POC : POCs) { gov.nih.nci.gss.domain.PointOfContact newPOC = new gov.nih.nci.gss.domain.PointOfContact(); newPOC.setAffiliation(POC.getAffiliation()); 
newPOC.setEmail(POC.getEmail()); newPOC.setRole(POC.getRole()); String name = POC.getFirstName(); if (name == null || name.trim().length() == 0) { name = POC.getLastName(); } else { name = name + " " + POC.getLastName(); } newPOC.setName(name); if (!StringUtil.isEmpty(newPOC.getName()) || !StringUtil.isEmpty(newPOC.getEmail()) || !StringUtil.isEmpty(newPOC.getAffiliation()) || !StringUtil.isEmpty(newPOC.getRole())) { POClist.add(newPOC); } } return POClist; } private static HostingCenter populateHostingCenter(ServiceMetadata metadata) { gov.nih.nci.gss.domain.HostingCenter newCenter = new gov.nih.nci.gss.domain.HostingCenter(); ResearchCenter center = metadata.getHostingResearchCenter().getResearchCenter(); if (center == null) return newCenter; Address rcAddress = center.getAddress(); newCenter.setCountryCode(rcAddress.getCountry()); newCenter.setLocality(rcAddress.getLocality()); newCenter.setPostalCode(rcAddress.getPostalCode()); newCenter.setStateProvince(rcAddress.getStateProvince()); String streetAddr = rcAddress.getStreet2(); if (streetAddr == null || streetAddr.trim().length() == 0) { streetAddr = rcAddress.getStreet1(); } else { streetAddr = rcAddress.getStreet1() + "\n" + streetAddr; } newCenter.setStreet(streetAddr); newCenter.setLongName(center.getDisplayName()); newCenter.setShortName(center.getShortName()); if (StringUtil.isEmpty(newCenter.getLongName())) { return null; } // Build Hosting Center POCs ResearchCenterPointOfContactCollection pocs = center.getPointOfContactCollection(); if (pocs != null) { newCenter.setPointOfContacts(populatePOCList(pocs.getPointOfContact())); } return newCenter; } public static void main(String args[]) throws Exception { String url = "https://206.81.165.52:47210/wsrf/services/cagrid/CaTissueSuite"; EndpointReferenceType service = new EndpointReferenceType(); service.setAddress(new AttributedURI(url)); logger.info("Getting metadata for: " + url); ServiceMetadata serviceMetaData = MetadataUtils.getServiceMetadata(service); 
} }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.vfs2.test; import java.io.OutputStream; import java.util.ArrayList; import java.util.HashSet; import java.util.Set; import org.apache.commons.vfs2.Capability; import org.apache.commons.vfs2.FileChangeEvent; import org.apache.commons.vfs2.FileContent; import org.apache.commons.vfs2.FileListener; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.FileSystem; import org.apache.commons.vfs2.FileSystemException; import org.apache.commons.vfs2.FileType; import org.apache.commons.vfs2.Selectors; import org.junit.Assert; /** * File system test that check that a file system can be modified. */ public class ProviderWriteTests extends AbstractProviderTestCase { protected FileObject getReadFolderDir1() throws FileSystemException { return getReadFolder().resolveFile("dir1"); } /** * Returns the capabilities required by the tests of this test case. */ @Override protected Capability[] getRequiredCaps() { return new Capability[] { Capability.CREATE, Capability.DELETE, Capability.GET_TYPE, Capability.LIST_CHILDREN, Capability.READ_CONTENT, Capability.WRITE_CONTENT }; } /** * Sets up a scratch folder for the test to use. 
*/ protected FileObject createScratchFolder() throws Exception { final FileObject scratchFolder = getWriteFolder(); // Make sure the test folder is empty scratchFolder.delete(Selectors.EXCLUDE_SELF); scratchFolder.createFolder(); return scratchFolder; } /** * Tests folder creation. */ public void testFolderCreate() throws Exception { final FileObject scratchFolder = createScratchFolder(); // Create direct child of the test folder FileObject folder = scratchFolder.resolveFile("dir1"); assertTrue(!folder.exists()); folder.createFolder(); assertTrue(folder.exists()); assertSame(FileType.FOLDER, folder.getType()); assertTrue(folder.isFolder()); assertEquals(0, folder.getChildren().length); // Create a descendant, where the intermediate folders don't exist folder = scratchFolder.resolveFile("dir2/dir1/dir1"); assertTrue(!folder.exists()); assertTrue(!folder.getParent().exists()); assertTrue(!folder.getParent().getParent().exists()); folder.createFolder(); assertTrue(folder.exists()); assertSame(FileType.FOLDER, folder.getType()); assertTrue(folder.isFolder()); assertEquals(0, folder.getChildren().length); assertTrue(folder.getParent().exists()); assertTrue(folder.getParent().getParent().exists()); // Test creating a folder that already exists assertTrue(folder.exists()); folder.createFolder(); } /** * Tests file creation */ public void testFileCreate() throws Exception { final FileObject scratchFolder = createScratchFolder(); // Create direct child of the test folder FileObject file = scratchFolder.resolveFile("file1.txt"); assertTrue(!file.exists()); file.createFile(); assertTrue(file.exists()); assertSame(FileType.FILE, file.getType()); assertTrue(file.isFile()); assertEquals(0, file.getContent().getSize()); assertFalse(file.isHidden()); assertTrue(file.isReadable()); assertTrue(file.isWriteable()); // Create direct child of the test folder - special name file = scratchFolder.resolveFile("file1%25.txt"); assertTrue(!file.exists()); file.createFile(); 
assertTrue(file.exists()); assertSame(FileType.FILE, file.getType()); assertTrue(file.isFile()); assertEquals(0, file.getContent().getSize()); assertFalse(file.isHidden()); assertTrue(file.isReadable()); assertTrue(file.isWriteable()); // Create a descendant, where the intermediate folders don't exist file = scratchFolder.resolveFile("dir1/dir1/file1.txt"); assertTrue(!file.exists()); assertTrue(!file.getParent().exists()); assertTrue(!file.getParent().getParent().exists()); file.createFile(); assertTrue(file.exists()); assertSame(FileType.FILE, file.getType()); assertTrue(file.isFile()); assertEquals(0, file.getContent().getSize()); assertTrue(file.getParent().exists()); assertTrue(file.getParent().getParent().exists()); assertFalse(file.getParent().isHidden()); assertFalse(file.getParent().getParent().isHidden()); // Test creating a file that already exists assertTrue(file.exists()); file.createFile(); assertTrue(file.exists()); assertTrue(file.isReadable()); assertTrue(file.isWriteable()); } /** * Tests file/folder creation with mismatched types. 
*/ public void testFileCreateMismatched() throws Exception { final FileObject scratchFolder = createScratchFolder(); // Create a test file and folder final FileObject file = scratchFolder.resolveFile("dir1/file1.txt"); file.createFile(); assertEquals(FileType.FILE, file.getType()); assertTrue(file.isFile()); final FileObject folder = scratchFolder.resolveFile("dir1/dir2"); folder.createFolder(); assertEquals(FileType.FOLDER, folder.getType()); assertTrue(folder.isFolder()); // Attempt to create a file that already exists as a folder try { folder.createFile(); fail(); } catch (final FileSystemException exc) { } // Attempt to create a folder that already exists as a file try { file.createFolder(); fail(); } catch (final FileSystemException exc) { } // Attempt to create a folder as a child of a file final FileObject folder2 = file.resolveFile("some-child"); try { folder2.createFolder(); fail(); } catch (final FileSystemException exc) { } } /** * Tests deletion */ public void testDelete() throws Exception { // Set-up the test structure final FileObject folder = createScratchFolder(); folder.resolveFile("file1.txt").createFile(); folder.resolveFile("file%25.txt").createFile(); folder.resolveFile("emptydir").createFolder(); folder.resolveFile("dir1/file1.txt").createFile(); folder.resolveFile("dir1/dir2/file2.txt").createFile(); // Delete a file FileObject file = folder.resolveFile("file1.txt"); assertTrue(file.exists()); file.deleteAll(); assertTrue(!file.exists()); // Delete a special name file file = folder.resolveFile("file%25.txt"); assertTrue(file.exists()); file.deleteAll(); assertTrue(!file.exists()); // Delete an empty folder file = folder.resolveFile("emptydir"); assertTrue(file.exists()); file.deleteAll(); assertTrue(!file.exists()); // Recursive delete file = folder.resolveFile("dir1"); final FileObject file2 = file.resolveFile("dir2/file2.txt"); assertTrue(file.exists()); assertTrue(file2.exists()); file.deleteAll(); assertTrue(!file.exists()); 
assertTrue(!file2.exists()); // Delete a file that does not exist file = folder.resolveFile("some-folder/some-file"); assertTrue(!file.exists()); file.deleteAll(); assertTrue(!file.exists()); } /** * Tests deletion */ public void testDeleteAllDescendents() throws Exception { // Set-up the test structure final FileObject folder = createScratchFolder(); folder.resolveFile("file1.txt").createFile(); folder.resolveFile("file%25.txt").createFile(); folder.resolveFile("emptydir").createFolder(); folder.resolveFile("dir1/file1.txt").createFile(); folder.resolveFile("dir1/dir2/file2.txt").createFile(); // Delete a file FileObject file = folder.resolveFile("file1.txt"); assertTrue(file.exists()); file.deleteAll(); assertTrue(!file.exists()); // Delete a special name file file = folder.resolveFile("file%25.txt"); assertTrue(file.exists()); file.deleteAll(); assertTrue(!file.exists()); // Delete an empty folder file = folder.resolveFile("emptydir"); assertTrue(file.exists()); file.deleteAll(); assertTrue(!file.exists()); // Recursive delete file = folder.resolveFile("dir1"); final FileObject file2 = file.resolveFile("dir2/file2.txt"); assertTrue(file.exists()); assertTrue(file2.exists()); file.deleteAll(); assertTrue(!file.exists()); assertTrue(!file2.exists()); // Delete a file that does not exist file = folder.resolveFile("some-folder/some-file"); assertTrue(!file.exists()); file.deleteAll(); assertTrue(!file.exists()); } /** * Tests concurrent read and write on the same file fails. 
*/
    /*
     * imario@apache.org leave this to some sort of LockManager public void testConcurrentReadWrite() throws Exception {
     * final FileObject scratchFolder = createScratchFolder();
     *
     * final FileObject file = scratchFolder.resolveFile("file1.txt"); file.createFile();
     *
     * // Start reading from the file final InputStream instr = file.getContent().getInputStream();
     *
     * try { // Try to write to the file file.getContent().getOutputStream(); fail(); } catch (final FileSystemException
     * e) { // Check error message assertSameMessage("vfs.provider/write-in-use.error", file, e); } finally {
     * instr.close(); } }
     */

    /**
     * Tests concurrent writes on the same file fails.
     */
    /*
     * imario@apache.org leave this to some sort of LockManager public void testConcurrentWrite() throws Exception {
     * final FileObject scratchFolder = createScratchFolder();
     *
     * final FileObject file = scratchFolder.resolveFile("file1.txt"); file.createFile();
     *
     * // Start writing to the file final OutputStream outstr = file.getContent().getOutputStream(); final String
     * testContent = "some content"; try { // Write some content to the first stream
     * outstr.write(testContent.getBytes());
     *
     * // Try to open another output stream file.getContent().getOutputStream(); fail(); } catch (final
     * FileSystemException e) { // Check error message assertSameMessage("vfs.provider/write-in-use.error", file, e); }
     * finally { outstr.close(); }
     *
     * // Make sure that the content written to the first stream is actually applied assertSameContent(testContent,
     * file); }
     */

    /**
     * Tests file copy to and from the same filesystem type. This was a problem w/ FTP.
     */
    public void testCopySameFileSystem() throws Exception {
        final FileObject scratchFolder = createScratchFolder();

        // Create direct child of the test folder
        final FileObject file = scratchFolder.resolveFile("file1.txt");
        assertTrue(!file.exists());

        // Create the source file
        final String content = "Here is some sample content for the file. Blah Blah Blah.";
        final OutputStream os = file.getContent().getOutputStream();
        try {
            os.write(content.getBytes("utf-8"));
        } finally {
            // close in finally so a write failure cannot leak the stream
            os.close();
        }
        assertSameContent(content, file);

        // Make sure we can copy the new file to another file on the same filesystem
        final FileObject fileCopy = scratchFolder.resolveFile("file1copy.txt");
        assertTrue(!fileCopy.exists());
        fileCopy.copyFrom(file, Selectors.SELECT_SELF);

        assertSameContent(content, fileCopy);
    }

    /**
     * Tests overwriting a file on the same file system
     * (copying onto an already-existing target must succeed).
     */
    public void testCopyFromOverwriteSameFileSystem() throws Exception {
        final FileObject scratchFolder = createScratchFolder();

        // Create direct child of the test folder
        final FileObject file = scratchFolder.resolveFile("file1.txt");
        assertTrue(!file.exists());

        // Create the source file
        final String content = "Here is some sample content for the file. Blah Blah Blah.";
        final OutputStream os = file.getContent().getOutputStream();
        try {
            os.write(content.getBytes("utf-8"));
        } finally {
            os.close();
        }
        assertSameContent(content, file);

        // Make sure we can copy the new file to another file on the same filesystem
        final FileObject fileCopy = scratchFolder.resolveFile("file1copy.txt");
        assertTrue(!fileCopy.exists());
        fileCopy.copyFrom(file, Selectors.SELECT_SELF);
        assertSameContent(content, fileCopy);

        // Make sure we can copy the same new file to the same target file on the same filesystem
        assertTrue(fileCopy.exists());
        fileCopy.copyFrom(file, Selectors.SELECT_SELF);
        assertSameContent(content, fileCopy);
    }

    /**
     * Tests create-delete-create-a-file sequence on the same file system
     * (a target deleted between copies must be creatable again).
     */
    public void testCreateDeleteCreateSameFileSystem() throws Exception {
        final FileObject scratchFolder = createScratchFolder();

        // Create direct child of the test folder
        final FileObject file = scratchFolder.resolveFile("file1.txt");
        assertTrue(!file.exists());

        // Create the source file
        final String content = "Here is some sample content for the file. Blah Blah Blah.";
        final OutputStream os = file.getContent().getOutputStream();
        try {
            os.write(content.getBytes("utf-8"));
        } finally {
            os.close();
        }
        assertSameContent(content, file);

        // Make sure we can copy the new file to another file on the same filesystem
        final FileObject fileCopy = scratchFolder.resolveFile("file1copy.txt");
        assertTrue(!fileCopy.exists());
        fileCopy.copyFrom(file, Selectors.SELECT_SELF);
        assertSameContent(content, fileCopy);

        // Delete the file.
        assertTrue(fileCopy.exists());
        assertTrue(fileCopy.delete());

        // Make sure we can copy the same new file to the same target file on the same filesystem
        assertTrue(!fileCopy.exists());
        fileCopy.copyFrom(file, Selectors.SELECT_SELF);
        assertSameContent(content, fileCopy);
    }

    /**
     * Tests that test read folder is not hidden.
     */
    public void testFolderIsHidden() throws Exception {
        final FileObject folder = getReadFolderDir1();
        Assert.assertFalse(folder.isHidden());
    }

    /**
     * Tests that test read folder is readable.
     */
    public void testFolderIsReadable() throws Exception {
        final FileObject folder = getReadFolderDir1();
        Assert.assertTrue(folder.isReadable());
    }

    /**
     * Tests that test folder is writable.
     */
    public void testFolderIsWritable() throws Exception {
        final FileObject folder = getWriteFolder().resolveFile("dir1");
        Assert.assertTrue(folder.isWriteable());
    }

    /**
     * Test that children are handled correctly by create and delete.
*/
    public void testListChildren() throws Exception {
        final FileObject folder = createScratchFolder();
        // Expected child base-names, kept in lock-step with the operations below
        final HashSet<String> names = new HashSet<>();

        // Make sure the folder is empty
        assertEquals(0, folder.getChildren().length);

        // Create a child folder
        folder.resolveFile("dir1").createFolder();
        names.add("dir1");
        assertSameFileSet(names, folder.getChildren());

        // Create a child file
        folder.resolveFile("file1.html").createFile();
        names.add("file1.html");
        assertSameFileSet(names, folder.getChildren());

        // Create a descendent (only the direct child "dir2" shows up in the listing)
        folder.resolveFile("dir2/file1.txt").createFile();
        names.add("dir2");
        assertSameFileSet(names, folder.getChildren());

        // Create a child file via an output stream
        final OutputStream outstr = folder.resolveFile("file2.txt").getContent().getOutputStream();
        outstr.close();
        names.add("file2.txt");
        assertSameFileSet(names, folder.getChildren());

        // Delete a child folder
        folder.resolveFile("dir1").deleteAll();
        names.remove("dir1");
        assertSameFileSet(names, folder.getChildren());

        // Delete a child file
        folder.resolveFile("file1.html").deleteAll();
        names.remove("file1.html");
        assertSameFileSet(names, folder.getChildren());

        // Recreate the folder
        folder.deleteAll();
        folder.createFolder();
        assertEquals(0, folder.getChildren().length);
    }

    /**
     * Check listeners are notified of changes.
     * Each mutation below first queues the expected event(s) on the listener;
     * the listener itself fails the test on any unexpected or missing event.
     */
    public void testListener() throws Exception {
        final FileObject baseFile = createScratchFolder();

        final FileObject child = baseFile.resolveFile("newfile.txt");
        assertTrue(!child.exists());

        final FileSystem fs = baseFile.getFileSystem();
        final TestListener listener = new TestListener(child);
        fs.addListener(child, listener);
        try {
            // Create as a folder
            listener.addCreateEvent();
            child.createFolder();
            listener.assertFinished();

            // Create the folder again. Should not get an event.
            child.createFolder();

            // Delete
            listener.addDeleteEvent();
            child.delete();
            listener.assertFinished();

            // Delete again. Should not get an event
            child.delete();

            // Create as a file
            listener.addCreateEvent();
            child.createFile();
            listener.assertFinished();

            // Create the file again. Should not get an event
            child.createFile();

            listener.addDeleteEvent();
            child.delete();

            // Create as a file, by writing to it.
            listener.addCreateEvent();
            child.getContent().getOutputStream().close();
            listener.assertFinished();

            // Recreate the file by writing to it
            child.getContent().getOutputStream().close();

            // Copy another file over the top
            // (expected as a delete of the old content followed by a create)
            final FileObject otherChild = baseFile.resolveFile("folder1");
            otherChild.createFolder();
            listener.addDeleteEvent();
            listener.addCreateEvent();
            child.copyFrom(otherChild, Selectors.SELECT_SELF);
            listener.assertFinished();
        } finally {
            // Always detach the listener so later tests are unaffected
            fs.removeListener(child, listener);
        }
    }

    /**
     * Ensures the names of a set of files match an expected set.
     */
    private void assertSameFileSet(final Set<String> names, final FileObject[] files) {
        // Make sure the sets are the same length
        assertEquals(names.size(), files.length);

        // Check for unexpected names
        for (final FileObject file : files) {
            assertTrue(names.contains(file.getName().getBaseName()));
        }
    }

    /**
     * A test listener. Expected events are queued via {@link #addCreateEvent()}
     * / {@link #addDeleteEvent()}; each callback pops and verifies the head of
     * the queue, and {@link #assertFinished()} checks nothing is left pending.
     */
    private static class TestListener implements FileListener {
        private final FileObject file;
        // FIFO queue of expected event markers (CREATE / DELETE / CHANGED)
        private final ArrayList<Object> events = new ArrayList<>();
        private static final Object CREATE = "create";
        private static final Object DELETE = "delete";
        private static final Object CHANGED = "changed";

        public TestListener(final FileObject file) {
            this.file = file;
        }

        /**
         * Called when a file is created.
         */
        @Override
        public void fileCreated(final FileChangeEvent event) {
            assertTrue("Unexpected create event", events.size() > 0);
            assertSame("Expecting a create event", CREATE, events.remove(0));
            assertEquals(file, event.getFile());
            try {
                assertTrue(file.exists());
            } catch (final FileSystemException e) {
                // exists() failing at all is a test failure in its own right
                fail();
            }
        }

        /**
         * Called when a file is deleted.
*/
        @Override
        public void fileDeleted(final FileChangeEvent event) {
            assertTrue("Unexpected delete event", events.size() > 0);
            assertSame("Expecting a delete event", DELETE, events.remove(0));
            assertEquals(file, event.getFile());
            try {
                assertTrue(!file.exists());
            } catch (final FileSystemException e) {
                // exists() failing at all is a test failure in its own right
                fail();
            }
        }

        /**
         * Called when a file is changed.
         */
        @Override
        public void fileChanged(final FileChangeEvent event) throws Exception {
            assertTrue("Unexpected changed event", events.size() > 0);
            assertSame("Expecting a changed event", CHANGED, events.remove(0));
            assertEquals(file, event.getFile());
            try {
                // FIX: a "changed" event implies the file still exists. The
                // previous assertion was assertTrue(!file.exists()) — an
                // inverted copy-paste from fileDeleted above. (CHANGED is
                // never queued by the current tests, so this was latent.)
                assertTrue(file.exists());
            } catch (final FileSystemException e) {
                fail();
            }
        }

        // Queue an expected "create" event.
        public void addCreateEvent() {
            events.add(CREATE);
        }

        // Queue an expected "delete" event.
        public void addDeleteEvent() {
            events.add(DELETE);
        }

        // Assert every queued expected event was consumed.
        public void assertFinished() {
            assertEquals("Missing event", 0, events.size());
        }
    }

    /**
     * Tests file write to and from the same filesystem type, covering all
     * three FileContent write overloads.
     */
    public void testWriteSameFileSystem() throws Exception {
        final FileObject scratchFolder = createScratchFolder();

        // Create direct child of the test folder
        final FileObject fileSource = scratchFolder.resolveFile("file1.txt");
        assertTrue(!fileSource.exists());

        // Create the source file
        final String expectedString = "Here is some sample content for the file. Blah Blah Blah.";
        final OutputStream expectedOutputStream = fileSource.getContent().getOutputStream();
        try {
            expectedOutputStream.write(expectedString.getBytes("utf-8"));
        } finally {
            expectedOutputStream.close();
        }
        assertSameContent(expectedString, fileSource);

        // Make sure we can copy the new file to another file on the same filesystem
        final FileObject fileTarget = scratchFolder.resolveFile("file1copy.txt");
        assertTrue(!fileTarget.exists());

        final FileContent contentSource = fileSource.getContent();
        //
        // Tests FileContent#write(FileContent)
        contentSource.write(fileTarget.getContent());
        assertSameContent(expectedString, fileTarget);
        //
        // Tests FileContent#write(OutputStream)
        OutputStream outputStream = fileTarget.getContent().getOutputStream();
        try {
            contentSource.write(outputStream);
        } finally {
            outputStream.close();
        }
        assertSameContent(expectedString, fileTarget);
        //
        // Tests FileContent#write(OutputStream, int) — explicit buffer size
        outputStream = fileTarget.getContent().getOutputStream();
        try {
            contentSource.write(outputStream, 1234);
        } finally {
            outputStream.close();
        }
        assertSameContent(expectedString, fileTarget);
    }

    /**
     * Tests overwriting a file on the same file system through every
     * FileContent write overload; each overwrite must fully replace the
     * previous content.
     */
    public void testOverwriteSameFileSystem() throws Exception {
        final FileObject scratchFolder = createScratchFolder();

        // Create direct child of the test folder
        final FileObject file = scratchFolder.resolveFile("file1.txt");
        assertTrue(!file.exists());

        // Create the source file
        final String content = "Here is some sample content for the file. Blah Blah Blah.";
        final OutputStream os = file.getContent().getOutputStream();
        try {
            os.write(content.getBytes("utf-8"));
        } finally {
            os.close();
        }
        assertSameContent(content, file);

        // Make sure we can copy the new file to another file on the same file system
        final FileObject fileCopy = scratchFolder.resolveFile("file1copy.txt");
        assertTrue(!fileCopy.exists());
        file.getContent().write(fileCopy);
        assertSameContent(content, fileCopy);

        // Make sure we can copy the same new file to the same target file on the same file system
        assertTrue(fileCopy.exists());
        file.getContent().write(fileCopy);
        assertSameContent(content, fileCopy);

        // Make sure we can copy the same new file to the same target file on the same file system
        assertTrue(fileCopy.exists());
        file.getContent().write(fileCopy.getContent());
        assertSameContent(content, fileCopy);

        // Make sure we can copy the same new file to the same target file on the same file system
        assertTrue(fileCopy.exists());
        OutputStream outputStream = fileCopy.getContent().getOutputStream();
        try {
            file.getContent().write(outputStream);
        } finally {
            outputStream.close();
        }
        assertSameContent(content, fileCopy);

        // Make sure we can copy the same new file to the same target file on the same file system
        assertTrue(fileCopy.exists());
        outputStream = fileCopy.getContent().getOutputStream();
        try {
            file.getContent().write(outputStream, 1234);
        } finally {
            outputStream.close();
        }
        assertSameContent(content, fileCopy);
    }
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.jetbrains.python; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.intellij.util.containers.ContainerUtil; import com.jetbrains.python.psi.LanguageLevel; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; import java.util.regex.Pattern; /** * @author dcheryasov */ @NonNls public final class PyNames { public static final String SITE_PACKAGES = "site-packages"; public static final String DIST_PACKAGES = "dist-packages"; /** * int type */ public static final String TYPE_INT = "int"; public static final String TYPE_LONG = "long"; /** * unicode string type (see {@link #TYPE_STRING_TYPES} */ public static final String TYPE_UNICODE = "unicode"; /** * string type (see {@link #TYPE_STRING_TYPES} */ public static final String TYPE_STR = "str"; /** * Any string type */ public static final List<String> TYPE_STRING_TYPES = ContainerUtil.immutableList(TYPE_UNICODE, TYPE_STR); /** * date type */ public static final String TYPE_DATE = "datetime.date"; /** * datetime type */ public static final String TYPE_DATE_TIME = "datetime.datetime"; /** * time type */ public static final String TYPE_TIME = "datetime.time"; public static final String TYPE_BYTES = "bytes"; public static final String TYPE_BYTEARRAY = "bytearray"; public static final String TYPE_ENUM = "enum.Enum"; public static final String PYTHON_SDK_ID_NAME = "Python SDK"; public static final String VERBOSE_REG_EXP_LANGUAGE_ID = "PythonVerboseRegExp"; @NonNls public static final String PYTHON_MODULE_ID = "PYTHON_MODULE"; public static final String TESTCASE_SETUP_NAME = "setUp"; public static final String PY_DOCSTRING_ID = "PyDocstring"; public static 
final String END_WILDCARD = ".*"; private PyNames() { } public static final String INIT = "__init__"; public static final String DUNDER_DICT = "__dict__"; public static final String DOT_PY = ".py"; public static final String DOT_PYI = ".pyi"; public static final String INIT_DOT_PY = INIT + DOT_PY; public static final String INIT_DOT_PYI = INIT + DOT_PYI; public static final String SETUP_DOT_PY = "setup" + DOT_PY; public static final String NEW = "__new__"; public static final String GETATTR = "__getattr__"; public static final String GETATTRIBUTE = "__getattribute__"; public static final String GET = "__get__"; public static final String __CLASS__ = "__class__"; public static final String DUNDER_METACLASS = "__metaclass__"; public static final String METACLASS = "metaclass"; public static final String TYPE = "type"; public static final String SUPER = "super"; public static final String OBJECT = "object"; public static final String NONE = "None"; public static final String TRUE = "True"; public static final String FALSE = "False"; public static final String ELLIPSIS = "..."; public static final String FUNCTION = "function"; public static final String TYPES_FUNCTION_TYPE = "types.FunctionType"; public static final String TYPES_METHOD_TYPE = "types.UnboundMethodType"; public static final String FUTURE_MODULE = "__future__"; public static final String UNICODE_LITERALS = "unicode_literals"; public static final String CLASSMETHOD = "classmethod"; public static final String STATICMETHOD = "staticmethod"; public static final String OVERLOAD = "overload"; public static final String PROPERTY = "property"; public static final String SETTER = "setter"; public static final String DELETER = "deleter"; public static final String GETTER = "getter"; public static final String ALL = "__all__"; public static final String SLOTS = "__slots__"; public static final String DEBUG = "__debug__"; public static final String ISINSTANCE = "isinstance"; public static final String 
ASSERT_IS_INSTANCE = "assertIsInstance"; public static final String HAS_ATTR = "hasattr"; public static final String ISSUBCLASS = "issubclass"; public static final String DOC = "__doc__"; public static final String DOCFORMAT = "__docformat__"; public static final String DIRNAME = "dirname"; public static final String ABSPATH = "abspath"; public static final String NORMPATH = "normpath"; public static final String REALPATH = "realpath"; public static final String JOIN = "join"; public static final String REPLACE = "replace"; public static final String FILE = "__file__"; public static final String PARDIR = "pardir"; public static final String CURDIR = "curdir"; public static final String WARN = "warn"; public static final String DEPRECATION_WARNING = "DeprecationWarning"; public static final String PENDING_DEPRECATION_WARNING = "PendingDeprecationWarning"; public static final String CONTAINER = "Container"; public static final String HASHABLE = "Hashable"; public static final String ITERABLE = "Iterable"; public static final String ITERATOR = "Iterator"; public static final String SIZED = "Sized"; public static final String CALLABLE = "Callable"; public static final String SEQUENCE = "Sequence"; public static final String MAPPING = "Mapping"; public static final String MUTABLE_MAPPING = "MutableMapping"; public static final String ABC_SET = "Set"; public static final String ABC_MUTABLE_SET = "MutableSet"; public static final String PATH_LIKE = "PathLike"; public static final String BUILTIN_PATH_LIKE = "_PathLike"; public static final String AWAITABLE = "Awaitable"; public static final String ASYNC_ITERABLE = "AsyncIterable"; public static final String ABC_NUMBER = "Number"; public static final String ABC_COMPLEX = "Complex"; public static final String ABC_REAL = "Real"; public static final String ABC_RATIONAL = "Rational"; public static final String ABC_INTEGRAL = "Integral"; public static final String CONTAINS = "__contains__"; public static final String HASH = 
"__hash__"; public static final String ITER = "__iter__"; public static final String NEXT = "next"; public static final String DUNDER_NEXT = "__next__"; public static final String LEN = "__len__"; public static final String CALL = "__call__"; public static final String GETITEM = "__getitem__"; public static final String SETITEM = "__setitem__"; public static final String DELITEM = "__delitem__"; public static final String POS = "__pos__"; public static final String NEG = "__neg__"; public static final String DIV = "__div__"; public static final String TRUEDIV = "__truediv__"; public static final String AITER = "__aiter__"; public static final String ANEXT = "__anext__"; public static final String AENTER = "__aenter__"; public static final String AEXIT = "__aexit__"; public static final String DUNDER_AWAIT = "__await__"; public static final String SIZEOF = "__sizeof__"; public static final String INIT_SUBCLASS = "__init_subclass__"; public static final String FSPATH = "__fspath__"; public static final String COMPLEX = "__complex__"; public static final String FLOAT = "__float__"; public static final String INT = "__int__"; public static final String BYTES = "__bytes__"; public static final String ABS = "__abs__"; public static final String ROUND = "__round__"; public static final String CLASS_GETITEM = "__class_getitem__"; public static final String PREPARE = "__prepare__"; public static final String NAME = "__name__"; public static final String ENTER = "__enter__"; public static final String EXIT = "__exit__"; public static final String CALLABLE_BUILTIN = "callable"; public static final String NAMEDTUPLE = "namedtuple"; public static final String COLLECTIONS = "collections"; public static final String COLLECTIONS_NAMEDTUPLE_PY2 = COLLECTIONS + "." + NAMEDTUPLE; public static final String COLLECTIONS_NAMEDTUPLE_PY3 = COLLECTIONS + "." + INIT + "." 
+ NAMEDTUPLE; public static final String FORMAT = "format"; public static final String ABSTRACTMETHOD = "abstractmethod"; public static final String ABSTRACTPROPERTY = "abstractproperty"; public static final String ABC_META_CLASS = "ABCMeta"; public static final String ABC = "abc.ABC"; public static final String ABC_META = "abc.ABCMeta"; public static final String TUPLE = "tuple"; public static final String SET = "set"; public static final String SLICE = "slice"; public static final String DICT = "dict"; public static final String KEYS = "keys"; public static final String APPEND = "append"; public static final String EXTEND = "extend"; public static final String UPDATE = "update"; public static final String CLEAR = "clear"; public static final String POP = "pop"; public static final String POPITEM = "popitem"; public static final String SETDEFAULT = "setdefault"; public static final String PASS = "pass"; public static final String NOSE_TEST = "nose"; public static final String PY_TEST = "pytest"; public static final String TRIAL_TEST = "Twisted"; public static final String TEST_CASE = "TestCase"; public static final String PYCACHE = "__pycache__"; public static final String NOT_IMPLEMENTED_ERROR = "NotImplementedError"; public static final String UNKNOWN_TYPE = "Any"; public static final String UNNAMED_ELEMENT = "<unnamed>"; public static final String UNDERSCORE = "_"; /** * Contains all known predefined names of "__foo__" form. 
*/ public static final Set<String> UNDERSCORED_ATTRIBUTES = ImmutableSet.of( "__all__", "__annotations__", "__author__", "__bases__", "__closure__", "__code__", "__defaults__", "__dict__", "__dir__", "__doc__", "__docformat__", "__file__", "__func__", "__globals__", "__kwdefaults__", "__members__", "__metaclass__", "__mod__", "__module__", "__mro__", "__name__", "__path__", "__qualname__", "__self__", "__slots__", "__version__" ); public static final Set<String> COMPARISON_OPERATORS = ImmutableSet.of( "__eq__", "__ne__", "__lt__", "__le__", "__gt__", "__ge__", "__cmp__", "__contains__" ); public static final Set<String> SUBSCRIPTION_OPERATORS = ImmutableSet.of( GETITEM, SETITEM, DELITEM ); public static class BuiltinDescription { private final String mySignature; public BuiltinDescription(String signature) { mySignature = signature; } public String getSignature() { return mySignature; } // TODO: doc string, too } private static final BuiltinDescription _only_self_descr = new BuiltinDescription("(self)"); private static final BuiltinDescription _self_other_descr = new BuiltinDescription("(self, other)"); private static final BuiltinDescription _self_item_descr = new BuiltinDescription("(self, item)"); private static final BuiltinDescription _self_key_descr = new BuiltinDescription("(self, key)"); private static final BuiltinDescription _exit_descr = new BuiltinDescription("(self, exc_type, exc_val, exc_tb)"); private static final ImmutableMap<String, BuiltinDescription> BuiltinMethods = ImmutableMap.<String, BuiltinDescription>builder() .put(ABS, _only_self_descr) .put("__add__", _self_other_descr) .put("__and__", _self_other_descr) //_BuiltinMethods.put("__all__", _only_self_descr); //_BuiltinMethods.put("__author__", _only_self_descr); //_BuiltinMethods.put("__bases__", _only_self_descr); .put("__call__", new BuiltinDescription("(self, *args, **kwargs)")) .put("__ceil__", _only_self_descr) //_BuiltinMethods.put("__class__", _only_self_descr); .put("__cmp__", 
_self_other_descr) .put("__coerce__", _self_other_descr) .put(COMPLEX, _only_self_descr) .put("__contains__", _self_item_descr) .put("__copy__", _only_self_descr) //_BuiltinMethods.put("__debug__", _only_self_descr); .put("__deepcopy__", new BuiltinDescription("(self, memodict={})")) .put("__del__", _only_self_descr) .put("__delete__", new BuiltinDescription("(self, instance)")) .put("__delattr__", _self_item_descr) .put("__delitem__", _self_key_descr) .put("__delslice__", new BuiltinDescription("(self, i, j)")) //_BuiltinMethods.put("__dict__", _only_self_descr); .put("__divmod__", _self_other_descr) //_BuiltinMethods.put("__doc__", _only_self_descr); //_BuiltinMethods.put("__docformat__", _only_self_descr); .put("__enter__", _only_self_descr) .put("__exit__", _exit_descr) .put("__eq__", _self_other_descr) //_BuiltinMethods.put("__file__", _only_self_descr); .put(FLOAT, _only_self_descr) .put("__floor__", _only_self_descr) .put("__floordiv__", _self_other_descr) //_BuiltinMethods.put("__future__", _only_self_descr); .put("__ge__", _self_other_descr) .put("__get__", new BuiltinDescription("(self, instance, owner)")) .put("__getattr__", _self_item_descr) .put("__getattribute__", _self_item_descr) .put("__getinitargs__", _only_self_descr) .put("__getitem__", _self_item_descr) .put("__getnewargs__", _only_self_descr) //_BuiltinMethods.put("__getslice__", new BuiltinDescription("(self, i, j)")); .put("__getstate__", _only_self_descr) .put("__gt__", _self_other_descr) .put("__hash__", _only_self_descr) .put("__hex__", _only_self_descr) .put("__iadd__", _self_other_descr) .put("__iand__", _self_other_descr) .put("__idiv__", _self_other_descr) .put("__ifloordiv__", _self_other_descr) //_BuiltinMethods.put("__import__", _only_self_descr); .put("__ilshift__", _self_other_descr) .put("__imod__", _self_other_descr) .put("__imul__", _self_other_descr) .put("__index__", _only_self_descr) .put(INIT, _only_self_descr) .put(INT, _only_self_descr) .put("__invert__", 
_only_self_descr) .put("__ior__", _self_other_descr) .put("__ipow__", _self_other_descr) .put("__irshift__", _self_other_descr) .put("__isub__", _self_other_descr) .put("__iter__", _only_self_descr) .put("__itruediv__", _self_other_descr) .put("__ixor__", _self_other_descr) .put("__le__", _self_other_descr) .put("__len__", _only_self_descr) .put("__long__", _only_self_descr) .put("__lshift__", _self_other_descr) .put("__lt__", _self_other_descr) //_BuiltinMethods.put("__members__", _only_self_descr); //_BuiltinMethods.put("__metaclass__", _only_self_descr); .put("__missing__", _self_key_descr) .put("__mod__", _self_other_descr) //_BuiltinMethods.put("__mro__", _only_self_descr); .put("__mul__", _self_other_descr) //_BuiltinMethods.put("__name__", _only_self_descr); .put("__ne__", _self_other_descr) .put("__neg__", _only_self_descr) .put(NEW, new BuiltinDescription("(cls, *args, **kwargs)")) .put("__oct__", _only_self_descr) .put("__or__", _self_other_descr) //_BuiltinMethods.put("__path__", _only_self_descr); .put("__pos__", _only_self_descr) .put("__pow__", new BuiltinDescription("(self, power, modulo=None)")) .put("__radd__", _self_other_descr) .put("__rand__", _self_other_descr) .put("__rdiv__", _self_other_descr) .put("__rdivmod__", _self_other_descr) .put("__reduce__", _only_self_descr) .put("__reduce_ex__", new BuiltinDescription("(self, protocol)")) .put("__repr__", _only_self_descr) .put("__reversed__", _only_self_descr) .put("__rfloordiv__", _self_other_descr) .put("__rlshift__", _self_other_descr) .put("__rmod__", _self_other_descr) .put("__rmul__", _self_other_descr) .put("__ror__", _self_other_descr) .put("__rpow__", _self_other_descr) .put("__rrshift__", _self_other_descr) .put("__rshift__", _self_other_descr) .put("__rsub__", _self_other_descr) .put("__rtruediv__", _self_other_descr) .put("__rxor__", _self_other_descr) .put("__set__", new BuiltinDescription("(self, instance, value)")) .put("__setattr__", new BuiltinDescription("(self, key, value)")) 
.put("__setitem__", new BuiltinDescription("(self, key, value)")) .put("__setslice__", new BuiltinDescription("(self, i, j, sequence)")) .put("__setstate__", new BuiltinDescription("(self, state)")) .put(SIZEOF, _only_self_descr) //_BuiltinMethods.put("__self__", _only_self_descr); //_BuiltinMethods.put("__slots__", _only_self_descr); .put("__str__", _only_self_descr) .put("__sub__", _self_other_descr) .put("__truediv__", _self_other_descr) .put("__trunc__", _only_self_descr) .put("__unicode__", _only_self_descr) //_BuiltinMethods.put("__version__", _only_self_descr); .put("__xor__", _self_other_descr) .build(); public static final ImmutableMap<String, BuiltinDescription> PY2_BUILTIN_METHODS = ImmutableMap.<String, BuiltinDescription>builder() .putAll(BuiltinMethods) .put("__nonzero__", _only_self_descr) .put("__div__", _self_other_descr) .put(NEXT, _only_self_descr) .build(); public static final ImmutableMap<String, BuiltinDescription> PY3_BUILTIN_METHODS = ImmutableMap.<String, BuiltinDescription>builder() .putAll(BuiltinMethods) .put("__bool__", _only_self_descr) .put(BYTES, _only_self_descr) .put("__format__", new BuiltinDescription("(self, format_spec)")) .put("__instancecheck__", new BuiltinDescription("(self, instance)")) .put(PREPARE, new BuiltinDescription("(metacls, name, bases)")) .put(ROUND, new BuiltinDescription("(self, n=None)")) .put("__subclasscheck__", new BuiltinDescription("(self, subclass)")) .put(DUNDER_NEXT, _only_self_descr) .build(); public static final ImmutableMap<String, BuiltinDescription> PY35_BUILTIN_METHODS = ImmutableMap.<String, BuiltinDescription>builder() .putAll(PY3_BUILTIN_METHODS) .put("__imatmul__", _self_other_descr) .put("__matmul__", _self_other_descr) .put("__rmatmul__", _self_other_descr) .put(DUNDER_AWAIT, _only_self_descr) .put(AENTER, _only_self_descr) .put(AEXIT, _exit_descr) .put(AITER, _only_self_descr) .put(ANEXT, _only_self_descr) .build(); public static final ImmutableMap<String, BuiltinDescription> 
PY36_BUILTIN_METHODS =
    ImmutableMap.<String, BuiltinDescription>builder()
      .putAll(PY35_BUILTIN_METHODS)
      .put(INIT_SUBCLASS, new BuiltinDescription("(cls, **kwargs)"))
      .put("__set_name__", new BuiltinDescription("(self, owner, name)"))
      .put("__fspath__", _only_self_descr)
      .build();

  /** Class-level special methods recognized starting with Python 3.7. */
  public static final ImmutableMap<String, BuiltinDescription> PY37_BUILTIN_METHODS =
    ImmutableMap.<String, BuiltinDescription>builder()
      .putAll(PY36_BUILTIN_METHODS)
      .put(CLASS_GETITEM, new BuiltinDescription("(cls, item)"))
      .put("__mro_entries__", new BuiltinDescription("(self, bases)"))
      .build();

  /** Module-level special methods recognized starting with Python 3.7. */
  @NotNull
  private static final ImmutableMap<String, BuiltinDescription> PY37_MODULE_BUILTIN_METHODS =
    ImmutableMap.<String, BuiltinDescription>builder()
      .put("__getattr__", new BuiltinDescription("(name)"))
      .put("__dir__", new BuiltinDescription("()"))
      .build();

  /**
   * Returns the table of class-level special methods for the given language level.
   *
   * @param level the Python language level to look up
   * @return the most specific builtin-method table applicable to {@code level}
   */
  public static ImmutableMap<String, BuiltinDescription> getBuiltinMethods(LanguageLevel level) {
    if (level.isAtLeast(LanguageLevel.PYTHON37)) return PY37_BUILTIN_METHODS;
    if (level.isAtLeast(LanguageLevel.PYTHON36)) return PY36_BUILTIN_METHODS;
    if (level.isAtLeast(LanguageLevel.PYTHON35)) return PY35_BUILTIN_METHODS;
    return level.isPython2() ? PY2_BUILTIN_METHODS : PY3_BUILTIN_METHODS;
  }

  /**
   * Returns the table of module-level special methods for the given language level;
   * empty before Python 3.7.
   */
  public static @NotNull Map<String, BuiltinDescription> getModuleBuiltinMethods(@NotNull LanguageLevel level) {
    return level.isAtLeast(LanguageLevel.PYTHON37) ? PY37_MODULE_BUILTIN_METHODS : Collections.emptyMap();
  }

  // Canonical names, not forced by the interpreter.
  public static final String CANONICAL_SELF = "self";
  public static final String CANONICAL_CLS = "cls";

  public static final String BASESTRING = "basestring";

  /* Python keywords */
  public static final String CLASS = "class";
  public static final String DEF = "def";
  public static final String IF = "if";
  public static final String ELSE = "else";
  public static final String ELIF = "elif";
  public static final String TRY = "try";
  public static final String EXCEPT = "except";
  public static final String FINALLY = "finally";
  public static final String WHILE = "while";
  public static final String FOR = "for";
  public static final String WITH = "with";
  public static final String AS = "as";
  public static final String ASSERT = "assert";
  public static final String DEL = "del";
  public static final String EXEC = "exec";
  public static final String FROM = "from";
  public static final String IMPORT = "import";
  public static final String RAISE = "raise";
  public static final String PRINT = "print";
  public static final String BREAK = "break";
  public static final String CONTINUE = "continue";
  public static final String GLOBAL = "global";
  public static final String RETURN = "return";
  public static final String YIELD = "yield";
  public static final String NONLOCAL = "nonlocal";
  public static final String AND = "and";
  public static final String OR = "or";
  public static final String IS = "is";
  public static final String IN = "in";
  public static final String NOT = "not";
  public static final String LAMBDA = "lambda";
  public static final String ASYNC = "async";
  public static final String AWAIT = "await";

  /**
   * Contains keywords as of CPython 2.5.
   */
  public static final Set<String> KEYWORDS = ImmutableSet.of(
    AND, DEL, FROM, NOT, WHILE,
    AS, ELIF, GLOBAL, OR, WITH,
    ASSERT, ELSE, IF, PASS, YIELD,
    BREAK, EXCEPT, IMPORT, PRINT,
    CLASS, EXEC, IN, RAISE,
    CONTINUE, FINALLY, IS, RETURN,
    DEF, FOR, LAMBDA, TRY
  );

  public static final Set<String> BUILTIN_INTERFACES = ImmutableSet.of(
    CALLABLE, HASHABLE, ITERABLE, ITERATOR, SIZED, CONTAINER, SEQUENCE, MAPPING,
    ABC_COMPLEX, ABC_REAL, ABC_RATIONAL, ABC_INTEGRAL, ABC_NUMBER
  );

  /**
   * TODO: dependency on language level.
   *
   * @param name what to check
   * @return true iff the name is either a keyword or a reserved name, like None.
   */
  public static boolean isReserved(@NonNls String name) {
    return NONE.equals(name) || KEYWORDS.contains(name);
  }

  // NOTE: Java identifier character classes include unicode letters, which is only
  // accurate for py3k identifiers.
  public static final String IDENTIFIER_RE = "\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*";
  private static final Pattern IDENTIFIER_PATTERN = Pattern.compile(IDENTIFIER_RE);

  /**
   * TODO: dependency on language level.
   *
   * @param name what to check
   * @return true iff name is not reserved and is a well-formed identifier.
   */
  public static boolean isIdentifier(@NotNull @NonNls String name) {
    return !isReserved(name) && isIdentifierString(name);
  }

  /** @return true iff the whole string matches {@link #IDENTIFIER_PATTERN}. */
  public static boolean isIdentifierString(@NotNull @NonNls String name) {
    return IDENTIFIER_PATTERN.matcher(name).matches();
  }

  /**
   * @return true iff {@code name} looks like a reflected ("__r*__") operator name or equals
   * {@code CONTAINS}; {@code __rshift__} is excluded explicitly because it is a left-hand
   * operator that merely starts with "r".
   */
  // NOTE(review): the "__r[a-z]+__" pattern also matches non-operator dunders such as
  // __repr__ or __round__ — callers appear to pass operator names only; confirm before
  // tightening the check.
  public static boolean isRightOperatorName(@Nullable String name) {
    if (name == null || "__rshift__".equals(name)) return false;
    return name.matches("__r[a-z]+__") || CONTAINS.equals(name);
  }

  /**
   * @return true iff {@code calleeName} is a right-operator name, or is the mirrored
   * comparison operator of {@code referencedName} (e.g. __lt__ vs __gt__).
   */
  public static boolean isRightOperatorName(@Nullable String referencedName, @Nullable String calleeName) {
    if (isRightOperatorName(calleeName)) return true;
    return referencedName != null
           && calleeName != null
           && calleeName.equals(leftToRightComparisonOperatorName(referencedName));
  }

  /**
   * Maps a left-operator dunder name to its right-hand counterpart: mirrored comparison
   * operators are handled specially, everything else gets an "r" inserted (__add__ -> __radd__).
   */
  @Nullable
  public static String leftToRightOperatorName(@Nullable String name) {
    if (name == null) return null;
    final String mirrored = leftToRightComparisonOperatorName(name);
    if (mirrored != null) return mirrored;
    return name.replaceFirst("__([a-z]+)__", "__r$1__");
  }

  /** Mirrors a comparison operator name, or returns null for non-comparison names. */
  @Nullable
  private static String leftToRightComparisonOperatorName(@NotNull String name) {
    switch (name) {
      case "__lt__": return "__gt__";
      case "__gt__": return "__lt__";
      case "__ge__": return "__le__";
      case "__le__": return "__ge__";
      case "__eq__":
      case "__ne__": return name;
      default: return null;
    }
  }

  /**
   * Available in Python 3 and Python 2 starting from 2.6.
   * <p/>
   * Attributes {@code __doc__}, {@code __dict__} and {@code __module__} should be inherited from object.
   */
  public static final Set<String> FUNCTION_SPECIAL_ATTRIBUTES = ImmutableSet.of(
    "__defaults__", "__globals__", "__closure__", "__code__", "__name__"
  );

  public static final Set<String> LEGACY_FUNCTION_SPECIAL_ATTRIBUTES = ImmutableSet.of(
    "func_defaults", "func_globals", "func_closure", "func_code", "func_name", "func_doc", "func_dict"
  );

  public static final Set<String> PY3_ONLY_FUNCTION_SPECIAL_ATTRIBUTES =
    ImmutableSet.of("__annotations__", "__kwdefaults__");

  public static final Set<String> METHOD_SPECIAL_ATTRIBUTES = ImmutableSet.of("__func__", "__self__", "__name__");

  public static final Set<String> LEGACY_METHOD_SPECIAL_ATTRIBUTES = ImmutableSet.of("im_func", "im_self", "im_class");

  public static final String MRO = "mro";
}
package com.google.auto.value.processor;

import static com.google.common.truth.Truth.assert_;
import static com.google.testing.compile.JavaSourceSubjectFactory.javaSource;

import com.google.common.collect.ImmutableList;
import com.google.testing.compile.JavaFileObjects;

import junit.framework.TestCase;

import java.util.List;

import javax.tools.JavaFileObject;

/**
 * Tests to ensure annotations are kept on AutoValue generated classes
 *
 * @author jmcampanini
 */
public class PropertyAnnotationsTest extends TestCase {

  private static final String PROPERTY_ANNOTATION_TEST =
      "com.google.auto.value.processor.PropertyAnnotationsTest";
  private static final String TEST_ANNOTATION =
      "@com.google.auto.value.processor.PropertyAnnotationsTest.TestAnnotation";

  public static enum TestEnum {
    A, B;

    @Override
    public String toString() {
      // used to prove that the method we determine the value does not use the `toString()` method
      // of the enum
      return "not the same value";
    }
  }

  /** Annotation exercising one member of every scalar annotation-value kind. */
  public static @interface TestAnnotation {
    byte testByte() default 1;
    short testShort() default 2;
    int testInt() default 3;
    long testLong() default 4L;
    float testFloat() default 5.6f;
    double testDouble() default 7.8d;
    char testChar() default 'a';
    String testString() default "10";
    boolean testBoolean() default false;
    Class<?> testClass() default TestEnum.class;
    TestEnum testEnum() default TestEnum.A;
    OtherAnnotation testAnnotation() default @OtherAnnotation(foo = 23, bar = "baz");
  }

  public static @interface OtherAnnotation {
    int foo() default 123;
    String bar() default "bar";
  }

  /** Annotation exercising one member of every array annotation-value kind. */
  public static @interface TestAnnotationArray {
    byte[] testBytes() default {1, 2};
    short[] testShorts() default {3, 4};
    int[] testInts() default {5, 6};
    long[] testLongs() default {7L, 8L};
    float[] testFloats() default {9.1f, 2.3f};
    double[] testDoubles() default {4.5d, 6.7d};
    char[] testChars() default {'a', 'b'};
    String[] testStrings() default {"cde", "fgh"};
    boolean[] testBooleans() default {true, false};
    Class<?>[] testClasses() default {TestEnum.class, TestEnum.class};
    TestEnum[] testEnums() default {TestEnum.A, TestEnum.B};
    OtherAnnotation[] testAnnotations() default
        {@OtherAnnotation(foo = 999), @OtherAnnotation(bar = "baz")};
  }

  /**
   * Converts the accumulated source lines into a {@link JavaFileObject} for compilation.
   * Uses the zero-length-array form of {@code toArray}, which is the recommended idiom
   * (simpler and at least as fast as presizing on modern JVMs).
   */
  private static JavaFileObject toFileObject(String fullyQualifiedName, List<String> lines) {
    return JavaFileObjects.forSourceLines(fullyQualifiedName, lines.toArray(new String[0]));
  }

  /**
   * Builds the {@code @AutoValue}-annotated input source, with the given extra imports and
   * the given annotations applied to the {@code buh()} property.
   */
  private JavaFileObject sourceCode(List<String> imports, List<String> annotations) {
    ImmutableList<String> list = ImmutableList.<String>builder()
        .add(
            "package foo.bar;",
            "",
            "import com.google.auto.value.AutoValue;"
        )
        .addAll(imports)
        .add(
            "",
            "@AutoValue",
            "public abstract class Baz {"
        )
        .addAll(annotations)
        .add(
            " public abstract int buh();",
            "",
            " public static Baz create(int buh) {",
            " return new AutoValue_Baz(buh);",
            " }",
            "}"
        )
        .build();
    return toFileObject("foo.bar.Baz", list);
  }

  /**
   * Builds the source we expect AutoValueProcessor to generate, with the given annotations
   * expected on the {@code buh()} accessor and (optionally) on the constructor parameter.
   *
   * @param constructorParamAnnotation fully-qualified annotation expected before the
   *        constructor's {@code int buh} parameter, or null if none is expected
   */
  private JavaFileObject expectedCode(List<String> annotations, String constructorParamAnnotation) {
    String constructorParamPrefix =
        constructorParamAnnotation == null ? "" : constructorParamAnnotation + " ";
    ImmutableList<String> list = ImmutableList.<String>builder()
        .add(
            "package foo.bar;",
            "",
            "import javax.annotation.Generated;",
            "",
            "@Generated(\"" + AutoValueProcessor.class.getName() + "\")",
            "final class AutoValue_Baz extends Baz {",
            " private final int buh;",
            "",
            " AutoValue_Baz(" + constructorParamPrefix + "int buh) {",
            " this.buh = buh;",
            " }",
            ""
        )
        .addAll(annotations)
        .add(
            " @Override public int buh() {",
            " return buh;",
            " }",
            "",
            " @Override public String toString() {",
            " return \"Baz{\"",
            " + \"buh=\" + buh",
            " + \"}\";",
            " }",
            "",
            " @Override public boolean equals(Object o) {",
            " if (o == this) {",
            " return true;",
            " }",
            " if (o instanceof Baz) {",
            " Baz that = (Baz) o;",
            " return (this.buh == that.buh());",
            " }",
            " return false;",
            " }",
            "",
            " @Override public int hashCode() {",
            " int h = 1;",
            " h *= 1000003;",
            " h ^= this.buh;",
            " return h;",
            " }",
            "}"
        )
        .build();
    return toFileObject("foo.bar.AutoValue_Baz", list);
  }

  /**
   * Runs AutoValueProcessor over the synthesized input and asserts that the generated
   * class matches the expectation, including a constructor-parameter annotation.
   */
  private void assertGeneratedMatches(
      List<String> imports,
      List<String> annotations,
      List<String> expectedAnnotations,
      String expectedConstructorParamAnnotation) {
    JavaFileObject javaFileObject = sourceCode(imports, annotations);
    JavaFileObject expectedOutput =
        expectedCode(expectedAnnotations, expectedConstructorParamAnnotation);

    assert_().about(javaSource())
        .that(javaFileObject)
        .processedWith(new AutoValueProcessor())
        .compilesWithoutError()
        .and().generatesSources(expectedOutput);
  }

  /** Overload for the common case of no constructor-parameter annotation. */
  private void assertGeneratedMatches(
      List<String> imports,
      List<String> annotations,
      List<String> expectedAnnotations) {
    assertGeneratedMatches(imports, annotations, expectedAnnotations, null);
  }

  public void testSimpleAnnotation() {
    assertGeneratedMatches(
        ImmutableList.of("import javax.annotation.Nullable;"),
        ImmutableList.of("@Nullable"),
        ImmutableList.of("@javax.annotation.Nullable"),
        "@javax.annotation.Nullable");
  }

  public void testSingleStringValueAnnotation() {
    assertGeneratedMatches(
        ImmutableList.<String>of(),
        ImmutableList.of("@SuppressWarnings(\"a\")"),
        ImmutableList.of("@java.lang.SuppressWarnings(value={\"a\"})"));
  }

  public void testMultiStringValueAnnotation() {
    assertGeneratedMatches(
        ImmutableList.<String>of(),
        ImmutableList.of("@SuppressWarnings({\"a\", \"b\"})"),
        ImmutableList.of("@java.lang.SuppressWarnings(value={\"a\", \"b\"})"));
  }

  public void testNumberValueAnnotation() {
    assertGeneratedMatches(
        ImmutableList.<String>of(),
        ImmutableList.of(TEST_ANNOTATION + "(testShort = 1, testInt = 2, testLong = 3L)"),
        ImmutableList.of(TEST_ANNOTATION + "(testShort = 1, testInt = 2, testLong = 3L)"));
  }

  public void testByteValueAnnotation() {
    assertGeneratedMatches(
        ImmutableList.<String>of(),
        ImmutableList.of(TEST_ANNOTATION + "(testByte = 0)"),
        ImmutableList.of(TEST_ANNOTATION + "(testByte = 0)"));
  }

  public void testDecimalValueAnnotation() {
    assertGeneratedMatches(
        ImmutableList.<String>of(),
        ImmutableList.of(TEST_ANNOTATION + "(testDouble = 1.2d, testFloat = 3.4f)"),
        ImmutableList.of(TEST_ANNOTATION + "(testDouble = 1.2d, testFloat = 3.4f)"));
  }

  public void testOtherValuesAnnotation() {
    assertGeneratedMatches(
        ImmutableList.<String>of(),
        ImmutableList.of(TEST_ANNOTATION
            + "(testBoolean = true, testString = \"hallo\", testChar = 'a')"),
        ImmutableList.of(TEST_ANNOTATION
            + "(testBoolean = true, testString = \"hallo\", testChar = 'a')"));
  }

  public void testClassAnnotation() {
    assertGeneratedMatches(
        ImmutableList.<String>of(),
        ImmutableList.of(TEST_ANNOTATION + "(testClass = String.class)"),
        ImmutableList.of(TEST_ANNOTATION + "(testClass = java.lang.String.class)"));
  }

  public void testEnumAnnotation() {
    assertGeneratedMatches(
        ImmutableList.<String>of(),
        ImmutableList.of(TEST_ANNOTATION
            + "(testEnum = " + PROPERTY_ANNOTATION_TEST + ".TestEnum.A)"),
        ImmutableList.of(TEST_ANNOTATION
            + "(testEnum = " + PROPERTY_ANNOTATION_TEST + ".TestEnum.A)"));
  }

  public void testEmptyAnnotationAnnotation() {
    assertGeneratedMatches(
        ImmutableList.<String>of(),
        ImmutableList.of(TEST_ANNOTATION
            + "(testAnnotation = @" + PROPERTY_ANNOTATION_TEST + ".OtherAnnotation)"),
        ImmutableList.of(TEST_ANNOTATION
            + "(testAnnotation = @" + PROPERTY_ANNOTATION_TEST + ".OtherAnnotation)"));
  }

  public void testValuedAnnotationAnnotation() {
    assertGeneratedMatches(
        ImmutableList.<String>of(),
        ImmutableList.of(TEST_ANNOTATION
            + "(testAnnotation = @" + PROPERTY_ANNOTATION_TEST + ".OtherAnnotation(foo=999))"),
        ImmutableList.of(TEST_ANNOTATION
            + "(testAnnotation = @" + PROPERTY_ANNOTATION_TEST + ".OtherAnnotation(foo=999))"));
  }

  public void testNumberArrayAnnotation() {
    assertGeneratedMatches(
        ImmutableList.<String>of(),
        ImmutableList.of(TEST_ANNOTATION
            + "Array(testShorts = {2, 3}, testInts = {4, 5}, testLongs = {6L, 7L})"),
        ImmutableList.of(TEST_ANNOTATION
            + "Array(testShorts = {2, 3}, testInts = {4, 5}, testLongs = {6L, 7L})"));
  }

  public void testByteArrayAnnotation() {
    assertGeneratedMatches(
        ImmutableList.<String>of(),
        ImmutableList.of(TEST_ANNOTATION + "Array(testBytes = {0, 1})"),
        ImmutableList.of(TEST_ANNOTATION + "Array(testBytes = {0, 1})"));
  }

  public void testDecimalArrayAnnotation() {
    assertGeneratedMatches(
        ImmutableList.<String>of(),
        ImmutableList.of(TEST_ANNOTATION
            + "Array(testDoubles = {1.2d, 3.4d}, testFloats = {5.6f, 7.8f})"),
        ImmutableList.of(TEST_ANNOTATION
            + "Array(testDoubles = {1.2d, 3.4d}, testFloats = {5.6f, 7.8f})"));
  }

  public void testOtherArrayAnnotation() {
    assertGeneratedMatches(
        ImmutableList.<String>of(),
        ImmutableList.of(TEST_ANNOTATION + "Array(testBooleans = {false, false},"
            + " testStrings = {\"aaa\", \"bbb\"}, testChars={'x', 'y'})"),
        ImmutableList.of(TEST_ANNOTATION + "Array(testBooleans = {false, false},"
            + " testStrings = {\"aaa\", \"bbb\"}, testChars={'x', 'y'})"));
  }

  public void testClassArrayAnnotation() {
    assertGeneratedMatches(
        ImmutableList.<String>of(),
        ImmutableList.of(TEST_ANNOTATION + "Array(testClasses = {String.class, Long.class})"),
        ImmutableList.of(TEST_ANNOTATION
            + "Array(testClasses = {java.lang.String.class, java.lang.Long.class})"));
  }

  public void testImportedClassArrayAnnotation() {
    assertGeneratedMatches(
        ImmutableList.of("import javax.annotation.Nullable;"),
        ImmutableList.of(TEST_ANNOTATION + "Array(testClasses = {Nullable.class, Long.class})"),
        ImmutableList.of(TEST_ANNOTATION
            + "Array(testClasses = {javax.annotation.Nullable.class, java.lang.Long.class})"));
  }

  public void testEnumArrayAnnotation() {
    assertGeneratedMatches(
        ImmutableList.<String>of(),
        ImmutableList.of(TEST_ANNOTATION
            + "Array(testEnums = {" + PROPERTY_ANNOTATION_TEST + ".TestEnum.A})"),
        ImmutableList.of(TEST_ANNOTATION
            + "Array(testEnums = {" + PROPERTY_ANNOTATION_TEST + ".TestEnum.A})"));
  }

  public void testArrayOfEmptyAnnotationAnnotation() {
    assertGeneratedMatches(
        ImmutableList.<String>of(),
        ImmutableList.of(TEST_ANNOTATION
            + "Array(testAnnotations = {@" + PROPERTY_ANNOTATION_TEST + ".OtherAnnotation})"),
        ImmutableList.of(TEST_ANNOTATION
            + "Array(testAnnotations = {@" + PROPERTY_ANNOTATION_TEST + ".OtherAnnotation})"));
  }

  public void testArrayOfValuedAnnotationAnnotation() {
    assertGeneratedMatches(
        ImmutableList.<String>of(),
        ImmutableList.of(TEST_ANNOTATION
            + "Array(testAnnotations = {@" + PROPERTY_ANNOTATION_TEST
            + ".OtherAnnotation(foo = 999)})"),
        ImmutableList.of(TEST_ANNOTATION
            + "Array(testAnnotations = {@" + PROPERTY_ANNOTATION_TEST
            + ".OtherAnnotation(foo = 999)})"));
  }
}
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.command.impl; import com.intellij.CommonBundle; import com.intellij.ide.DataManager; import com.intellij.idea.ActionsBundle; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.CommonDataKeys; import com.intellij.openapi.application.Application; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.command.CommandEvent; import com.intellij.openapi.command.CommandListener; import com.intellij.openapi.command.CommandProcessor; import com.intellij.openapi.command.UndoConfirmationPolicy; import com.intellij.openapi.command.undo.*; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.EditorFactory; import com.intellij.openapi.extensions.Extensions; import com.intellij.openapi.fileEditor.*; import com.intellij.openapi.fileEditor.impl.text.TextEditorProvider; import com.intellij.openapi.ide.CopyPasteManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ex.ProjectEx; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.EmptyRunnable; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.registry.Registry; import com.intellij.openapi.util.text.StringUtil; import 
com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.wm.ex.WindowManagerEx; import com.intellij.psi.ExternalChangeAction; import com.intellij.util.ObjectUtils; import com.intellij.util.containers.HashSet; import gnu.trove.THashSet; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.TestOnly; import java.awt.*; import java.util.*; import java.util.List; public class UndoManagerImpl extends UndoManager implements Disposable { private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.command.impl.UndoManagerImpl"); private static final int COMMANDS_TO_KEEP_LIVE_QUEUES = 100; private static final int COMMAND_TO_RUN_COMPACT = 20; private static final int FREE_QUEUES_LIMIT = 30; @Nullable private final ProjectEx myProject; private final CommandProcessor myCommandProcessor; private UndoProvider[] myUndoProviders; private CurrentEditorProvider myEditorProvider; private final UndoRedoStacksHolder myUndoStacksHolder = new UndoRedoStacksHolder(true); private final UndoRedoStacksHolder myRedoStacksHolder = new UndoRedoStacksHolder(false); private final CommandMerger myMerger; private CommandMerger myCurrentMerger; private Project myCurrentActionProject = DummyProject.getInstance(); private int myCommandTimestamp = 1; private int myCommandLevel; private enum OperationState { NONE, UNDO, REDO } private OperationState myCurrentOperationState = OperationState.NONE; private DocumentReference myOriginatorReference; public static boolean isRefresh() { return ApplicationManager.getApplication().hasWriteAction(ExternalChangeAction.class); } public static int getGlobalUndoLimit() { return Registry.intValue("undo.globalUndoLimit"); } public static int getDocumentUndoLimit() { return Registry.intValue("undo.documentUndoLimit"); } public UndoManagerImpl(CommandProcessor commandProcessor) { this(null, commandProcessor); } public UndoManagerImpl(@Nullable ProjectEx project, CommandProcessor 
commandProcessor) { myProject = project; myCommandProcessor = commandProcessor; if (myProject == null || !myProject.isDefault()) { runStartupActivity(); } myMerger = new CommandMerger(this); } @Nullable public Project getProject() { return myProject; } @Override public void dispose() { } private void runStartupActivity() { myEditorProvider = new FocusBasedCurrentEditorProvider(); myCommandProcessor.addCommandListener(new CommandListener() { private boolean myStarted; @Override public void commandStarted(CommandEvent event) { if (myProject != null && myProject.isDisposed()) return; onCommandStarted(event.getProject(), event.getUndoConfirmationPolicy(), event.shouldRecordActionForOriginalDocument()); } @Override public void commandFinished(CommandEvent event) { if (myProject != null && myProject.isDisposed()) return; onCommandFinished(event.getProject(), event.getCommandName(), event.getCommandGroupId()); } @Override public void undoTransparentActionStarted() { if (myProject != null && myProject.isDisposed()) return; if (!isInsideCommand()) { myStarted = true; onCommandStarted(myProject, UndoConfirmationPolicy.DEFAULT, true); } } @Override public void undoTransparentActionFinished() { if (myProject != null && myProject.isDisposed()) return; if (myStarted) { myStarted = false; onCommandFinished(myProject, "", null); } } }, this); Disposer.register(this, new DocumentUndoProvider(myProject)); myUndoProviders = myProject == null ? 
Extensions.getExtensions(UndoProvider.EP_NAME) : Extensions.getExtensions(UndoProvider.PROJECT_EP_NAME, myProject); for (UndoProvider undoProvider : myUndoProviders) { if (undoProvider instanceof Disposable) { Disposer.register(this, (Disposable)undoProvider); } } } public boolean isActive() { return Comparing.equal(myProject, myCurrentActionProject); } private boolean isInsideCommand() { return myCommandLevel > 0; } private void onCommandStarted(final Project project, UndoConfirmationPolicy undoConfirmationPolicy, boolean recordOriginalReference) { if (myCommandLevel == 0) { for (UndoProvider undoProvider : myUndoProviders) { undoProvider.commandStarted(project); } myCurrentActionProject = project; } commandStarted(undoConfirmationPolicy, myProject == project && recordOriginalReference); LOG.assertTrue(myCommandLevel == 0 || !(myCurrentActionProject instanceof DummyProject)); } private void onCommandFinished(final Project project, final String commandName, final Object commandGroupId) { commandFinished(commandName, commandGroupId); if (myCommandLevel == 0) { for (UndoProvider undoProvider : myUndoProviders) { undoProvider.commandFinished(project); } myCurrentActionProject = DummyProject.getInstance(); } LOG.assertTrue(myCommandLevel == 0 || !(myCurrentActionProject instanceof DummyProject)); } private void commandStarted(UndoConfirmationPolicy undoConfirmationPolicy, boolean recordOriginalReference) { if (myCommandLevel == 0) { myCurrentMerger = new CommandMerger(this, CommandProcessor.getInstance().isUndoTransparentActionInProgress()); if (recordOriginalReference && myProject != null) { Editor editor = null; final Application application = ApplicationManager.getApplication(); if (application.isUnitTestMode() || application.isHeadlessEnvironment()) { editor = CommonDataKeys.EDITOR.getData(DataManager.getInstance().getDataContext()); } else { Component component = WindowManagerEx.getInstanceEx().getFocusedComponent(myProject); if (component != null) { editor = 
CommonDataKeys.EDITOR.getData(DataManager.getInstance().getDataContext(component)); } } if (editor != null) { Document document = editor.getDocument(); VirtualFile file = FileDocumentManager.getInstance().getFile(document); if (file != null && file.isValid()) { myOriginatorReference = DocumentReferenceManager.getInstance().create(file); } } } } LOG.assertTrue(myCurrentMerger != null, String.valueOf(myCommandLevel)); myCurrentMerger.setBeforeState(getCurrentState()); myCurrentMerger.mergeUndoConfirmationPolicy(undoConfirmationPolicy); myCommandLevel++; } private void commandFinished(String commandName, Object groupId) { if (myCommandLevel == 0) return; // possible if command listener was added within command myCommandLevel--; if (myCommandLevel > 0) return; if (myProject != null && myCurrentMerger.hasActions() && !myCurrentMerger.isTransparent() && myCurrentMerger.isPhysical()) { addFocusedDocumentAsAffected(); } myOriginatorReference = null; myCurrentMerger.setAfterState(getCurrentState()); myMerger.commandFinished(commandName, groupId, myCurrentMerger); disposeCurrentMerger(); } private void addFocusedDocumentAsAffected() { if (myOriginatorReference == null || myCurrentMerger.hasChangesOf(myOriginatorReference, true)) return; final DocumentReference[] refs = {myOriginatorReference}; myCurrentMerger.addAction(new MentionOnlyUndoableAction(refs)); } private EditorAndState getCurrentState() { FileEditor editor = myEditorProvider.getCurrentEditor(); if (editor == null) { return null; } if (!editor.isValid()) { return null; } return new EditorAndState(editor, editor.getState(FileEditorStateLevel.UNDO)); } private void disposeCurrentMerger() { LOG.assertTrue(myCommandLevel == 0); if (myCurrentMerger != null) { myCurrentMerger = null; } } @Override public void nonundoableActionPerformed(@NotNull final DocumentReference ref, final boolean isGlobal) { ApplicationManager.getApplication().assertIsDispatchThread(); if (myProject != null && myProject.isDisposed()) return; 
undoableActionPerformed(new NonUndoableAction(ref, isGlobal)); } @Override public void undoableActionPerformed(@NotNull UndoableAction action) { ApplicationManager.getApplication().assertIsDispatchThread(); if (myProject != null && myProject.isDisposed()) return; if (myCurrentOperationState != OperationState.NONE) return; if (myCommandLevel == 0) { LOG.assertTrue(action instanceof NonUndoableAction, "Undoable actions allowed inside commands only (see com.intellij.openapi.command.CommandProcessor.executeCommand())"); commandStarted(UndoConfirmationPolicy.DEFAULT, false); myCurrentMerger.addAction(action); commandFinished("", null); return; } if (isRefresh()) myOriginatorReference = null; myCurrentMerger.addAction(action); } public void markCurrentCommandAsGlobal() { myCurrentMerger.markAsGlobal(); } void addAffectedDocuments(@NotNull Document... docs) { if (!isInsideCommand()) { LOG.error("Must be called inside command"); return; } List<DocumentReference> refs = new ArrayList<>(docs.length); for (Document each : docs) { // is document's file still valid VirtualFile file = FileDocumentManager.getInstance().getFile(each); if (file != null && !file.isValid()) continue; refs.add(DocumentReferenceManager.getInstance().create(each)); } myCurrentMerger.addAdditionalAffectedDocuments(refs); } public void addAffectedFiles(@NotNull VirtualFile... 
files) { if (!isInsideCommand()) { LOG.error("Must be called inside command"); return; } List<DocumentReference> refs = new ArrayList<>(files.length); for (VirtualFile each : files) { refs.add(DocumentReferenceManager.getInstance().create(each)); } myCurrentMerger.addAdditionalAffectedDocuments(refs); } public void invalidateActionsFor(@NotNull DocumentReference ref) { ApplicationManager.getApplication().assertIsDispatchThread(); myMerger.invalidateActionsFor(ref); if (myCurrentMerger != null) myCurrentMerger.invalidateActionsFor(ref); myUndoStacksHolder.invalidateActionsFor(ref); myRedoStacksHolder.invalidateActionsFor(ref); } @Override public void undo(@Nullable FileEditor editor) { ApplicationManager.getApplication().assertIsDispatchThread(); LOG.assertTrue(isUndoAvailable(editor)); undoOrRedo(editor, true); } @Override public void redo(@Nullable FileEditor editor) { ApplicationManager.getApplication().assertIsDispatchThread(); LOG.assertTrue(isRedoAvailable(editor)); undoOrRedo(editor, false); } private void undoOrRedo(final FileEditor editor, final boolean isUndo) { myCurrentOperationState = isUndo ? 
// NOTE(review): this chunk begins mid-method — the enclosing undoOrRedo(...)
// method starts above this view; the tail below records the operation state,
// runs the undo/redo through the CommandProcessor, and rethrows any failure.
    OperationState.UNDO : OperationState.REDO;
    // Capture any RuntimeException thrown inside the command so it can be
    // rethrown on the caller's stack after executeCommand() returns.
    final RuntimeException[] exception = new RuntimeException[1];
    Runnable executeUndoOrRedoAction = () -> {
      try {
        CopyPasteManager.getInstance().stopKillRings();
        myMerger.undoOrRedo(editor, isUndo);
      }
      catch (RuntimeException ex) {
        exception[0] = ex;
      }
      finally {
        // Always reset state, even on failure, so isUndo/RedoInProgress() clears.
        myCurrentOperationState = OperationState.NONE;
      }
    };
    String name = getUndoOrRedoActionNameAndDescription(editor, isUndoInProgress()).second;
    CommandProcessor.getInstance()
      .executeCommand(myProject, executeUndoOrRedoAction, name, null, myMerger.getUndoConfirmationPolicy());
    if (exception[0] != null) throw exception[0];
  }

  /** Returns true while an undo operation is executing. */
  @Override
  public boolean isUndoInProgress() {
    return myCurrentOperationState == OperationState.UNDO;
  }

  /** Returns true while a redo operation is executing. */
  @Override
  public boolean isRedoInProgress() {
    return myCurrentOperationState == OperationState.REDO;
  }

  @Override
  public boolean isUndoAvailable(@Nullable FileEditor editor) {
    return isUndoOrRedoAvailable(editor, true);
  }

  @Override
  public boolean isRedoAvailable(@Nullable FileEditor editor) {
    return isUndoOrRedoAvailable(editor, false);
  }

  /**
   * Checks undo/redo availability for the given editor.
   * Must be called on the EDT (asserted below).
   */
  boolean isUndoOrRedoAvailable(@Nullable FileEditor editor, boolean undo) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    Collection<DocumentReference> refs = getDocRefs(editor);
    return refs != null && isUndoOrRedoAvailable(refs, undo);
  }

  /** True if either undo or redo is available for the single document reference. */
  boolean isUndoOrRedoAvailable(@NotNull DocumentReference ref) {
    Set<DocumentReference> refs = Collections.singleton(ref);
    return isUndoOrRedoAvailable(refs, true) || isUndoOrRedoAvailable(refs, false);
  }

  private boolean isUndoOrRedoAvailable(@NotNull Collection<DocumentReference> refs, boolean isUndo) {
    // The merger may hold a not-yet-flushed command that is undoable on its own.
    if (isUndo && myMerger.isUndoAvailable(refs)) return true;
    UndoRedoStacksHolder stackHolder = getStackHolder(isUndo);
    return stackHolder.canBeUndoneOrRedone(refs);
  }

  /**
   * Resolves the document references affected by the editor.
   * Returns null for read-only viewers (undo not applicable),
   * an empty list when there is no editor (global context).
   */
  private static Collection<DocumentReference> getDocRefs(@Nullable FileEditor editor) {
    if (editor instanceof TextEditor && ((TextEditor)editor).getEditor().isViewer()) {
      return null;
    }
    if (editor == null) {
      return Collections.emptyList();
    }
    return getDocumentReferences(editor);
  }

  /** Collects the document references for an editor, skipping documents backed by invalid files. */
  @NotNull
  static Set<DocumentReference> getDocumentReferences(@NotNull FileEditor editor) {
    Set<DocumentReference> result = new THashSet<>();
    if (editor instanceof DocumentReferenceProvider) {
      result.addAll(((DocumentReferenceProvider)editor).getDocumentReferences());
      return result;
    }
    Document[] documents = TextEditorProvider.getDocuments(editor);
    if (documents != null) {
      for (Document each : documents) {
        Document original = getOriginal(each);
        // KirillK : in AnAction.update we may have an editor with an invalid file
        VirtualFile f = FileDocumentManager.getInstance().getFile(each);
        if (f != null && !f.isValid()) continue;
        result.add(DocumentReferenceManager.getInstance().create(original));
      }
    }
    return result;
  }

  @NotNull
  private UndoRedoStacksHolder getStackHolder(boolean isUndo) {
    return isUndo ? myUndoStacksHolder : myRedoStacksHolder;
  }

  @NotNull
  @Override
  public Pair<String, String> getUndoActionNameAndDescription(FileEditor editor) {
    return getUndoOrRedoActionNameAndDescription(editor, true);
  }

  @NotNull
  @Override
  public Pair<String, String> getRedoActionNameAndDescription(FileEditor editor) {
    return getUndoOrRedoActionNameAndDescription(editor, false);
  }

  /**
   * Builds the (action text, description) pair shown in the Edit menu.
   * The action text embeds a truncated (30 chars) command name; the
   * description falls back to the "empty" bundle message when unavailable.
   */
  @NotNull
  private Pair<String, String> getUndoOrRedoActionNameAndDescription(FileEditor editor, boolean undo) {
    String desc = isUndoOrRedoAvailable(editor, undo) ? doFormatAvailableUndoRedoAction(editor, undo) : null;
    if (desc == null) desc = "";
    String shortActionName = StringUtil.first(desc, 30, true);
    if (desc.isEmpty()) {
      desc = undo
             ? ActionsBundle.message("action.undo.description.empty")
             : ActionsBundle.message("action.redo.description.empty");
    }
    return Pair.create((undo ? ActionsBundle.message("action.undo.text", shortActionName)
                             : ActionsBundle.message("action.redo.text", shortActionName)).trim(),
                       (undo ? ActionsBundle.message("action.undo.description", desc)
                             : ActionsBundle.message("action.redo.description", desc)).trim());
  }

  /** Returns the name of the next command to be undone/redone, or null when unavailable. */
  @Nullable
  private String doFormatAvailableUndoRedoAction(FileEditor editor, boolean isUndo) {
    Collection<DocumentReference> refs = getDocRefs(editor);
    if (refs == null) return null;
    if (isUndo && myMerger.isUndoAvailable(refs)) return myMerger.getCommandName();
    return getStackHolder(isUndo).getLastAction(refs).getCommandName();
  }

  @NotNull
  UndoRedoStacksHolder getUndoStacksHolder() {
    return myUndoStacksHolder;
  }

  @NotNull
  UndoRedoStacksHolder getRedoStacksHolder() {
    return myRedoStacksHolder;
  }

  /** Advances and returns the monotonically increasing command timestamp. */
  int nextCommandTimestamp() {
    return ++myCommandTimestamp;
  }

  /** Unwraps a copied document back to its original (see isCopy). */
  @NotNull
  private static Document getOriginal(@NotNull Document document) {
    Document result = document.getUserData(ORIGINAL_DOCUMENT);
    return result == null ? document : result;
  }

  static boolean isCopy(@NotNull Document d) {
    return d.getUserData(ORIGINAL_DOCUMENT) != null;
  }

  /** Periodically trims stale undo/redo queues (every COMMAND_TO_RUN_COMPACT commands, outside undo/redo). */
  protected void compact() {
    if (myCurrentOperationState == OperationState.NONE && myCommandTimestamp % COMMAND_TO_RUN_COMPACT == 0) {
      doCompact();
    }
  }

  /**
   * Drops undo/redo queues for documents that are not currently open,
   * keeping at most FREE_QUEUES_LIMIT of them; the least recently used
   * (by last command timestamp) are cleared first, but recently touched
   * queues (within COMMANDS_TO_KEEP_LIVE_QUEUES) are preserved.
   */
  private void doCompact() {
    Collection<DocumentReference> refs = collectReferencesWithoutMergers();
    Collection<DocumentReference> openDocs = new HashSet<>();
    for (DocumentReference each : refs) {
      VirtualFile file = each.getFile();
      if (file == null) {
        Document document = each.getDocument();
        if (document != null && EditorFactory.getInstance().getEditors(document, myProject).length > 0) {
          openDocs.add(each);
        }
      }
      else {
        if (myProject != null && FileEditorManager.getInstance(myProject).isFileOpen(file)) {
          openDocs.add(each);
        }
      }
    }
    refs.removeAll(openDocs);
    if (refs.size() <= FREE_QUEUES_LIMIT) return;

    DocumentReference[] backSorted = refs.toArray(new DocumentReference[refs.size()]);
    Arrays.sort(backSorted, Comparator.comparingInt(this::getLastCommandTimestamp));

    for (int i = 0; i < backSorted.length - FREE_QUEUES_LIMIT; i++) {
      DocumentReference each = backSorted[i];
      if (getLastCommandTimestamp(each) + COMMANDS_TO_KEEP_LIVE_QUEUES > myCommandTimestamp) break;
      clearUndoRedoQueue(each);
    }
  }

  private int getLastCommandTimestamp(@NotNull DocumentReference ref) {
    return Math.max(myUndoStacksHolder.getLastCommandTimestamp(ref),
                    myRedoStacksHolder.getLastCommandTimestamp(ref));
  }

  /** All document references present on either stack (the merger's pending command is excluded). */
  @NotNull
  private Collection<DocumentReference> collectReferencesWithoutMergers() {
    Set<DocumentReference> result = new THashSet<>();
    myUndoStacksHolder.collectAllAffectedDocuments(result);
    myRedoStacksHolder.collectAllAffectedDocuments(result);
    return result;
  }

  private void clearUndoRedoQueue(@NotNull DocumentReference docRef) {
    myMerger.flushCurrentCommand();
    disposeCurrentMerger();
    myUndoStacksHolder.clearStacks(false, Collections.singleton(docRef));
    myRedoStacksHolder.clearStacks(false, Collections.singleton(docRef));
  }

  @TestOnly
  public void setEditorProvider(@NotNull CurrentEditorProvider p) {
    myEditorProvider = p;
  }

  @TestOnly
  @NotNull
  public CurrentEditorProvider getEditorProvider() {
    return myEditorProvider;
  }

  @TestOnly
  public void dropHistoryInTests() {
    flushMergers();
    LOG.assertTrue(myCommandLevel == 0, myCommandLevel);
    myUndoStacksHolder.clearAllStacksInTests();
    myRedoStacksHolder.clearAllStacksInTests();
  }

  @TestOnly
  private void flushMergers() {
    assert myProject == null || !myProject.isDisposed();
    // Run dummy command in order to flush all mergers...
    CommandProcessor.getInstance().executeCommand(myProject, EmptyRunnable.getInstance(), CommonBundle.message("drop.undo.history.command.name"), null);
  }

  @TestOnly
  public void flushCurrentCommandMerger() {
    myMerger.flushCurrentCommand();
  }

  @TestOnly
  public void clearUndoRedoQueueInTests(@NotNull VirtualFile file) {
    clearUndoRedoQueue(DocumentReferenceManager.getInstance().create(file));
  }

  @TestOnly
  public void clearUndoRedoQueueInTests(@NotNull Document document) {
    clearUndoRedoQueue(DocumentReferenceManager.getInstance().create(document));
  }

  @Override
  public String toString() {
    return "UndoManager for " + ObjectUtils.notNull(myProject, "application");
  }
}
package org.tdc.dom;

import java.util.ArrayList;
import java.util.List;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tdc.book.BookUtil;
import org.tdc.book.TestDoc;
import org.tdc.modelinst.AttribNodeInst;
import org.tdc.modelinst.CompositorNodeInst;
import org.tdc.modelinst.ElementNodeInst;
import org.tdc.modelinst.ModelInst;
import org.tdc.modelinst.NonAttribNodeInst;
import org.tdc.result.Message;
import org.tdc.result.Result;
import org.tdc.spreadsheet.Spreadsheet;
import org.tdc.spreadsheet.SpreadsheetFile;
import org.tdc.util.Util;
import org.w3c.dom.Attr;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;

/**
 * Builds DOM {@link Document}s for {@link TestDoc}s from the data in a particular column of a {@link Spreadsheet}.
 *
 * <p>Note: Apache POI spreadsheet processing is NOT thread safe;
 * as such, the processing for a {@link SpreadsheetFile} must be done in a single thread.
 */
public class TestDocDOMBuilder {

	private static final Logger log = LoggerFactory.getLogger(TestDocDOMBuilder.class);

	private static final String MESSAGE_TYPE_WARNING = "warning";

	private final DocumentBuilder documentBuilder;

	// Builder inputs; modelInst, sheet, nodeRowStart, testDocColNum, and
	// namespace are required and validated in build().
	private ModelInst modelInst;
	private Spreadsheet sheet;
	private int nodeRowStart;
	private int testDocColNum;
	private String testDocColLetter;
	private String namespace;
	private Result result;          // optional sink for warnings
	private int maxMessages;        // cap on warnings recorded in result

	// Document under construction for the current build() call.
	private Document document;

	public TestDocDOMBuilder() {
		documentBuilder = createDocumentBuilder();
		nodeRowStart = Util.UNDEFINED;
		testDocColNum = Util.UNDEFINED;
		maxMessages = Util.NO_LIMIT;
	}

	public TestDocDOMBuilder setModelInst(ModelInst modelInst) {
		this.modelInst = modelInst;
		return this;
	}

	public TestDocDOMBuilder setSpreadsheet(Spreadsheet sheet) {
		this.sheet = sheet;
		return this;
	}

	public TestDocDOMBuilder setNodeRowStart(int nodeRowStart) {
		this.nodeRowStart = nodeRowStart;
		return this;
	}

	public TestDocDOMBuilder setTestDocColNumAndLetter(int testDocColNum, String testDocColLetter) {
		this.testDocColNum = testDocColNum;
		this.testDocColLetter = testDocColLetter;
		return this;
	}

	public TestDocDOMBuilder setNamespace(String namespace) {
		this.namespace = namespace;
		return this;
	}

	public TestDocDOMBuilder setResult(Result result) {
		this.result = result;
		return this;
	}

	public TestDocDOMBuilder setMaxMessages(int maxMessages) {
		this.maxMessages = maxMessages;
		return this;
	}

	/**
	 * Builds a new DOM Document from the configured spreadsheet column.
	 *
	 * @return the new Document with the model's root element appended.
	 * @throws IllegalStateException if a required property was not set.
	 */
	public Document build() {
		if (modelInst == null || sheet == null ||
				nodeRowStart == Util.UNDEFINED ||
				testDocColNum == Util.UNDEFINED ||
				namespace == null) {
			// FIX: was a raw RuntimeException; IllegalStateException is the
			// idiomatic type for an unmet precondition and matches the style
			// already used in buildChildElements() (backward compatible —
			// IllegalStateException is a RuntimeException).
			throw new IllegalStateException("Required TestDocDOMBuilder properties not initialized");
		}
		document = createDocument();
		Node rootDOMElement = buildDOMTreeFromElementInst(modelInst.getRootElement(), true);
		document.appendChild(rootDOMElement);
		return document;
	}

	/**
	 * Recursively builds the DOM Element for an element node, or returns null
	 * when the element is empty and not explicitly requested (no value, no
	 * empty-tag marker, no attributes, no children). The root is always built.
	 */
	private Element buildDOMTreeFromElementInst(ElementNodeInst elementInst, boolean isRoot) {
		int rowNum = nodeRowStart + elementInst.getRowOffset();
		String value = sheet.getCellValue(rowNum, testDocColNum).trim();
		// The empty-tag marker forces creation of an element with no value.
		boolean hasEmptyTag = value.startsWith(BookUtil.EMPTY_TAG);
		value = hasEmptyTag ? "" : value;
		if (elementInst.hasChild() && value.length() > 0) {
			// Parent elements carry structure, not text; the value is dropped.
			parentCannotHaveValueWarning(elementInst, rowNum, value);
		}
		List<Attr> childAttribs = buildChildAttribs(elementInst);
		List<Element> childElements = buildChildElements(elementInst);
		Element element = null;
		if (isRoot || hasEmptyTag || value.length() > 0 ||
				childAttribs.size() > 0 || childElements.size() > 0) {
			element = createElement(elementInst);
			addChildAttribs(element, childAttribs);
			addChildElementsOrElementValue(element, childElements, value);
		}
		return element;
	}

	/** Creates the namespaced DOM element and links it back to its model node via user data. */
	private Element createElement(ElementNodeInst elementInst) {
		Element element = document.createElementNS(namespace, elementInst.getName());
		element.setUserData(DOMUtil.DOM_USER_DATA_RELATED_TDC_NODE, elementInst, null);
		return element;
	}

	private void addChildAttribs(Element element, List<Attr> childAttribs) {
		for (Attr attrib : childAttribs) {
			element.setAttributeNodeNS(attrib);
		}
	}

	/** Children take precedence; a text value is only added to leaf elements. */
	private void addChildElementsOrElementValue(Element element, List<Element> childElements, String value) {
		if (childElements.size() > 0) {
			for (Element childElement : childElements) {
				element.appendChild(childElement);
			}
		}
		else if (value.length() > 0) {
			element.appendChild(document.createTextNode(value));
		}
	}

	/** Compositors (sequence/choice/all) have no DOM node of their own; their children bubble up. */
	private List<Element> buildDOMTreeFromCompositorInst(CompositorNodeInst compositorInst) {
		int rowNum = nodeRowStart + compositorInst.getRowOffset();
		String value = sheet.getCellValue(rowNum, testDocColNum).trim();
		if (value.length() > 0) {
			compositorCannotHaveValueWarning(compositorInst, rowNum, value);
		}
		return buildChildElements(compositorInst);
	}

	/** Builds Attr nodes for cells that have a value or the explicit empty-tag marker. */
	private List<Attr> buildChildAttribs(ElementNodeInst elementInst) {
		List<Attr> childAttribs = new ArrayList<>();
		for (AttribNodeInst attribInstChild : elementInst.getAttributes()) {
			int rowNum = nodeRowStart + attribInstChild.getRowOffset();
			String value = sheet.getCellValue(rowNum, testDocColNum).trim();
			boolean hasEmptyTag = value.startsWith(BookUtil.EMPTY_TAG);
			value = hasEmptyTag ? "" : value;
			Attr attr = null;
			if (hasEmptyTag || value.length() > 0) {
				attr = createAttr(attribInstChild, value);
				childAttribs.add(attr);
			}
		}
		return childAttribs;
	}

	private Attr createAttr(AttribNodeInst attribInstChild, String value) {
		Attr attr = document.createAttributeNS(null, attribInstChild.getNodeDef().getName());
		attr.setUserData(DOMUtil.DOM_USER_DATA_RELATED_TDC_NODE, attribInstChild, null);
		attr.setValue(value);
		return attr;
	}

	/** Builds the element children of a node, flattening compositor subtrees into the list. */
	private List<Element> buildChildElements(NonAttribNodeInst nonAttribInst) {
		List<Element> childElements = new ArrayList<>();
		for (NonAttribNodeInst nonAttribInstChild : nonAttribInst.getChildren()) {
			if (nonAttribInstChild instanceof ElementNodeInst) {
				ElementNodeInst childElementInst = (ElementNodeInst)nonAttribInstChild;
				Element childElement = buildDOMTreeFromElementInst(childElementInst, false);
				if (childElement != null) {
					childElements.add(childElement);
				}
			}
			else if (nonAttribInstChild instanceof CompositorNodeInst) {
				CompositorNodeInst childCompositorInst = (CompositorNodeInst)nonAttribInstChild;
				List<Element> childList = buildDOMTreeFromCompositorInst(childCompositorInst);
				childElements.addAll(childList);
			}
			else {
				throw new IllegalStateException("NonAttributeNodeInstance of unknown type: " +
						nonAttribInstChild.getClass().getName());
			}
		}
		return childElements;
	}

	private DocumentBuilder createDocumentBuilder() {
		DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
		try {
			return dbf.newDocumentBuilder();
		}
		catch (ParserConfigurationException e) {
			throw new RuntimeException("Unable to create new DOM DocumentBuilder: ", e);
		}
	}

	private Document createDocument() {
		Document document = documentBuilder.newDocument();
		document.setXmlStandalone(true);
		return document;
	}

	private void parentCannotHaveValueWarning(ElementNodeInst elementInst, int rowNum, String value) {
		String messageStr = "Element '" + elementInst.getDispName() +
				"' cannot have a value because it is a parent node; will be ignored";
		warning(messageStr, rowNum, value);
	}

	private void compositorCannotHaveValueWarning(CompositorNodeInst compositorInst, int rowNum, String value) {
		String messageStr = "Compositor '" + compositorInst.getDispName() +
				"' cannot have a value; will be ignored";
		warning(messageStr, rowNum, value);
	}

	/** Records a warning on the optional Result, respecting the maxMessages cap. */
	private void warning(String messageStr, int rowNum, String value) {
		if (result != null &&
				(maxMessages == Util.NO_LIMIT || result.getMessages().size() < maxMessages)) {
			Message message = new Message
					.Builder(MESSAGE_TYPE_WARNING, messageStr)
					.setRowNumColNum(rowNum, testDocColNum)
					.setCellRef(testDocColLetter + rowNum)
					.setValue(value)
					.build();
			result.addMessage(message);
		}
	}
}
/* ======================================================================== * JCommon : a free general purpose class library for the Java(tm) platform * ======================================================================== * * (C) Copyright 2000-2014, by Object Refinery Limited and Contributors. * * Project Info: http://www.jfree.org/jcommon/index.html * * This library is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation; either version 2.1 of the License, or * (at your option) any later version. * * This library is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public * License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, * USA. * * [Java is a trademark or registered trademark of Sun Microsystems, Inc. * in the United States and other countries.] * * ------------------ * TextUtilities.java * ------------------ * (C) Copyright 2004-2014, by Object Refinery Limited and Contributors. 
* * Original Author: David Gilbert (for Object Refinery Limited); * Contributor(s): Brian Fischer; * * $Id: TextUtilities.java,v 1.27 2011/12/14 20:25:40 mungady Exp $ * * Changes * ------- * 07-Jan-2004 : Version 1 (DG); * 24-Mar-2004 : Added 'paint' argument to createTextBlock() method (DG); * 07-Apr-2004 : Added getTextBounds() method and useFontMetricsGetStringBounds * flag (DG); * 08-Apr-2004 : Changed word break iterator to line break iterator in the * createTextBlock() method - see bug report 926074 (DG); * 03-Sep-2004 : Updated createTextBlock() method to add ellipses when limit * is reached (DG); * 30-Sep-2004 : Modified bounds returned by drawAlignedString() method (DG); * 10-Nov-2004 : Added new createTextBlock() method that works with * newlines (DG); * 19-Apr-2005 : Changed default value of useFontMetricsGetStringBounds (DG); * 17-May-2005 : createTextBlock() now recognises '\n' (DG); * 27-Jun-2005 : Added code to getTextBounds() method to work around Sun's bug * parade item 6183356 (DG); * 06-Jan-2006 : Reformatted (DG); * 27-Apr-2009 : Fix text wrapping with new lines (DG); * 27-Jul-2009 : Use AttributedString in drawRotatedString() (DG); * 14-Dec-2011 : Fix for nextLineBreak() method - thanks to Brian Fischer (DG); * 24-Oct-2013 : Update drawRotatedString() to use drawAlignedString() when * the rotation angle is 0.0 (DG); * 25-Oct-2013 : Added drawStringsWithFontAttributes flag (DG); * 28-Feb-2014 : Fix endless loop in createTextBlock() (DG); * */ package org.jfree.text; import java.awt.Font; import java.awt.FontMetrics; import java.awt.Graphics2D; import java.awt.Paint; import java.awt.Shape; import java.awt.font.FontRenderContext; import java.awt.font.LineMetrics; import java.awt.font.TextLayout; import java.awt.geom.AffineTransform; import java.awt.geom.Rectangle2D; import java.text.AttributedString; import java.text.BreakIterator; import org.jfree.base.BaseBoot; import org.jfree.ui.TextAnchor; import org.jfree.util.Log; import 
org.jfree.util.LogContext; import org.jfree.util.ObjectUtilities; /** * Some utility methods for working with text in Java2D. */ public class TextUtilities { /** Access to logging facilities. */ protected static final LogContext logger = Log.createContext( TextUtilities.class); /** * When this flag is set to <code>true</code>, strings will be drawn * as attributed strings with the attributes taken from the current font. * This allows for underlining, strike-out etc, but it means that * TextLayout will be used to render the text: * * http://www.jfree.org/phpBB2/viewtopic.php?p=45459&highlight=#45459 */ private static boolean drawStringsWithFontAttributes = false; /** * A flag that controls whether or not the rotated string workaround is * used. */ private static boolean useDrawRotatedStringWorkaround; /** * A flag that controls whether the FontMetrics.getStringBounds() method * is used or a workaround is applied. */ private static boolean useFontMetricsGetStringBounds; static { try { boolean isJava14 = ObjectUtilities.isJDK14(); String configRotatedStringWorkaround = BaseBoot.getInstance() .getGlobalConfig().getConfigProperty( "org.jfree.text.UseDrawRotatedStringWorkaround", "auto"); if (configRotatedStringWorkaround.equals("auto")) { useDrawRotatedStringWorkaround = !isJava14; } else { useDrawRotatedStringWorkaround = configRotatedStringWorkaround.equals("true"); } String configFontMetricsStringBounds = BaseBoot.getInstance() .getGlobalConfig().getConfigProperty( "org.jfree.text.UseFontMetricsGetStringBounds", "auto"); if (configFontMetricsStringBounds.equals("auto")) { useFontMetricsGetStringBounds = isJava14; } else { useFontMetricsGetStringBounds = configFontMetricsStringBounds.equals("true"); } } catch (Exception e) { // ignore everything. useDrawRotatedStringWorkaround = true; useFontMetricsGetStringBounds = true; } } /** * Private constructor prevents object creation. 
*/ private TextUtilities() { // prevent instantiation } /** * Creates a {@link TextBlock} from a <code>String</code>. Line breaks * are added where the <code>String</code> contains '\n' characters. * * @param text the text. * @param font the font. * @param paint the paint. * * @return A text block. */ public static TextBlock createTextBlock(String text, Font font, Paint paint) { if (text == null) { throw new IllegalArgumentException("Null 'text' argument."); } TextBlock result = new TextBlock(); String input = text; boolean moreInputToProcess = (text.length() > 0); int start = 0; while (moreInputToProcess) { int index = input.indexOf("\n"); if (index > start) { String line = input.substring(start, index); if (index < input.length() - 1) { result.addLine(line, font, paint); input = input.substring(index + 1); } else { moreInputToProcess = false; } } else if (index == start) { if (index < input.length() - 1) { input = input.substring(index + 1); } else { moreInputToProcess = false; } } else { result.addLine(input, font, paint); moreInputToProcess = false; } } return result; } /** * Creates a new text block from the given string, breaking the * text into lines so that the <code>maxWidth</code> value is * respected. * * @param text the text. * @param font the font. * @param paint the paint. * @param maxWidth the maximum width for each line. * @param measurer the text measurer. * * @return A text block. */ public static TextBlock createTextBlock(String text, Font font, Paint paint, float maxWidth, TextMeasurer measurer) { return createTextBlock(text, font, paint, maxWidth, Integer.MAX_VALUE, measurer); } /** * Creates a new text block from the given string, breaking the * text into lines so that the <code>maxWidth</code> value is * respected. * * @param text the text. * @param font the font. * @param paint the paint. * @param maxWidth the maximum width for each line. * @param maxLines the maximum number of lines. * @param measurer the text measurer. 
* * @return A text block. */ public static TextBlock createTextBlock(String text, Font font, Paint paint, float maxWidth, int maxLines, TextMeasurer measurer) { TextBlock result = new TextBlock(); BreakIterator iterator = BreakIterator.getLineInstance(); iterator.setText(text); int current = 0; int lines = 0; int length = text.length(); while (current < length && lines < maxLines) { int next = nextLineBreak(text, current, maxWidth, iterator, measurer); if (next == BreakIterator.DONE) { result.addLine(text.substring(current), font, paint); return result; } else if (next == current) { next++; // we must take one more character or we'll loop forever } result.addLine(text.substring(current, next), font, paint); lines++; current = next; while (current < text.length()&& text.charAt(current) == '\n') { current++; } } if (current < length) { TextLine lastLine = result.getLastLine(); TextFragment lastFragment = lastLine.getLastTextFragment(); String oldStr = lastFragment.getText(); String newStr = "..."; if (oldStr.length() > 3) { newStr = oldStr.substring(0, oldStr.length() - 3) + "..."; } lastLine.removeFragment(lastFragment); TextFragment newFragment = new TextFragment(newStr, lastFragment.getFont(), lastFragment.getPaint()); lastLine.addFragment(newFragment); } return result; } /** * Returns the character index of the next line break. If the next * character is wider than <code>width</code> this method will return * <code>start</code> - the caller should check for this case. * * @param text the text (<code>null</code> not permitted). * @param start the start index. * @param width the target display width. * @param iterator the word break iterator. * @param measurer the text measurer. * * @return The index of the next line break. 
*/ private static int nextLineBreak(String text, int start, float width, BreakIterator iterator, TextMeasurer measurer) { // this method is (loosely) based on code in JFreeReport's // TextParagraph class int current = start; int end; float x = 0.0f; boolean firstWord = true; int newline = text.indexOf('\n', start); if (newline < 0) { newline = Integer.MAX_VALUE; } while (((end = iterator.following(current)) != BreakIterator.DONE)) { x += measurer.getStringWidth(text, current, end); if (x > width) { if (firstWord) { while (measurer.getStringWidth(text, start, end) > width) { end--; if (end <= start) { return end; } } return end; } else { end = iterator.previous(); return end; } } else { if (end > newline) { return newline; } } // we found at least one word that fits ... firstWord = false; current = end; } return BreakIterator.DONE; } /** * Returns the bounds for the specified text. * * @param text the text (<code>null</code> permitted). * @param g2 the graphics context (not <code>null</code>). * @param fm the font metrics (not <code>null</code>). * * @return The text bounds (<code>null</code> if the <code>text</code> * argument is <code>null</code>). 
*/ public static Rectangle2D getTextBounds(String text, Graphics2D g2, FontMetrics fm) { Rectangle2D bounds; if (TextUtilities.useFontMetricsGetStringBounds) { bounds = fm.getStringBounds(text, g2); // getStringBounds() can return incorrect height for some Unicode // characters...see bug parade 6183356, let's replace it with // something correct LineMetrics lm = fm.getFont().getLineMetrics(text, g2.getFontRenderContext()); bounds.setRect(bounds.getX(), bounds.getY(), bounds.getWidth(), lm.getHeight()); } else { double width = fm.stringWidth(text); double height = fm.getHeight(); if (logger.isDebugEnabled()) { logger.debug("Height = " + height); } bounds = new Rectangle2D.Double(0.0, -fm.getAscent(), width, height); } return bounds; } /** * Draws a string such that the specified anchor point is aligned to the * given (x, y) location. * * @param text the text. * @param g2 the graphics device. * @param x the x coordinate (Java 2D). * @param y the y coordinate (Java 2D). * @param anchor the anchor location. * * @return The text bounds (adjusted for the text position). */ public static Rectangle2D drawAlignedString(String text, Graphics2D g2, float x, float y, TextAnchor anchor) { Rectangle2D textBounds = new Rectangle2D.Double(); float[] adjust = deriveTextBoundsAnchorOffsets(g2, text, anchor, textBounds); // adjust text bounds to match string position textBounds.setRect(x + adjust[0], y + adjust[1] + adjust[2], textBounds.getWidth(), textBounds.getHeight()); if (!drawStringsWithFontAttributes) { g2.drawString(text, x + adjust[0], y + adjust[1]); } else { AttributedString as = new AttributedString(text, g2.getFont().getAttributes()); g2.drawString(as.getIterator(), x + adjust[0], y + adjust[1]); } return textBounds; } /** * A utility method that calculates the anchor offsets for a string. * Normally, the (x, y) coordinate for drawing text is a point on the * baseline at the left of the text string. 
If you add these offsets to * (x, y) and draw the string, then the anchor point should coincide with * the (x, y) point. * * @param g2 the graphics device (not <code>null</code>). * @param text the text. * @param anchor the anchor point. * @param textBounds the text bounds (if not <code>null</code>, this * object will be updated by this method to match the * string bounds). * * @return The offsets. */ private static float[] deriveTextBoundsAnchorOffsets(Graphics2D g2, String text, TextAnchor anchor, Rectangle2D textBounds) { float[] result = new float[3]; FontRenderContext frc = g2.getFontRenderContext(); Font f = g2.getFont(); FontMetrics fm = g2.getFontMetrics(f); Rectangle2D bounds = TextUtilities.getTextBounds(text, g2, fm); LineMetrics metrics = f.getLineMetrics(text, frc); float ascent = metrics.getAscent(); result[2] = -ascent; float halfAscent = ascent / 2.0f; float descent = metrics.getDescent(); float leading = metrics.getLeading(); float xAdj = 0.0f; float yAdj = 0.0f; if (anchor.isHorizontalCenter()) { xAdj = (float) -bounds.getWidth() / 2.0f; } else if (anchor.isRight()) { xAdj = (float) -bounds.getWidth(); } if (anchor.isTop()) { yAdj = -descent - leading + (float) bounds.getHeight(); } else if (anchor.isHalfAscent()) { yAdj = halfAscent; } else if (anchor.isVerticalCenter()) { yAdj = -descent - leading + (float) (bounds.getHeight() / 2.0); } else if (anchor.isBaseline()) { yAdj = 0.0f; } else if (anchor.isBottom()) { yAdj = -metrics.getDescent() - metrics.getLeading(); } if (textBounds != null) { textBounds.setRect(bounds); } result[0] = xAdj; result[1] = yAdj; return result; } /** * A utility method for drawing rotated text. * <P> * A common rotation is -Math.PI/2 which draws text 'vertically' (with the * top of the characters on the left). * * @param text the text. * @param g2 the graphics device. * @param angle the angle of the (clockwise) rotation (in radians). * @param x the x-coordinate. * @param y the y-coordinate. 
*/ public static void drawRotatedString(String text, Graphics2D g2, double angle, float x, float y) { drawRotatedString(text, g2, x, y, angle, x, y); } /** * A utility method for drawing rotated text. * <P> * A common rotation is -Math.PI/2 which draws text 'vertically' (with the * top of the characters on the left). * * @param text the text. * @param g2 the graphics device. * @param textX the x-coordinate for the text (before rotation). * @param textY the y-coordinate for the text (before rotation). * @param angle the angle of the (clockwise) rotation (in radians). * @param rotateX the point about which the text is rotated. * @param rotateY the point about which the text is rotated. */ public static void drawRotatedString(String text, Graphics2D g2, float textX, float textY, double angle, float rotateX, float rotateY) { if ((text == null) || (text.equals(""))) { return; } if (angle == 0.0) { drawAlignedString(text, g2, textY, textY, TextAnchor.BASELINE_LEFT); return; } AffineTransform saved = g2.getTransform(); AffineTransform rotate = AffineTransform.getRotateInstance( angle, rotateX, rotateY); g2.transform(rotate); if (useDrawRotatedStringWorkaround) { // workaround for JDC bug ID 4312117 and others... TextLayout tl = new TextLayout(text, g2.getFont(), g2.getFontRenderContext()); tl.draw(g2, textX, textY); } else { if (!drawStringsWithFontAttributes) { g2.drawString(text, textX, textY); } else { AttributedString as = new AttributedString(text, g2.getFont().getAttributes()); g2.drawString(as.getIterator(), textX, textY); } } g2.setTransform(saved); } /** * Draws a string that is aligned by one anchor point and rotated about * another anchor point. * * @param text the text. * @param g2 the graphics device. * @param x the x-coordinate for positioning the text. * @param y the y-coordinate for positioning the text. * @param textAnchor the text anchor. * @param angle the rotation angle. * @param rotationX the x-coordinate for the rotation anchor point. 
* @param rotationY the y-coordinate for the rotation anchor point. */ public static void drawRotatedString(String text, Graphics2D g2, float x, float y, TextAnchor textAnchor, double angle, float rotationX, float rotationY) { if (text == null || text.equals("")) { return; } if (angle == 0.0) { drawAlignedString(text, g2, x, y, textAnchor); } else { float[] textAdj = deriveTextBoundsAnchorOffsets(g2, text, textAnchor); drawRotatedString(text, g2, x + textAdj[0], y + textAdj[1], angle, rotationX, rotationY); } } /** * Draws a string that is aligned by one anchor point and rotated about * another anchor point. * * @param text the text. * @param g2 the graphics device. * @param x the x-coordinate for positioning the text. * @param y the y-coordinate for positioning the text. * @param textAnchor the text anchor. * @param angle the rotation angle (in radians). * @param rotationAnchor the rotation anchor. */ public static void drawRotatedString(String text, Graphics2D g2, float x, float y, TextAnchor textAnchor, double angle, TextAnchor rotationAnchor) { if (text == null || text.equals("")) { return; } if (angle == 0.0) { drawAlignedString(text, g2, x, y, textAnchor); } else { float[] textAdj = deriveTextBoundsAnchorOffsets(g2, text, textAnchor); float[] rotateAdj = deriveRotationAnchorOffsets(g2, text, rotationAnchor); drawRotatedString(text, g2, x + textAdj[0], y + textAdj[1], angle, x + textAdj[0] + rotateAdj[0], y + textAdj[1] + rotateAdj[1]); } } /** * Returns a shape that represents the bounds of the string after the * specified rotation has been applied. * * @param text the text (<code>null</code> permitted). * @param g2 the graphics device. * @param x the x coordinate for the anchor point. * @param y the y coordinate for the anchor point. * @param textAnchor the text anchor. * @param angle the angle. * @param rotationAnchor the rotation anchor. * * @return The bounds (possibly <code>null</code>). 
*/ public static Shape calculateRotatedStringBounds(String text, Graphics2D g2, float x, float y, TextAnchor textAnchor, double angle, TextAnchor rotationAnchor) { if (text == null || text.equals("")) { return null; } float[] textAdj = deriveTextBoundsAnchorOffsets(g2, text, textAnchor); if (logger.isDebugEnabled()) { logger.debug("TextBoundsAnchorOffsets = " + textAdj[0] + ", " + textAdj[1]); } float[] rotateAdj = deriveRotationAnchorOffsets(g2, text, rotationAnchor); if (logger.isDebugEnabled()) { logger.debug("RotationAnchorOffsets = " + rotateAdj[0] + ", " + rotateAdj[1]); } Shape result = calculateRotatedStringBounds(text, g2, x + textAdj[0], y + textAdj[1], angle, x + textAdj[0] + rotateAdj[0], y + textAdj[1] + rotateAdj[1]); return result; } /** * A utility method that calculates the anchor offsets for a string. * Normally, the (x, y) coordinate for drawing text is a point on the * baseline at the left of the text string. If you add these offsets to * (x, y) and draw the string, then the anchor point should coincide with * the (x, y) point. * * @param g2 the graphics device (not <code>null</code>). * @param text the text. * @param anchor the anchor point. * * @return The offsets. 
*/ private static float[] deriveTextBoundsAnchorOffsets(Graphics2D g2, String text, TextAnchor anchor) { float[] result = new float[2]; FontRenderContext frc = g2.getFontRenderContext(); Font f = g2.getFont(); FontMetrics fm = g2.getFontMetrics(f); Rectangle2D bounds = TextUtilities.getTextBounds(text, g2, fm); LineMetrics metrics = f.getLineMetrics(text, frc); float ascent = metrics.getAscent(); float halfAscent = ascent / 2.0f; float descent = metrics.getDescent(); float leading = metrics.getLeading(); float xAdj = 0.0f; float yAdj = 0.0f; if (anchor.isHorizontalCenter()) { xAdj = (float) -bounds.getWidth() / 2.0f; } else if (anchor.isRight()) { xAdj = (float) -bounds.getWidth(); } if (anchor.isTop()) { yAdj = -descent - leading + (float) bounds.getHeight(); } else if (anchor.isHalfAscent()) { yAdj = halfAscent; } else if (anchor.isVerticalCenter()) { yAdj = -descent - leading + (float) (bounds.getHeight() / 2.0); } else if (anchor.isBaseline()) { yAdj = 0.0f; } else if (anchor.isBottom()) { yAdj = -metrics.getDescent() - metrics.getLeading(); } result[0] = xAdj; result[1] = yAdj; return result; } /** * A utility method that calculates the rotation anchor offsets for a * string. These offsets are relative to the text starting coordinate * (<code>BASELINE_LEFT</code>). * * @param g2 the graphics device. * @param text the text. * @param anchor the anchor point. * * @return The offsets. 
*/ private static float[] deriveRotationAnchorOffsets(Graphics2D g2, String text, TextAnchor anchor) { float[] result = new float[2]; FontRenderContext frc = g2.getFontRenderContext(); LineMetrics metrics = g2.getFont().getLineMetrics(text, frc); FontMetrics fm = g2.getFontMetrics(); Rectangle2D bounds = TextUtilities.getTextBounds(text, g2, fm); float ascent = metrics.getAscent(); float halfAscent = ascent / 2.0f; float descent = metrics.getDescent(); float leading = metrics.getLeading(); float xAdj = 0.0f; float yAdj = 0.0f; if (anchor.isLeft()) { xAdj = 0.0f; } else if (anchor.isHorizontalCenter()) { xAdj = (float) bounds.getWidth() / 2.0f; } else if (anchor.isRight()) { xAdj = (float) bounds.getWidth(); } if (anchor.isTop()) { yAdj = descent + leading - (float) bounds.getHeight(); } else if (anchor.isVerticalCenter()) { yAdj = descent + leading - (float) (bounds.getHeight() / 2.0); } else if (anchor.isHalfAscent()) { yAdj = -halfAscent; } else if (anchor.isBaseline()) { yAdj = 0.0f; } else if (anchor.isBottom()) { yAdj = metrics.getDescent() + metrics.getLeading(); } result[0] = xAdj; result[1] = yAdj; return result; } /** * Returns a shape that represents the bounds of the string after the * specified rotation has been applied. * * @param text the text (<code>null</code> permitted). * @param g2 the graphics device. * @param textX the x coordinate for the text. * @param textY the y coordinate for the text. * @param angle the angle. * @param rotateX the x coordinate for the rotation point. * @param rotateY the y coordinate for the rotation point. * * @return The bounds (<code>null</code> if <code>text</code> is * </code>null</code> or has zero length). 
*/ public static Shape calculateRotatedStringBounds(String text, Graphics2D g2, float textX, float textY, double angle, float rotateX, float rotateY) { if ((text == null) || (text.equals(""))) { return null; } FontMetrics fm = g2.getFontMetrics(); Rectangle2D bounds = TextUtilities.getTextBounds(text, g2, fm); AffineTransform translate = AffineTransform.getTranslateInstance( textX, textY); Shape translatedBounds = translate.createTransformedShape(bounds); AffineTransform rotate = AffineTransform.getRotateInstance( angle, rotateX, rotateY); Shape result = rotate.createTransformedShape(translatedBounds); return result; } /** * Returns the flag that controls whether the FontMetrics.getStringBounds() * method is used or not. If you are having trouble with label alignment * or positioning, try changing the value of this flag. * * @return A boolean. */ public static boolean getUseFontMetricsGetStringBounds() { return useFontMetricsGetStringBounds; } /** * Sets the flag that controls whether the FontMetrics.getStringBounds() * method is used or not. If you are having trouble with label alignment * or positioning, try changing the value of this flag. * * @param use the flag. */ public static void setUseFontMetricsGetStringBounds(boolean use) { useFontMetricsGetStringBounds = use; } /** * Returns the flag that controls whether or not a workaround is used for * drawing rotated strings. * * @return A boolean. */ public static boolean isUseDrawRotatedStringWorkaround() { return useDrawRotatedStringWorkaround; } /** * Sets the flag that controls whether or not a workaround is used for * drawing rotated strings. The related bug is on Sun's bug parade * (id 4312117) and the workaround involves using a <code>TextLayout</code> * instance to draw the text instead of calling the * <code>drawString()</code> method in the <code>Graphics2D</code> class. * * @param use the new flag value. 
*/ public static void setUseDrawRotatedStringWorkaround(boolean use) { TextUtilities.useDrawRotatedStringWorkaround = use; } /** * Returns the flag that controls whether or not strings are drawn using * the current font attributes (such as underlining, strikethrough etc). * The default value is <code>false</code>. * * @return A boolean. * * @since 1.0.21 */ public static boolean getDrawStringsWithFontAttributes() { return TextUtilities.drawStringsWithFontAttributes; } /** * Sets the flag that controls whether or not strings are drawn using the * current font attributes. This is a hack to allow underlining of titles * without big changes to the API. See: * http://www.jfree.org/phpBB2/viewtopic.php?p=45459&highlight=#45459 * * @param b the new flag value. * * @since 1.0.21 */ public static void setDrawStringsWithFontAttributes(boolean b) { TextUtilities.drawStringsWithFontAttributes = b; } }
package com.noveogroup.screen_shot_report.controllers;

import com.taskadapter.redmineapi.NotAuthorizedException;
import com.taskadapter.redmineapi.RedMineException;
import com.taskadapter.redmineapi.RedmineManager;
import com.taskadapter.redmineapi.bean.Attachment;
import com.taskadapter.redmineapi.bean.Changeset;
import com.taskadapter.redmineapi.bean.Issue;
import com.taskadapter.redmineapi.bean.IssueStatus;
import com.taskadapter.redmineapi.bean.Membership;
import com.taskadapter.redmineapi.bean.Project;
import com.taskadapter.redmineapi.bean.User;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import rx.Observable;
import rx.Subscriber;
import rx.android.schedulers.AndroidSchedulers;
import rx.schedulers.Schedulers;

/**
 * Thin reactive facade over {@link RedmineManager}: every Redmine call is
 * wrapped in an {@link Observable} that does its work on the I/O scheduler
 * and delivers results on the Android main thread.
 * <p>
 * Created by oisupov on 4/10/14.
 */
public class RedMineControllerWrapper {

    private Logger logger = LoggerFactory.getLogger(RedMineControllerWrapper.class);

    private String login;
    private String password;
    private String server;

    private RedmineManager redmineManager;

    /**
     * Creates a wrapper bound to the given Redmine server and credentials.
     *
     * @param login    Redmine account login
     * @param password Redmine account password
     * @param server   base URL of the Redmine server
     */
    public RedMineControllerWrapper(String login, String password, String server) {
        this.login = login;
        this.password = password;
        this.server = server;
        createManager();
    }

    // Builds the underlying RedmineManager from the stored credentials.
    private void createManager() {
        redmineManager = new RedmineManager(server);
        redmineManager.setLogin(login);
        redmineManager.setPassword(password);
    }

    /** Emits the list of projects visible to the configured account. */
    public Observable<List<Project>> getListOfProjects() {
        return wrapAsync(new Observable.OnSubscribe<List<Project>>() {
            @Override
            public void call(Subscriber<? super List<Project>> obs) {
                try {
                    obs.onNext(redmineManager.getProjects());
                    obs.onCompleted();
                } catch (RedMineException e) {
                    logger.trace(e.getMessage(), e);
                    obs.onError(e);
                }
            }
        });
    }

    /** Emits the issues of the given project. */
    public Observable<List<Issue>> getListOfIssues(final Project project) {
        return wrapAsync(new Observable.OnSubscribe<List<Issue>>() {
            @Override
            public void call(Subscriber<? super List<Issue>> obs) {
                try {
                    obs.onNext(redmineManager.getIssues(String.valueOf(project.getId()), null));
                    obs.onCompleted();
                } catch (RedMineException e) {
                    logger.trace(e.getMessage(), e);
                    obs.onError(e);
                }
            }
        });
    }

    /**
     * Builds a new issue from the given fields (assignee and status are
     * optional) and creates it on the server, attaching the optional
     * screenshot and log files.
     */
    public Observable<Issue> postNewIssue(final Project project, final User assignee,
                                          final String statusName, final String title,
                                          final String message, final String pictureFilename,
                                          final String logsFilename) {
        final Issue issue = new Issue();
        issue.setProject(project);
        issue.setSubject(title);
        issue.setDescription(message);
        if (assignee != null) {
            issue.setAssignee(assignee);
        }
        if (statusName != null) {
            issue.setStatusName(statusName);
        }
        return createIssue(project, issue, pictureFilename, logsFilename);
    }

    // Uploads the attachments and then creates the issue on the server.
    private Observable<Issue> createIssue(final Project project, final Issue issue,
                                          final String pictureFilename, final String logsFilename) {
        return wrapAsync(new Observable.OnSubscribe<Issue>() {
            @Override
            public void call(Subscriber<? super Issue> obs) {
                try {
                    uploadAttachments(pictureFilename, issue, logsFilename);
                    obs.onNext(redmineManager.createIssue(String.valueOf(project.getId()), issue));
                    obs.onCompleted();
                } catch (Exception e) {
                    logger.trace(e.getMessage(), e);
                    obs.onError(e);
                }
            }
        });
    }

    // Uploads the screenshot ("image/jpeg") and log ("text") files, if
    // provided, and registers them on the issue.
    private void uploadAttachments(String pictureFilename, Issue issue, String logsFilename)
            throws RedMineException, IOException {
        if (pictureFilename != null) {
            Attachment attachment = redmineManager.uploadAttachment("image/jpeg", new File(pictureFilename));
            issue.getAttachments().add(attachment);
        }
        if (logsFilename != null) {
            Attachment attachment = redmineManager.uploadAttachment("text", new File(logsFilename));
            issue.getAttachments().add(attachment);
        }
    }

    // Uploads the attachments and pushes the modified issue to the server.
    private Observable<Void> updateIssue(final Issue issue, final String pictureFilename,
                                         final String logsFilename) {
        return wrapAsync(new Observable.OnSubscribe<Void>() {
            @Override
            public void call(Subscriber<? super Void> obs) {
                try {
                    uploadAttachments(pictureFilename, issue, logsFilename);
                    redmineManager.update(issue);
                    obs.onCompleted();
                } catch (Exception e) {
                    logger.trace(e.getMessage(), e);
                    obs.onError(e);
                }
            }
        });
    }

    /**
     * Emits the memberships of the given project.  A 403 from the server
     * (NotAuthorizedException) is treated as "no memberships visible" and
     * yields an empty list rather than an error.
     */
    public Observable<List<Membership>> getMemberships(final Project project) {
        return wrapAsync(new Observable.OnSubscribe<List<Membership>>() {
            @Override
            public void call(Subscriber<? super List<Membership>> obs) {
                try {
                    obs.onNext(redmineManager.getMemberships(project));
                    obs.onCompleted();
                } catch (RedMineException e) {
                    logger.trace(e.getMessage(), e);
                    if (e instanceof NotAuthorizedException) {
                        // The user may lack permission to read memberships;
                        // degrade gracefully to an empty list.
                        obs.onNext(new ArrayList<Membership>());
                        obs.onCompleted();
                    } else {
                        obs.onError(e);
                    }
                }
            }
        });
    }

    /**
     * Emits the available issue statuses.  As with memberships, a
     * NotAuthorizedException degrades to an empty list.
     */
    public Observable<List<IssueStatus>> getStatuses() {
        return wrapAsync(new Observable.OnSubscribe<List<IssueStatus>>() {
            @Override
            public void call(Subscriber<? super List<IssueStatus>> obs) {
                try {
                    obs.onNext(redmineManager.getStatuses());
                    obs.onCompleted();
                } catch (RedMineException e) {
                    logger.trace(e.getMessage(), e);
                    if (e instanceof NotAuthorizedException) {
                        // The user may lack permission to read statuses;
                        // degrade gracefully to an empty list.
                        obs.onNext(new ArrayList<IssueStatus>());
                        obs.onCompleted();
                    } else {
                        obs.onError(e);
                    }
                }
            }
        });
    }

    /**
     * Adds a comment (issue note) composed of the title and message to an
     * existing issue, optionally reassigning it, changing its status and
     * attaching a screenshot / log file, then updates it on the server.
     */
    public Observable<Void> postCommentToTicket(final Issue issue, final User assignee,
                                                final String statusName, final String title,
                                                final String message, final String pictureFilename,
                                                final String logsFilename) {
        issue.setNotes(title + "\n" + message);
        if (assignee != null) {
            issue.setAssignee(assignee);
        }
        if (statusName != null) {
            issue.setStatusName(statusName);
        }
        return updateIssue(issue, pictureFilename, logsFilename);
    }

    // Subscribes the work on the I/O scheduler and observes on the Android
    // main thread, which is the contract for every public method here.
    private <T> Observable<T> wrapAsync(Observable.OnSubscribe<T> onSubscribe) {
        return Observable.create(onSubscribe)
                .subscribeOn(Schedulers.io())
                .observeOn(AndroidSchedulers.mainThread());
    }
}
package hex;

import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import water.*;
import water.fvec.*;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

import static org.junit.Assert.assertEquals;

// test cases:
// skipMissing = TRUE/FALSE
// useAllLevels = TRUE/FALSE
// limit enums
// (dont) standardize predictor columns

// data info tests with interactions
public class DataInfoTest extends TestUtil {

  @BeforeClass static public void setup() { stall_till_cloudsize(1); }

  @Test public void testAirlines1() {  // just test that it works at all
    Frame fr = parseTestFile(Key.make("a.hex"), "smalldata/airlines/allyears2k_headers.zip");
    try {
      DataInfo dinfo = new DataInfo(
              fr.clone(),  // train
              null,        // valid
              1,           // num responses
              true,        // use all factor levels
              DataInfo.TransformType.STANDARDIZE,  // predictor transform
              DataInfo.TransformType.NONE,         // response transform
              true,        // skip missing
              false,       // impute missing
              false,       // missing bucket
              false,       // weight
              false,       // offset
              false,       // fold
              Model.InteractionSpec.allPairwise(new String[]{fr.name(8), fr.name(16), fr.name(2)})  // interactions
      );
      dinfo.dropInteractions();
      dinfo.remove();
    } finally {
      fr.delete();
    }
  }

  // Interactions with all factor levels: expanded width of the DataInfo must
  // grow by exactly the combined expanded length of the interaction vecs.
  @Test public void testAirlines2() {
    Frame fr = parseTestFile(Key.make("a.hex"), "smalldata/airlines/allyears2k_headers.zip");
    try {
      Frame interactions = Model.makeInteractions(fr, false, Model.InteractionPair.generatePairwiseInteractionsFromList(8, 16, 2), true, true, true);
      int len = 0;
      for (Vec v : interactions.vecs()) len += ((InteractionWrappedVec) v).expandedLength();
      interactions.delete();
      Assert.assertEquals(290 + 132 + 10, len);

      DataInfo dinfo__noInteractions = new DataInfo(
              fr.clone(),  // train
              null,        // valid
              1,           // num responses
              true,        // use all factor levels
              DataInfo.TransformType.STANDARDIZE,  // predictor transform
              DataInfo.TransformType.NONE,         // response transform
              true,        // skip missing
              false,       // impute missing
              false,       // missing bucket
              false,       // weight
              false,       // offset
              false,       // fold
              null
      );
      System.out.println(dinfo__noInteractions.fullN());
      System.out.println(dinfo__noInteractions.numNums());

      DataInfo dinfo__withInteractions = new DataInfo(
              fr.clone(),  // train
              null,        // valid
              1,           // num responses
              true,        // use all factor levels
              DataInfo.TransformType.STANDARDIZE,  // predictor transform
              DataInfo.TransformType.NONE,         // response transform
              true,        // skip missing
              false,       // impute missing
              false,       // missing bucket
              false,       // weight
              false,       // offset
              false,       // fold
              Model.InteractionSpec.allPairwise(new String[]{fr.name(8), fr.name(16), fr.name(2)})  // interactions
      );
      System.out.println(dinfo__withInteractions.fullN());
      Assert.assertEquals(dinfo__noInteractions.fullN() + len, dinfo__withInteractions.fullN());
      dinfo__withInteractions.dropInteractions();
      dinfo__noInteractions.remove();
      dinfo__withInteractions.remove();
    } finally {
      fr.delete();
    }
  }

  // Same as testAirlines2 but without using all factor levels (first level
  // is dropped), so the expected expanded length differs.
  @Test public void testAirlines3() {
    Frame fr = parseTestFile(Key.make("a.hex"), "smalldata/airlines/allyears2k_headers.zip");
    try {
      Frame interactions = Model.makeInteractions(fr, false, Model.InteractionPair.generatePairwiseInteractionsFromList(8, 16, 2), false, true, true);
      int len = 0;
      for (Vec v : interactions.vecs()) len += ((InteractionWrappedVec) v).expandedLength();
      interactions.delete();
      Assert.assertEquals(426, len);

      DataInfo dinfo__noInteractions = new DataInfo(
              fr.clone(),  // train
              null,        // valid
              1,           // num responses
              false,       // use all factor levels
              DataInfo.TransformType.STANDARDIZE,  // predictor transform
              DataInfo.TransformType.NONE,         // response transform
              true,        // skip missing
              false,       // impute missing
              false,       // missing bucket
              false,       // weight
              false,       // offset
              false,       // fold
              null
      );
      System.out.println(dinfo__noInteractions.fullN());

      DataInfo dinfo__withInteractions = new DataInfo(
              fr.clone(),  // train
              null,        // valid
              1,           // num responses
              false,       // use all factor levels
              DataInfo.TransformType.STANDARDIZE,  // predictor transform
              DataInfo.TransformType.NONE,         // response transform
              true,        // skip missing
              false,       // impute missing
              false,       // missing bucket
              false,       // weight
              false,       // offset
              false,       // fold
              Model.InteractionSpec.allPairwise(new String[]{fr.name(8), fr.name(16), fr.name(2)})  // interactions
      );
      System.out.println(dinfo__withInteractions.fullN());
      Assert.assertEquals(dinfo__noInteractions.fullN() + len, dinfo__withInteractions.fullN());
      dinfo__withInteractions.dropInteractions();
      dinfo__noInteractions.remove();
      dinfo__withInteractions.remove();
    } finally {
      fr.delete();
    }
  }

  // Explicit interaction pairs + a dropped column: check the adapted frame's
  // column ordering (interactions first, then the remaining columns).
  @Test public void testAirlinesInteractionSpec() {
    try {
      Scope.enter();
      Frame fr = Scope.track(parseTestFile(Key.make("a.hex"), "smalldata/airlines/allyears2k_headers.zip"));
      Model.InteractionSpec interactionSpec = Model.InteractionSpec.create(
              null,
              new StringPair[]{new StringPair("UniqueCarrier", "Origin"), new StringPair("Origin", "DayofMonth")},
              new String[]{"UniqueCarrier"}
      );
      DataInfo dinfo = new DataInfo(
              fr.clone(),  // train
              null,        // valid
              1,           // num responses
              false,       // use all factor levels
              DataInfo.TransformType.STANDARDIZE,  // predictor transform
              DataInfo.TransformType.NONE,         // response transform
              true,        // skip missing
              false,       // impute missing
              false,       // missing bucket
              false,       // weight
              false,       // offset
              false,       // fold
              interactionSpec  // interactions
      );
      Scope.track_generic(dinfo);
      Assert.assertArrayEquals(new String[]{
              "TailNum", "UniqueCarrier_Origin", "Dest", "Origin", "CancellationCode", "IsArrDelayed",
              "Origin_DayofMonth", "Year", "Month", "DayofMonth", "DayOfWeek", "DepTime", "CRSDepTime",
              "ArrTime", "CRSArrTime", "FlightNum", "ActualElapsedTime", "CRSElapsedTime", "AirTime",
              "ArrDelay", "DepDelay", "Distance", "TaxiIn", "TaxiOut", "Cancelled", "Diverted",
              "CarrierDelay", "WeatherDelay", "NASDelay", "SecurityDelay", "LateAircraftDelay",
              "IsDepDelayed"}, dinfo._adaptedFrame._names);
    } finally {
      Scope.exit();
    }
  }

  @Test public void testIris1() {  // test that getting sparseRows and denseRows produce the same results
    Frame fr = parseTestFile(Key.make("a.hex"), "smalldata/iris/iris_wheader.csv");
    fr.swap(1, 4);
    DataInfo di = null;
    try {
      di = new DataInfo(
              fr.clone(),  // train
              null,        // valid
              1,           // num responses
              true,        // use all factor levels
              DataInfo.TransformType.NONE,  // predictor transform
              DataInfo.TransformType.NONE,  // response transform
              true,        // skip missing
              false,       // impute missing
              false,       // missing bucket
              false,       // weight
              false,       // offset
              false,       // fold
              Model.InteractionSpec.allPairwise(new String[]{fr.name(0), fr.name(1)})  // interactions
      );
      checker(di, false);
    } finally {
      fr.delete();
      if (di != null) {
        di.dropInteractions();
        di.remove();
      }
    }
  }

  @Test public void testIris2() {  // same as testIris1, with standardization
    Frame fr = parseTestFile(Key.make("a.hex"), "smalldata/iris/iris_wheader.csv");
    fr.swap(1, 4);
    DataInfo di = null;
    try {
      di = new DataInfo(
              fr.clone(),  // train
              null,        // valid
              1,           // num responses
              true,        // use all factor levels
              DataInfo.TransformType.STANDARDIZE,  // predictor transform
              DataInfo.TransformType.NONE,         // response transform
              true,        // skip missing
              false,       // impute missing
              false,       // missing bucket
              false,       // weight
              false,       // offset
              false,       // fold
              Model.InteractionSpec.allPairwise(new String[]{fr.name(0), fr.name(1)})  // interactions
      );
      checker(di, true);
    } finally {
      fr.delete();
      if (di != null) {
        di.dropInteractions();
        di.remove();
      }
    }
  }

  @Test public void testIris3() {  // all-pairwise over four columns, standardized
    Frame fr = parseTestFile(Key.make("a.hex"), "smalldata/iris/iris_wheader.csv");
    fr.swap(2, 4);
    DataInfo di = null;
    try {
      di = new DataInfo(
              fr.clone(),  // train
              null,        // valid
              1,           // num responses
              true,        // use all factor levels
              DataInfo.TransformType.STANDARDIZE,  // predictor transform
              DataInfo.TransformType.NONE,         // response transform
              true,        // skip missing
              false,       // impute missing
              false,       // missing bucket
              false,       // weight
              false,       // offset
              false,       // fold
              Model.InteractionSpec.allPairwise(new String[]{fr.name(0), fr.name(1), fr.name(2), fr.name(3)})  // interactions
      );
      checker(di, true);
    } finally {
      fr.delete();
      if (di != null) {
        di.dropInteractions();
        di.remove();
      }
    }
  }

  @Test public void testAirlines4() {
    Frame fr = parseTestFile(Key.make("a0.hex"), "smalldata/airlines/allyears2k_headers.zip");
    // fixme need to rebalance to 1 chunk, otherwise the test does not pass!
    Key k = Key.make("a.hex");
    H2O.submitTask(new RebalanceDataSet(fr, k, 1)).join();
    fr.delete();
    fr = DKV.getGet(k);
    DataInfo di = null;
    try {
      di = new DataInfo(
              fr.clone(),  // train
              null,        // valid
              1,           // num responses
              true,        // use all factor levels
              DataInfo.TransformType.STANDARDIZE,  // predictor transform
              DataInfo.TransformType.NONE,         // response transform
              true,        // skip missing
              false,       // impute missing
              false,       // missing bucket
              false,       // weight
              false,       // offset
              false,       // fold
              Model.InteractionSpec.allPairwise(new String[]{fr.name(8), fr.name(16), fr.name(2)})  // interactions
      );
      checker(di, true);
    } finally {
      fr.delete();
      if (di != null) {
        di.dropInteractions();
        di.remove();
      }
    }
  }

  @Test public void testAirlines5() {
    Frame fr = parseTestFile(Key.make("a0.hex"), "smalldata/airlines/allyears2k_headers.zip");
    // fixme need to rebalance to 1 chunk, otherwise the test does not pass!
    Key k = Key.make("a.hex");
    H2O.submitTask(new RebalanceDataSet(fr, k, 1)).join();
    fr.delete();
    fr = DKV.getGet(k);
    DataInfo di = null;
    try {
      di = new DataInfo(
              fr.clone(),  // train
              null,        // valid
              1,           // num responses
              false,       // use all factor levels
              DataInfo.TransformType.STANDARDIZE,  // predictor transform
              DataInfo.TransformType.NONE,         // response transform
              true,        // skip missing
              false,       // impute missing
              false,       // missing bucket
              false,       // weight
              false,       // offset
              false,       // fold
              Model.InteractionSpec.allPairwise(new String[]{fr.name(8), fr.name(16), fr.name(2)})  // interactions
      );
      checker(di, true);
    } finally {
      fr.delete();
      if (di != null) {
        di.dropInteractions();
        di.remove();
      }
    }
  }

  // Coefficient names are lazily computed, cached, and must survive a
  // serialization round-trip.
  @Test public void testCoefNames() throws IOException {
    Frame fr = parseTestFile(Key.make("a.hex"), "smalldata/airlines/allyears2k_headers.zip");
    DataInfo dinfo = null;
    try {
      dinfo = new DataInfo(
              fr.clone(),  // train
              null,        // valid
              1,           // num responses
              true,        // use all factor levels
              DataInfo.TransformType.STANDARDIZE,  // predictor transform
              DataInfo.TransformType.NONE,         // response transform
              true,        // skip missing
              false,       // impute missing
              false,       // missing bucket
              false,       // weight
              false,       // offset
              false,       // fold
              Model.InteractionSpec.allPairwise(new String[]{fr.name(8), fr.name(16), fr.name(2)})  // interactions
      );
      Assert.assertNull(dinfo._coefNames);  // coef names are not populated at first
      final String[] cn = dinfo.coefNames();
      Assert.assertNotNull(cn);
      Assert.assertArrayEquals(cn, dinfo._coefNames);  // coef names are cached after first accessed
      DKV.put(dinfo);
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      dinfo.writeAll(new AutoBuffer(baos, true)).close();
      baos.close();
      ByteArrayInputStream input = new ByteArrayInputStream(baos.toByteArray());
      DataInfo deserialized = (DataInfo) Keyed.readAll(new AutoBuffer(input));
      Assert.assertNotNull(deserialized);
      Assert.assertArrayEquals(cn, deserialized._coefNames);  // coef names were preserved in the deserialized object
    } finally {
      if (dinfo != null) {
        dinfo.dropInteractions();
        dinfo.remove();
      }
      fr.delete();
    }
  }

  @Test public void testInteractionsForcedAllFactors() {
    try {
      Scope.enter();
      Frame fr = Scope.track(parseTestFile(Key.make("a.hex"), "smalldata/airlines/allyears2k_headers.zip"));
      Frame sfr = fr.subframe(new String[]{"Origin", "Distance"});
      Model.InteractionSpec interactionSpec = Model.InteractionSpec.create(
              new String[]{"Origin", "Distance"}, null, new String[]{"Distance"});
      DataInfo dinfo = new DataInfo(
              sfr,    // train
              null,   // valid
              1,      // num responses
              false,  // DON'T use all factor levels
              DataInfo.TransformType.STANDARDIZE,  // predictor transform
              DataInfo.TransformType.NONE,         // response transform
              true,   // skip missing
              false,  // impute missing
              false,  // missing bucket
              false,  // weight
              false,  // offset
              false,  // fold
              interactionSpec  // interaction spec
      );
      // a forced all-factors interaction expands to one coefficient per level
      assertEquals(fr.vec("Origin").domain().length, dinfo.coefNames().length);
      String[] expected = new String[dinfo.coefNames().length];
      for (int i = 0; i < expected.length; i++)
        expected[i] = "Origin_Distance." + sfr.vec("Origin").domain()[i];
      Assert.assertArrayEquals(expected, dinfo.coefNames());
      dinfo.dropInteractions();
      dinfo.remove();
    } finally {
      Scope.exit();
    }
  }

  @Test public void testInteractionsSkip1stFactor() {
    try {
      Scope.enter();
      Frame fr = Scope.track(parseTestFile(Key.make("a.hex"), "smalldata/airlines/allyears2k_headers.zip"));
      Frame sfr = fr.subframe(new String[]{"Origin", "Distance", "IsDepDelayed"});
      Model.InteractionSpec interactionSpec = Model.InteractionSpec.create(
              new String[]{"Origin", "Distance"}, null, new String[]{"Origin"});
      DataInfo dinfo = new DataInfo(
              sfr,    // train
              null,   // valid
              1,      // num responses
              false,  // DON'T use all factor levels
              DataInfo.TransformType.STANDARDIZE,  // predictor transform
              DataInfo.TransformType.NONE,         // response transform
              true,   // skip missing
              false,  // impute missing
              false,  // missing bucket
              false,  // weight
              false,  // offset
              false,  // fold
              interactionSpec  // interaction spec
      );
      // Check that we get correct expanded coefficients and "Distance" is not dropped
      assertEquals(fr.vec("Origin").domain().length, dinfo.coefNames().length);
      String[] expected = new String[dinfo.coefNames().length];
      expected[expected.length - 1] = "Distance";
      for (int i = 0; i < expected.length - 1; i++)
        expected[i] = "Origin_Distance." + fr.vec("Origin").domain()[i + 1];
      Assert.assertArrayEquals(expected, dinfo.coefNames());
      // Check that we can look-up "Categorical Id" for valid levels
      for (int j = /*don't use all factor levels*/ 1; j < dinfo._adaptedFrame.vec(0).domain().length; j++) {
        if (dinfo.getCategoricalIdFromInteraction(0, j) < 0)
          Assert.fail("Categorical value should be recognized: " + j);
      }
      // Check that we get "mode" for unknown level
      dinfo._valid = true;
      assertEquals(fr.vec("Origin").mode(),
              dinfo.getCategoricalIdFromInteraction(0, dinfo._adaptedFrame.vec(0).domain().length));
      dinfo.dropInteractions();
      dinfo.remove();
    } finally {
      Scope.exit();
    }
  }

  @Test public void testGetCategoricalIdFromInteraction() {
    try {
      Scope.enter();
      Frame fr = Scope.track(parseTestFile(Key.make("a.hex"), "smalldata/airlines/allyears2k_headers.zip"));
      Frame sfr = fr.subframe(new String[]{"Origin", "Distance", "IsDepDelayed"});
      Model.InteractionSpec interactionSpec = Model.InteractionSpec.create(
              new String[]{"Origin", "Distance"}, null, new String[]{"Origin"});
      DataInfo dinfo = new DataInfo(
              sfr,    // train
              null,   // valid
              1,      // num responses
              false,  // DON'T use all factor levels
              DataInfo.TransformType.STANDARDIZE,  // predictor transform
              DataInfo.TransformType.NONE,         // response transform
              true,   // skip missing
              false,  // impute missing
              false,  // missing bucket
              false,  // weight
              false,  // offset
              false,  // fold
              interactionSpec  // interaction spec
      );
      // Check that we can look-up "Categorical Id" for valid levels
      for (int j = /*don't use all factor levels*/ 1; j < dinfo._adaptedFrame.vec(0).domain().length; j++) {
        if (dinfo.getCategoricalIdFromInteraction(0, j) < 0)
          Assert.fail("Categorical value should be recognized: " + j);
      }
      // Check that we get "mode" for unknown level
      dinfo._valid = true;
      assertEquals(fr.vec("Origin").mode(),
              dinfo.getCategoricalIdFromInteraction(0, dinfo._adaptedFrame.vec(0).domain().length));
      dinfo.dropInteractions();
      dinfo.remove();
    } finally {
      Scope.exit();
    }
  }

  // Builds a few dense rows from a tiny one-chunk frame, used by the row
  // operation tests below (inner product, two-norm squared).
  private static DataInfo.Row[] makeRowsOpsTestData() {
    Frame f = TestFrameCatalog.oneChunkFewRows();
    DataInfo di = new DataInfo(f, null, 1, true,
            DataInfo.TransformType.NONE, DataInfo.TransformType.NONE,
            true, false, false, false, false, false, null)
            .disableIntercept();
    Chunk[] chks = new Chunk[f.numCols()];
    for (int i = 0; i < chks.length; i++)
      chks[i] = di._adaptedFrame.vec(i).chunkForChunkIdx(0);
    return new DataInfo.Row[]{
            di.extractDenseRow(chks, 0, di.newDenseRow()),
            di.extractDenseRow(chks, 1, di.newDenseRow()),
            di.extractDenseRow(chks, 2, di.newDenseRow())
    };
  }

  @Test public void testInnerProduct() {
    Scope.enter();
    try {
      DataInfo.Row[] rs = makeRowsOpsTestData();
      assertEquals(3.44, rs[0].innerProduct(rs[0]), 0);
      assertEquals(4.08, rs[0].innerProduct(rs[1]), 0);
      assertEquals(6.72, rs[0].innerProduct(rs[2]), 0);
    } finally {
      Scope.exit();
    }
  }

  @Test public void testTwoNormSq() {
    Scope.enter();
    try {
      DataInfo.Row[] rs = makeRowsOpsTestData();
      assertEquals(3.44, rs[0].twoNormSq(), 0);
      assertEquals(rs[1].innerProduct(rs[1]), rs[1].twoNormSq(), 0);
      assertEquals(rs[2].innerProduct(rs[2]), rs[2].twoNormSq(), 0);
    } finally {
      Scope.exit();
    }
  }

  // Diagnostic dump used when checker() finds a dense/sparse mismatch.
  private static void printVals(DataInfo di, DataInfo.Row denseRow, DataInfo.Row sparseRow) {
    System.out.println("col|dense|sparse|sparseScaled");
    double sparseScaled;
    String line;
    for (int i = 0; i < di.fullN(); ++i) {
      sparseScaled = sparseRow.get(i);
      if (i >= di.numStart())
        sparseScaled -= (di._normSub[i - di.numStart()] * di._normMul[i - di.numStart()]);
      line = i + "|" + denseRow.get(i) + "|" + sparseRow.get(i) + "|" + sparseScaled;
      if (Math.abs(denseRow.get(i) - sparseScaled) > 1e-14)
        System.out.println(">" + line + "<");
    }
  }

  // Verifies that dense and sparse row extraction agree (to 1e-10) over the
  // whole adapted frame; sparse rows only get the "finishing" part of the
  // standardization applied here before comparing.
  private static void checker(final DataInfo di, final boolean standardize) {
    new MRTask() {
      @Override public void map(Chunk[] cs) {
        DataInfo.Row[] sparseRows = di.extractSparseRows(cs);
        DataInfo.Row r = di.newDenseRow();
        for (int i = 0; i < cs[0]._len; ++i) {
          di.extractDenseRow(cs, i, r);
          for (int j = 0; j < di.fullN(); ++j) {
            double sparseDoubleScaled = sparseRows[i].get(j);  // extracting sparse rows does not do the full scaling!!
            if (j >= di.numStart()) {  // finish scaling the sparse value
              sparseDoubleScaled -= (standardize ? (di._normSub[j - di.numStart()] * di._normMul[j - di.numStart()]) : 0);
            }
            if (r.isBad() || sparseRows[i].isBad()) {
              if (sparseRows[i].isBad() && r.isBad()) continue;  // both bad OK
              throw new RuntimeException("dense row was " + (r.isBad() ? "bad" : "not bad")
                      + "; but sparse row was " + (sparseRows[i].isBad() ? "bad" : "not bad"));
            }
            if (Math.abs(r.get(j) - sparseDoubleScaled) > 1e-10) {
              printVals(di, r, sparseRows[i]);
              throw new RuntimeException("Row mismatch on row " + i);
            }
          }
        }
      }
    }.doAll(di._adaptedFrame);
  }
}
/* * Copyright (c) 2003, 2007, Oracle and/or its affiliates. All rights reserved. * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms. * * * * * * * * * * * * * * * * * * * * */ /* * Portions Copyright IBM Corporation, 1997, 2001. All Rights Reserved. */ package javay.test.math; import java.io.*; /** * Immutable objects which encapsulate the context settings which * describe certain rules for numerical operators, such as those * implemented by the {@link BigDecimal} class. * * <p>The base-independent settings are: * <ol> * <li>{@code precision}: * the number of digits to be used for an operation; results are * rounded to this precision * * <li>{@code roundingMode}: * a {@link RoundingMode} object which specifies the algorithm to be * used for rounding. * </ol> * * @see BigDecimal * @see RoundingMode * @author Mike Cowlishaw * @author Joseph D. Darcy * @since 1.5 */ public final class MathContext implements Serializable { /* ----- Constants ----- */ // defaults for constructors private static final int DEFAULT_DIGITS = 9; private static final RoundingMode DEFAULT_ROUNDINGMODE = RoundingMode.HALF_UP; // Smallest values for digits (Maximum is Integer.MAX_VALUE) private static final int MIN_DIGITS = 0; // Serialization version private static final long serialVersionUID = 5579720004786848255L; /* ----- Public Properties ----- */ /** * A {@code MathContext} object whose settings have the values * required for unlimited precision arithmetic. * The values of the settings are: * <code> * precision=0 roundingMode=HALF_UP * </code> */ public static final MathContext UNLIMITED = new MathContext(0, RoundingMode.HALF_UP); /** * A {@code MathContext} object with a precision setting * matching the IEEE 754R Decimal32 format, 7 digits, and a * rounding mode of {@link RoundingMode#HALF_EVEN HALF_EVEN}, the * IEEE 754R default. 
*/ public static final MathContext DECIMAL32 = new MathContext(7, RoundingMode.HALF_EVEN); /** * A {@code MathContext} object with a precision setting * matching the IEEE 754R Decimal64 format, 16 digits, and a * rounding mode of {@link RoundingMode#HALF_EVEN HALF_EVEN}, the * IEEE 754R default. */ public static final MathContext DECIMAL64 = new MathContext(16, RoundingMode.HALF_EVEN); /** * A {@code MathContext} object with a precision setting * matching the IEEE 754R Decimal128 format, 34 digits, and a * rounding mode of {@link RoundingMode#HALF_EVEN HALF_EVEN}, the * IEEE 754R default. */ public static final MathContext DECIMAL128 = new MathContext(34, RoundingMode.HALF_EVEN); /* ----- Shared Properties ----- */ /** * The number of digits to be used for an operation. A value of 0 * indicates that unlimited precision (as many digits as are * required) will be used. Note that leading zeros (in the * coefficient of a number) are never significant. * * <p>{@code precision} will always be non-negative. * * @serial */ final int precision; /** * The rounding algorithm to be used for an operation. * * @see RoundingMode * @serial */ final RoundingMode roundingMode; /* ----- Constructors ----- */ /** * Constructs a new {@code MathContext} with the specified * precision and the {@link RoundingMode#HALF_UP HALF_UP} rounding * mode. * * @param setPrecision The non-negative {@code int} precision setting. * @throws IllegalArgumentException if the {@code setPrecision} parameter is less * than zero. */ public MathContext(int setPrecision) { this(setPrecision, DEFAULT_ROUNDINGMODE); return; } /** * Constructs a new {@code MathContext} with a specified * precision and rounding mode. * * @param setPrecision The non-negative {@code int} precision setting. * @param setRoundingMode The rounding mode to use. * @throws IllegalArgumentException if the {@code setPrecision} parameter is less * than zero. 
* @throws NullPointerException if the rounding mode argument is {@code null} */ public MathContext(int setPrecision, RoundingMode setRoundingMode) { if (setPrecision < MIN_DIGITS) throw new IllegalArgumentException("Digits < 0"); if (setRoundingMode == null) throw new NullPointerException("null RoundingMode"); precision = setPrecision; roundingMode = setRoundingMode; return; } /** * Constructs a new {@code MathContext} from a string. * * The string must be in the same format as that produced by the * {@link #toString} method. * * <p>An {@code IllegalArgumentException} is thrown if the precision * section of the string is out of range ({@code < 0}) or the string is * not in the format created by the {@link #toString} method. * * @param val The string to be parsed * @throws IllegalArgumentException if the precision section is out of range * or of incorrect format * @throws NullPointerException if the argument is {@code null} */ public MathContext(String val) { boolean bad = false; int setPrecision; if (val == null) throw new NullPointerException("null String"); try { // any error here is a string format problem if (!val.startsWith("precision=")) throw new RuntimeException(); int fence = val.indexOf(' '); // could be -1 int off = 10; // where value starts setPrecision = Integer.parseInt(val.substring(10, fence)); if (!val.startsWith("roundingMode=", fence+1)) throw new RuntimeException(); off = fence + 1 + 13; String str = val.substring(off, val.length()); roundingMode = RoundingMode.valueOf(str); } catch (RuntimeException re) { throw new IllegalArgumentException("bad string format"); } if (setPrecision < MIN_DIGITS) throw new IllegalArgumentException("Digits < 0"); // the other parameters cannot be invalid if we got here precision = setPrecision; } /** * Returns the {@code precision} setting. * This value is always non-negative. 
* * @return an {@code int} which is the value of the {@code precision} * setting */ public int getPrecision() { return precision; } /** * Returns the roundingMode setting. * This will be one of * {@link RoundingMode#CEILING}, * {@link RoundingMode#DOWN}, * {@link RoundingMode#FLOOR}, * {@link RoundingMode#HALF_DOWN}, * {@link RoundingMode#HALF_EVEN}, * {@link RoundingMode#HALF_UP}, * {@link RoundingMode#UNNECESSARY}, or * {@link RoundingMode#UP}. * * @return a {@code RoundingMode} object which is the value of the * {@code roundingMode} setting */ public RoundingMode getRoundingMode() { return roundingMode; } /** * Compares this {@code MathContext} with the specified * {@code Object} for equality. * * @param x {@code Object} to which this {@code MathContext} is to * be compared. * @return {@code true} if and only if the specified {@code Object} is * a {@code MathContext} object which has exactly the same * settings as this object */ public boolean equals(Object x){ MathContext mc; if (!(x instanceof MathContext)) return false; mc = (MathContext) x; return mc.precision == this.precision && mc.roundingMode == this.roundingMode; // no need for .equals() } /** * Returns the hash code for this {@code MathContext}. * * @return hash code for this {@code MathContext} */ public int hashCode() { return this.precision + roundingMode.hashCode() * 59; } /** * Returns the string representation of this {@code MathContext}. * The {@code String} returned represents the settings of the * {@code MathContext} object as two space-delimited words * (separated by a single space character, <tt>'&#92;u0020'</tt>, * and with no leading or trailing white space), as follows: * <ol> * <li> * The string {@code "precision="}, immediately followed * by the value of the precision setting as a numeric string as if * generated by the {@link Integer#toString(int) Integer.toString} * method. 
* * <li> * The string {@code "roundingMode="}, immediately * followed by the value of the {@code roundingMode} setting as a * word. This word will be the same as the name of the * corresponding public constant in the {@link RoundingMode} * enum. * </ol> * <p> * For example: * <pre> * precision=9 roundingMode=HALF_UP * </pre> * * Additional words may be appended to the result of * {@code toString} in the future if more properties are added to * this class. * * @return a {@code String} representing the context settings */ public java.lang.String toString() { return "precision=" + precision + " " + "roundingMode=" + roundingMode.toString(); } // Private methods /** * Reconstitute the {@code MathContext} instance from a stream (that is, * deserialize it). * * @param s the stream being read. */ private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException { s.defaultReadObject(); // read in all fields // validate possibly bad fields if (precision < MIN_DIGITS) { String message = "MathContext: invalid digits in stream"; throw new java.io.StreamCorruptedException(message); } if (roundingMode == null) { String message = "MathContext: null roundingMode in stream"; throw new java.io.StreamCorruptedException(message); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.plugins.document; import java.util.Set; import java.util.concurrent.locks.Lock; import javax.annotation.CheckForNull; import javax.annotation.Nonnull; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import org.apache.jackrabbit.oak.commons.PathUtils; import org.apache.jackrabbit.oak.commons.json.JsopReader; import org.apache.jackrabbit.oak.commons.json.JsopStream; import org.apache.jackrabbit.oak.commons.json.JsopTokenizer; import org.apache.jackrabbit.oak.commons.json.JsopWriter; import org.apache.jackrabbit.oak.plugins.document.util.Utils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Sets; import com.google.common.util.concurrent.Striped; import com.mongodb.BasicDBObject; import com.mongodb.BasicDBObjectBuilder; import com.mongodb.DB; import com.mongodb.DBCollection; import com.mongodb.DBObject; import com.mongodb.MongoException; import com.mongodb.WriteConcern; import static com.google.common.base.Preconditions.checkNotNull; /** * A diff cache implementation using a capped collection as a secondary cache. 
 */
public class MongoDiffCache extends MemoryDiffCache {

    private static final Logger LOG = LoggerFactory.getLogger(MongoDiffCache.class);

    private static final long MB = 1024 * 1024;

    private static final String COLLECTION_NAME = "changes";

    // Capped MongoDB collection holding one document per commit diff,
    // keyed by the commit's "to" revision ("_id") with its base revision in "_b".
    private final DBCollection changes;

    // Remembers "from/to" revision pairs for which building a combined diff was
    // recently abandoned (more than 32 commits), so the work is not repeated.
    private final Cache<String, String> blacklist =
            CacheBuilder.newBuilder().maximumSize(1024).build();

    // Lock striping keyed on the "from" revision, so concurrent getChanges()
    // calls for the same base revision build the combined diff only once.
    private final Striped<Lock> locks = Striped.lock(16);

    /**
     * Creates the diff cache, reusing the existing capped "changes" collection
     * if present, otherwise creating it with the given maximum size in MB.
     */
    public MongoDiffCache(DB db, int sizeMB, DocumentMK.Builder builder) {
        super(builder);
        if (db.collectionExists(COLLECTION_NAME)) {
            changes = db.getCollection(COLLECTION_NAME);
        } else {
            changes = db.createCollection(COLLECTION_NAME,
                    BasicDBObjectBuilder.start().add("capped", true)
                            .add("size", sizeMB * MB).get());
        }
    }

    /**
     * Returns the changes for {@code path} between {@code from} and {@code to},
     * first from the in-memory cache, then by stitching together per-commit
     * diff documents from the capped collection (walking the "_b" back-links
     * from {@code to} towards {@code from}). Returns {@code null} when the
     * diff cannot be assembled (different cluster ids, missing documents,
     * blacklisted pair, or more than 32 commits between the revisions).
     */
    @CheckForNull
    @Override
    public String getChanges(@Nonnull Revision from,
                             @Nonnull Revision to,
                             @Nonnull String path) {
        Lock lock = locks.get(from);
        lock.lock();
        try {
            // first try to serve from cache
            String diff = super.getChanges(from, to, path);
            if (diff != null) {
                return diff;
            }
            if (from.getClusterId() != to.getClusterId()) {
                return null;
            }
            // check blacklist
            if (blacklist.getIfPresent(from + "/" + to) != null) {
                return null;
            }
            Revision id = to;
            Diff d = null;
            int numCommits = 0;
            for (;;) {
                // grab from mongo
                DBObject obj = changes.findOne(new BasicDBObject("_id", id.toString()));
                if (obj == null) {
                    return null;
                }
                numCommits++;
                if (numCommits > 32) {
                    // do not merge more than 32 commits
                    blacklist.put(from + "/" + to, "");
                    return null;
                }
                if (d == null) {
                    d = new Diff(obj);
                } else {
                    d.mergeBeforeDiff(new Diff(obj));
                }
                // the from revision of the current diff
                id = Revision.fromString((String) obj.get("_b"));
                if (from.equals(id)) {
                    // diff is complete
                    LOG.debug("Built diff from {} commits", numCommits);
                    // apply to diff cache and serve later requests from cache
                    d.applyToEntry(super.newEntry(from, to)).done();
                    // return changes
                    return d.getChanges(path);
                }
                // walked past "from" without hitting it — give up
                if (StableRevisionComparator.INSTANCE.compare(id, from) < 0) {
                    break;
                }
            }
            return null;
        } finally {
            lock.unlock();
        }
    }

    /**
     * Creates a cache entry that records changes both in the in-memory cache
     * (via the superclass) and, on {@link Entry#done()}, writes the collected
     * diff document to the capped collection (fire-and-forget write concern).
     */
    @Nonnull
    @Override
    public Entry newEntry(@Nonnull final Revision from,
                          @Nonnull final Revision to) {
        return new MemoryEntry(from, to) {

            private Diff commit = new Diff(from, to);

            @Override
            public void append(@Nonnull String path, @Nonnull String changes) {
                // super.append() will apply to diff cache in base class
                super.append(path, changes);
                commit.append(path, changes);
            }

            @Override
            public void done() {
                try {
                    changes.insert(commit.doc, WriteConcern.UNACKNOWLEDGED);
                } catch (MongoException e) {
                    LOG.warn("Write back of diff cache entry failed", e);
                }
            }
        };
    }

    /**
     * A single commit's diff, represented as a nested DBObject tree: each
     * child node is stored under its escaped name, and the JSOP change string
     * for a node lives in that node's "_c" field. The document's "_id" is the
     * commit ("to") revision and "_b" is the base ("from") revision.
     */
    static class Diff {

        private final DBObject doc;

        Diff(Revision from, Revision to) {
            this.doc = new BasicDBObject();
            this.doc.put("_id", to.toString());
            this.doc.put("_b", from.toString());
        }

        Diff(DBObject doc) {
            this.doc = doc;
        }

        /**
         * Records the JSOP change string for the node at {@code path},
         * creating the intermediate child objects as needed.
         */
        void append(String path, String changes) {
            DBObject current = doc;
            for (String name : PathUtils.elements(path)) {
                String escName = Utils.escapePropertyName(name);
                if (current.containsField(escName)) {
                    current = (DBObject) current.get(escName);
                } else {
                    BasicDBObject child = new BasicDBObject();
                    current.put(escName, child);
                    current = child;
                }
            }
            current.put("_c", checkNotNull(changes));
        }

        /**
         * Returns the recorded change string for {@code path}, or the empty
         * string when no changes were recorded for that node.
         */
        String getChanges(String path) {
            DBObject current = doc;
            for (String name : PathUtils.elements(path)) {
                String n = Utils.unescapePropertyName(name);
                current = (DBObject) current.get(n);
                if (current == null) {
                    break;
                }
            }
            if (current == null || !current.containsField("_c")) {
                // no changes here
                return "";
            } else {
                return current.get("_c").toString();
            }
        }

        /** Replays every recorded change string into the given cache entry. */
        Entry applyToEntry(Entry entry) {
            applyInternal(doc, "/", entry);
            return entry;
        }

        /**
         * Merges an older diff (whose "to" revision is this diff's "from"
         * revision) into this one, so this diff afterwards spans from
         * {@code before}'s base revision up to this diff's revision.
         */
        void mergeBeforeDiff(Diff before) {
            mergeInternal(doc, before.doc, Sets.<String>newHashSet(),
                    Sets.<String>newHashSet(), Sets.<String>newHashSet());
            doc.put("_b", before.doc.get("_b"));
        }

        /**
         * Recursively merges the change strings of {@code before} into
         * {@code doc}. The three sets are scratch buffers (cleared on entry)
         * holding the combined added/removed/modified child names. The
         * callbacks implement the usual diff-composition rules, e.g. a node
         * added in {@code before} and later modified is just "added", and a
         * node added then removed cancels out to "modified"/nothing.
         */
        private static void mergeInternal(DBObject doc, DBObject before,
                                          final Set<String> added,
                                          final Set<String> removed,
                                          final Set<String> modified) {
            added.clear();
            removed.clear();
            modified.clear();
            // seed the sets with this (newer) diff's own changes
            String changes = (String) doc.get("_c");
            if (changes != null) {
                parse(changes, new ParserCallback() {
                    @Override
                    public void added(String name) {
                        added.add(name);
                    }

                    @Override
                    public void removed(String name) {
                        removed.add(name);
                    }

                    @Override
                    public void modified(String name) {
                        modified.add(name);
                    }
                });
            }
            // fold in the older diff's changes, composing the two
            changes = (String) before.get("_c");
            if (changes != null) {
                parse(changes, new ParserCallback() {
                    @Override
                    public void added(String name) {
                        if (modified.remove(name) || !removed.remove(name)) {
                            added.add(name);
                        }
                    }

                    @Override
                    public void removed(String name) {
                        if (added.remove(name)) {
                            modified.add(name);
                        } else {
                            removed.add(name);
                        }
                    }

                    @Override
                    public void modified(String name) {
                        if (added.remove(name) || !removed.contains(name)) {
                            modified.add(name);
                        }
                    }
                });
                doc.put("_c", serialize(added, removed, modified));
            }
            // merge recursively
            // NOTE(review): keys passing Utils.isPropertyName appear to be the
            // escaped child-node entries (as written by append()), while
            // "_id"/"_b"/"_c" metadata keys are skipped — confirm against
            // Utils.escapePropertyName/isPropertyName semantics.
            for (String k : before.keySet()) {
                if (Utils.isPropertyName(k)) {
                    DBObject beforeChild = (DBObject) before.get(k);
                    DBObject thisChild = (DBObject) doc.get(k);
                    if (thisChild == null) {
                        thisChild = new BasicDBObject();
                        doc.put(k, thisChild);
                    }
                    mergeInternal(thisChild, beforeChild, added, removed, modified);
                }
            }
        }

        /**
         * Serializes the given change sets back into a JSOP string using the
         * same '+'/'-'/'^' tokens that {@link #parse} consumes.
         */
        private static String serialize(final Set<String> added,
                                        final Set<String> removed,
                                        final Set<String> modified) {
            JsopWriter w = new JsopStream();
            for (String p : added) {
                w.tag('+').key(PathUtils.getName(p)).object().endObject().newline();
            }
            for (String p : removed) {
                w.tag('-').value(PathUtils.getName(p)).newline();
            }
            for (String p : modified) {
                w.tag('^').key(PathUtils.getName(p)).object().endObject().newline();
            }
            return w.toString();
        }

        /**
         * Parses a JSOP change string and reports each added ('+'),
         * removed ('-') and modified ('^') child name to the callback.
         *
         * @throws IllegalArgumentException on an unexpected token
         */
        private static void parse(String changes, ParserCallback callback) {
            JsopTokenizer t = new JsopTokenizer(changes);
            for (;;) {
                int r = t.read();
                if (r == JsopReader.END) {
                    break;
                }
                switch (r) {
                    case '+': {
                        callback.added(t.readString());
                        t.read(':');
                        t.read('{');
                        t.read('}');
                        break;
                    }
                    case '-': {
                        callback.removed(t.readString());
                        break;
                    }
                    case '^': {
                        callback.modified(t.readString());
                        t.read(':');
                        t.read('{');
                        t.read('}');
                        break;
                    }
                    default:
                        throw new IllegalArgumentException("jsonDiff: illegal token '"
                                + t.getToken() + "' at pos: " + t.getLastPos() + ' ' + changes);
                }
            }
        }

        /**
         * Depth-first walk over the diff document, appending each node's
         * change string ("_c") to the entry under its unescaped path.
         */
        private void applyInternal(DBObject obj, String path, Entry entry) {
            String diff = (String) obj.get("_c");
            if (diff != null) {
                entry.append(path, diff);
            }
            for (String k : obj.keySet()) {
                if (Utils.isPropertyName(k)) {
                    String name = Utils.unescapePropertyName(k);
                    applyInternal((DBObject) obj.get(k),
                            PathUtils.concat(path, name), entry);
                }
            }
        }

        /** Receiver for the three kinds of child-node changes in a JSOP diff. */
        private interface ParserCallback {

            void added(String name);

            void removed(String name);

            void modified(String name);
        }
    }
}
/******************************************************************************* * Copyright (c) 2006-2010 eBay Inc. All Rights Reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 *******************************************************************************/ package org.ebayopensource.turmeric.tools.codegen.external; import java.io.BufferedReader; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.logging.FileHandler; import java.util.logging.Level; import org.apache.axis2.wsdl.WSDL2Java; import org.apache.ws.java2wsdl.Java2WSDLCodegenEngine; import org.apache.ws.java2wsdl.utils.Java2WSDLCommandLineOption; import org.apache.ws.java2wsdl.utils.Java2WSDLCommandLineOptionParser; import org.ebayopensource.turmeric.common.config.TypeInformationType; import org.ebayopensource.turmeric.common.config.TypeLibraryType; import org.ebayopensource.turmeric.runtime.codegen.common.NSPkgMappingType; import org.ebayopensource.turmeric.runtime.codegen.common.NSToPkgMappingList; import org.ebayopensource.turmeric.runtime.codegen.common.OpNameCemcMappingType; import org.ebayopensource.turmeric.runtime.codegen.common.OpNameToCemcMappingList; import org.ebayopensource.turmeric.runtime.codegen.common.PkgToNSMappingList; import org.ebayopensource.turmeric.runtime.common.impl.utils.CallTrackingLogger; import org.ebayopensource.turmeric.runtime.common.impl.utils.LogManager; import org.ebayopensource.turmeric.runtime.common.types.SOAConstants; import 
org.ebayopensource.turmeric.tools.codegen.CodeGenContext;
import org.ebayopensource.turmeric.tools.codegen.InputOptions;
import org.ebayopensource.turmeric.tools.codegen.exception.CodeGenFailedException;
import org.ebayopensource.turmeric.tools.codegen.exception.PreProcessFailedException;
import org.ebayopensource.turmeric.tools.codegen.external.wsdl.parser.WsdlParserUtil;
import org.ebayopensource.turmeric.tools.codegen.util.CodeGenConstants;
import org.ebayopensource.turmeric.tools.codegen.util.CodeGenUtil;
import org.ebayopensource.turmeric.tools.codegen.util.TypeLibraryClassDetails;
import org.ebayopensource.turmeric.tools.library.SOAGlobalRegistryFactory;
import org.ebayopensource.turmeric.tools.library.SOATypeRegistry;
import org.ebayopensource.turmeric.tools.library.TypeLibraryInputOptions;
import org.ebayopensource.turmeric.tools.library.builders.TypeLibraryParser;
import org.ebayopensource.turmeric.tools.library.codegen.TypeLibraryCodeGenContext;

/**
 * {@link JavaWSDLGenerator} implementation that delegates Java-to-WSDL and
 * WSDL-to-Java generation to the Apache Axis2 tooling, then post-processes
 * the generated artifacts (formatting, duplicate-file cleanup, etc.).
 */
public class AxisJavaWSDLGeneratorImpl implements JavaWSDLGenerator {

    private static final String PKG_PARAM = "@pkg@";
    private static final String NAMESPACE_PARAM = "@ns@";
    private static final String JAVA_2_WSDL_PKG_TO_NS_PATTERN =
        "[" + PKG_PARAM + "," + NAMESPACE_PARAM + "]";

    private static final String XML_TRANSFORMER_FACTORY = "javax.xml.transform.TransformerFactory";

    private static CallTrackingLogger s_logger = LogManager.getInstance(AxisJavaWSDLGeneratorImpl.class);

    // Handler for the temporary WSDL2Java log file; closed and deleted after wsdl2Java().
    private FileHandler m_fileHandlerForWSDL2Java;

    private CallTrackingLogger getLogger() {
        return s_logger;
    }

    /**
     * Points Axis2 at the Turmeric codegen configuration and sanity-checks
     * that the configuration resource is actually reachable on the classpath
     * (Axis2 would otherwise fail later with an opaque NPE).
     */
    public AxisJavaWSDLGeneratorImpl() {
        String codegenConfigProp = "/org/ebayopensource/turmeric/tools/codegen/external/codegen-config.properties";
        System.setProperty("org.apache.axis2.codegen.config", codegenConfigProp);
        /* Perform a quick check on the ClassLoader to see if the axis2 config
         * file can be found.
         * Axis2 will trigger a NPE if this file is not found via the classloader.
         * This makes this issue more meaningful in the logs.
         */
        URL url = this.getClass().getResource(codegenConfigProp);
        if(url == null) {
            getLogger().severe("Axis2 Configuration File not present in ClassLoader: " + codegenConfigProp);
        }
    }

    /**
     * Generates a WSDL from the given interface class using Axis2's Java2WSDL
     * engine, then pretty-formats the resulting WSDL file.
     *
     * <p>Communicates the target namespace and operation-to-CEMC mappings to
     * Axis2 through system properties, which are reset in the finally block.
     *
     * @param codeGenCtx        codegen context (namespace, input options, admin name)
     * @param qualifiedIntfName fully qualified name of the service interface
     * @param destLocation      directory where the WSDL is written
     * @throws CodeGenFailedException if Axis2 generation or formatting fails
     */
    public void java2WSDL(
        CodeGenContext codeGenCtx,
        String qualifiedIntfName,
        String destLocation) throws CodeGenFailedException {

        System.setProperty("soa.service.default.ns", codeGenCtx.getNamespace());

        OpNameToCemcMappingList opNameToCemcMappings =
                codeGenCtx.getInputOptions().getOpNameToCemcMappings();
        String opNameToCemcMapString = getOpNameToCemcMapString(opNameToCemcMappings);
        if (opNameToCemcMapString != null) {
            System.setProperty("soa.service.opname.to.cemc.map", opNameToCemcMapString);
        }

        String[] args = getJava2WSDLToolArgs(codeGenCtx, qualifiedIntfName, destLocation);
        if(LogManager.isTracingEnabled()){
            String strMsg = Arrays.toString(args);
            getLogger().debug("Arguments passed to AXIS2's Java2WSDL : \n"+ strMsg);
        }
        try {
            Java2WSDLCommandLineOptionParser cmdArgsParser =
                    new Java2WSDLCommandLineOptionParser(args);
            Map<String,Java2WSDLCommandLineOption> allOptions = cmdArgsParser.getAllOptions();
            Java2WSDLCodegenEngine java2WSDLEngine = new Java2WSDLCodegenEngine(allOptions);
            // generate WSDL
            java2WSDLEngine.generate();

            // Pretty format generated WSDL
            // this is quick fix until Axis2 stops pretty formatting all
            // XML and WSDL files under output directories.
            String wsdlFilePath =
                CodeGenUtil.toOSFilePath(destLocation) + getWSDLFileName(codeGenCtx.getServiceAdminName());
            prettyFormatWSDL(wsdlFilePath);
        } catch (Exception ex) {
            throw new CodeGenFailedException(
                    "Failed to generate WSDL for : " + qualifiedIntfName, ex);
        } finally {
            System.setProperty("soa.service.default.ns", "");
            System.setProperty("soa.service.opname.to.cemc.map", "");
        }
    }

    /**
     * Generates Java code from a WSDL using Axis2's WSDL2Java, with extensive
     * post-processing: removes duplicate ObjectFactory/package-info files,
     * restores the XML TransformerFactory and HTTP proxy system properties
     * that WSDL2Java clobbers, deletes the temporary WSDL2Java log file, and
     * deletes java types supplied by type libraries (-tlx option).
     *
     * @param codeGenCtx   codegen context (input options, wsdl location, proxy settings)
     * @param destLocation root output directory for the generated sources
     * @throws CodeGenFailedException if WSDL2Java fails (the thrown exception
     *         includes warnings/errors scraped from the WSDL2Java log file)
     */
    public void wsdl2Java(CodeGenContext codeGenCtx, String destLocation)
            throws CodeGenFailedException {

        String oldFactoryName = System.getProperty(XML_TRANSFORMER_FACTORY, "");
        // set to new value: prefer Xalan (IBM JRE), then the Sun-internal
        // XSLTC factory (Sun JDK 6), else fall back to the platform default
        try {
            Class.forName("org.apache.xalan.processor.TransformerFactoryImpl");//Transformer facotry impl available with IBM JRE
            System.setProperty(XML_TRANSFORMER_FACTORY, "org.apache.xalan.processor.TransformerFactoryImpl");
        } catch (ClassNotFoundException e1) {
            try {
                Class.forName("com.sun.org.apache.xalan.internal.xsltc.trax.TransformerFactoryImpl");//Transformer facotry impl available with SUN JDK 6
                System.setProperty(XML_TRANSFORMER_FACTORY, "com.sun.org.apache.xalan.internal.xsltc.trax.TransformerFactoryImpl");
            } catch (ClassNotFoundException e) {
                System.setProperty(XML_TRANSFORMER_FACTORY, ""); // use the availble Transformer factory if the preferred ones are not available
            }
        }

        getLogger().log(Level.INFO, "The XML Transformer factory used is : " + System.getProperty(XML_TRANSFORMER_FACTORY) );

        String[] args = getWSDL2JavaToolArgs(codeGenCtx, destLocation);
        getLogger().log(Level.INFO,"Arguments passed to AXIS2's WSDL2Java : \n"+ Arrays.toString(args));

        File wsdl2javaLogFile = getLogFileOfWSDL2JavaCall();
        try {
            WSDL2Java.main(args);
        } catch (Exception ex) {
            String wsdl2JavaWarningAndError = getWarningErorFromLog(wsdl2javaLogFile);
            CodeGenFailedException codeGenFailedException = new CodeGenFailedException(
                    "Failed to generate Java code for : "
                    + codeGenCtx.getInputOptions().getInputFile() + "\n" + wsdl2JavaWarningAndError, ex);
            codeGenFailedException.setMessageFormatted(true);
            throw codeGenFailedException;
        } finally {
            removeFileHandlerForWSDL2JavaLog();

            //remove the unneccessary ObjectFactory.java and package-info.java SOAPLATFORM-609
            deleteDuplicateFilesFromDefaultPackage(codeGenCtx);
            if(codeGenCtx.getInputOptions().isObjectFactoryTobeDeleted())
                deleteObjectFactoryfile(codeGenCtx);

            // reset to old factory name
            if (null != oldFactoryName && oldFactoryName.length() != 0) {
                System.setProperty(XML_TRANSFORMER_FACTORY, oldFactoryName);
            } else {
                // NOTE(review): System.setProperties(null) resets ALL system
                // properties to defaults, not just the transformer factory —
                // looks suspicious; verify this is intended rather than
                // System.clearProperty(XML_TRANSFORMER_FACTORY).
                System.setProperties(null);
            }
        }

        try {
            //need to delete the wsdl2javalog file now.
            s_logger.log(Level.FINE,"Deleting wsdl2javaLog file");
            CodeGenUtil.closeQuietly(m_fileHandlerForWSDL2Java);
            CodeGenUtil.deleteFile(wsdl2javaLogFile);

            deleteAdditionalJavaTypeFiles(codeGenCtx,destLocation);
        } catch (Exception e) {
            getLogger().log(Level.WARNING, "Exception while trying to delete extra types. " + e.getMessage(), e);
        }

        // Calls to WSDL2Java are nulling out some of the set System properties like http.ProxyHost and http.ProxyPort
        String proxyHost = codeGenCtx.getInputOptions().getHttpProxyHost();
        String proxyPort = codeGenCtx.getInputOptions().getHttpProxyPort();
        if( !CodeGenUtil.isEmptyString(proxyHost) && !CodeGenUtil.isEmptyString(proxyPort)){
            System.setProperty("http.proxyHost", proxyHost);
            System.setProperty("http.proxyPort", proxyPort);
        }

        //Library support : deletion of files mentioned in the XML file of the -tlx option
        List<TypeLibraryClassDetails> typeLibraryClassDetailsList =
                codeGenCtx.getInputOptions().getTypeLibraryClassDetails();
        for(TypeLibraryClassDetails typeLibraryClassDetail : typeLibraryClassDetailsList){
            String filePath = CodeGenUtil.toJavaSrcFilePath(
                    destLocation + File.separatorChar + "src",
                    typeLibraryClassDetail.getPackageName() + "." + typeLibraryClassDetail.getClassName());
            File javaTypeFile = new File(filePath);
            if(javaTypeFile.exists()){
                boolean isFileDeleted = javaTypeFile.delete();
                if(!isFileDeleted){
                    getLogger().log(Level.WARNING, " File " + javaTypeFile.getAbsolutePath()
                            + " could not be deleted. This file is mentioned thru the option "
                            + InputOptions.OPT_TYPE_LIBRARY_XML_FILE );
                }
            }
        }
    }

    /**
     * Deletes the Java files ObjectFactory.java and package-info.java from name space SOAConstants.SOA_TYPES_NAMESPACE.
     * @param ctx
     * @throws CodeGenFailedException
     */
    @SuppressWarnings("deprecation")
    private void deleteDuplicateFilesFromDefaultPackage(CodeGenContext ctx)throws CodeGenFailedException {
        NSToPkgMappingList ns2PkgList = ctx.getInputOptions().getNSToPkgMappingList();
        String packageName = getpackageForNamespace(SOAConstants.SOA_TYPES_NAMESPACE,ns2PkgList);
        deleteEachObjectFactoryClass(ctx, packageName);
        deleteEachPackageInfoClass(ctx, packageName);
    }

    /**
     * Builds the Axis2 WSDL2Java command-line arguments used for type-library
     * code generation (jaxbri binding, sync style, no build.xml / message
     * receivers, custom base exception class).
     *
     * @param typeLibraryCodeGenContext type-library codegen context
     * @param outputDirectory           parent directory for generated files
     * @return the argument array to pass to WSDL2Java
     * @throws CodeGenFailedException declared for callers; not thrown directly here
     */
    private String[] getWSDL2JavaToolArgsForTypeLib(TypeLibraryCodeGenContext typeLibraryCodeGenContext,
            String outputDirectory) throws CodeGenFailedException{
        List<String> argsList = new ArrayList<String>();

        TypeLibraryInputOptions inputOptions = typeLibraryCodeGenContext.getTypeLibraryInputOptions();

        argsList.add("-o"); // all generated files location (parent dir)
        argsList.add(outputDirectory);

        argsList.add("-s"); // sync style

        argsList.add("-l"); // language
        argsList.add("java");

        argsList.add("-d"); // data binding
        argsList.add("jaxbri");

        argsList.add("-ss"); // server side
        argsList.add("-ssi"); // Service skeleton interface
        argsList.add("-noWSDL");
        argsList.add("-noBuildXML");
        argsList.add("-noMessageReceiver");

        argsList.add("-p"); // package name for interface
        argsList.add(getTypePackageForTypeLib(typeLibraryCodeGenContext));

        argsList.add("-ebc");
        argsList.add("org.ebayopensource.turmeric.runtime.common.exceptions.ServiceException");

        argsList.add("-uri"); // wsdl file uri
        argsList.add(inputOptions.getV4WsdlLocation());

        argsList.add("-sin"); //name for the generated interface
        argsList.add(typeLibraryCodeGenContext.getServiceName());

        String[] args = argsList.toArray(new String[0]);
        return args;
    }

    /**
     * Returns the Java package for generated type-library types: the explicit
     * -v4pkg option when given, otherwise the interface package from context.
     */
    private String getTypePackageForTypeLib(TypeLibraryCodeGenContext typeLibraryCodeGenContext){
        String typePackage = null;
        TypeLibraryInputOptions inputOptions = typeLibraryCodeGenContext.getTypeLibraryInputOptions();
        if(inputOptions.getV4Pkg() != null)
            typePackage = inputOptions.getV4Pkg();
        else
            typePackage = typeLibraryCodeGenContext.getInterfacePkg();
        return typePackage;
    }

    /**
     * Deletes the generated ObjectFactory.java for every target namespace
     * found in the input WSDL (one ObjectFactory is generated per namespace).
     *
     * @param ctx codegen context providing the input WSDL location
     * @throws CodeGenFailedException if the WSDL's target namespaces cannot be determined
     */
    private void deleteObjectFactoryfile(CodeGenContext ctx)throws CodeGenFailedException {
        Set<String> allNamespaces = null;
        try {
            allNamespaces = WsdlParserUtil.getAllTargetNamespces(ctx.getInputOptions().getInputFile());
        } catch (Exception e) {
            s_logger.log(Level.SEVERE,"could not find namespaces present in the wsdl");
            throw new CodeGenFailedException("TargetNamespace details for schema section of wsdl could not be found ");
        }
        @SuppressWarnings("deprecation")
        NSToPkgMappingList ns2PkgList = ctx.getInputOptions().getNSToPkgMappingList();

        //One objectFactory per namespace hence needs to be deleted.
        for(String currentnamespace : allNamespaces) {
            String packageName = getpackageForNamespace(currentnamespace,ns2PkgList);
            s_logger.log(Level.INFO,"Namespace mapped for ObjectFactory is " +currentnamespace);
            s_logger.log(Level.INFO,"package found for ObjectFactory to be deleted is "+ packageName);
            deleteEachObjectFactoryClass(ctx, packageName);
        }
    }

    /**
     * Resolves the Java package for a namespace: uses an explicit
     * namespace-to-package mapping entry when one matches, otherwise falls
     * back to the default namespace-to-package derivation.
     */
    private String getpackageForNamespace(String currentnamespace,NSToPkgMappingList mappingList) {
        if(mappingList==null)
            return WSDLUtil.getPackageFromNamespace(currentnamespace);
        else {
            Iterator<NSPkgMappingType> itr = mappingList.getPkgNsMap().iterator();
            while(itr.hasNext()) {
                NSPkgMappingType currentMappingType = itr.next();
                String currentNamespaceInMappings = currentMappingType.getNamespace();
                if(currentNamespaceInMappings.equals(currentnamespace))
                    return currentMappingType.getPackage();
            }
        }
        // no mapping entry matched — derive the package from the namespace
        return WSDLUtil.getPackageFromNamespace(currentnamespace);
    }

    /**This would delete each objectfactory class generated for the wsdl
     * @param ctx
     * @param packageForObjectFact
     */
    private void deleteEachObjectFactoryClass(CodeGenContext ctx,
            String packageForObjectFact) {
        deleteClassFromGivenPackage(ctx, packageForObjectFact, "ObjectFactory.java");
    }

    /**This would delete each package-info.java generated for the wsdl
     * @param ctx
     * @param packageForObjectFact
     */
    private void deleteEachPackageInfoClass(CodeGenContext ctx,
            String packageForObjectFact) {
        deleteClassFromGivenPackage(ctx, packageForObjectFact, "package-info.java");
    }

    /**
     * This deletes the java source file for the class name specified in the given package.
* @param ctx * @param packageForObjectFact * @param className */ private void deleteClassFromGivenPackage(CodeGenContext ctx, String packageForObjectFact, String className) { String projectRoot = ctx.getProjectRoot()==null?ctx.getDestLocation():ctx.getProjectRoot(); String folderName = CodeGenUtil.getFolderPathFrompackageName(packageForObjectFact); String fileName = CodeGenUtil.toOSFilePath(projectRoot) + CodeGenConstants.GEN_SRC_FOLDER + File.separatorChar + "src" + File.separatorChar + folderName + className; File fileToBeDeleted = new File(fileName); s_logger.log(Level.INFO,"Class '"+className+"' is at " +fileName); try { CodeGenUtil.deleteFile(fileToBeDeleted ); s_logger.log(Level.INFO,"Deleted '"+className+"' under package "+ packageForObjectFact); } catch (Exception exception) { fileName = CodeGenUtil.toOSFilePath(ctx.getJavaSrcDestLocation(true)) + "src" + File.separatorChar + folderName + className; fileToBeDeleted = new File(fileName); s_logger.log(Level.INFO,"Class '"+className+"' is at " +fileName); try { CodeGenUtil.deleteFile(fileToBeDeleted ); s_logger.log(Level.INFO,"Deleted "+className+" under package "+ packageForObjectFact); } catch (Exception ex) { s_logger.log(Level.INFO,"Could not delete "+className+" under the package " + packageForObjectFact + " due to " + ex.getMessage()); } } } private String getWarningErorFromLog(File wsdl2javaLogFile) { StringBuffer stringBuffer = new StringBuffer(); BufferedReader br = null; try { br = new BufferedReader(new FileReader(wsdl2javaLogFile)); String lineStr = null; while((lineStr = br.readLine()) != null){ if( lineStr.startsWith("WARNING:") || lineStr.startsWith("SEVERE:")) stringBuffer.append(lineStr).append("\n"); } } catch (FileNotFoundException e) { getLogger().log(Level.INFO, e.getMessage(), e); } catch (IOException e) { getLogger().log(Level.INFO, e.getMessage(), e); } finally{ CodeGenUtil.closeQuietly(br); } return stringBuffer.toString(); } private void removeFileHandlerForWSDL2JavaLog() { 
LogManager.getGeneralLogger().removeHandler(m_fileHandlerForWSDL2Java); } private File getLogFileOfWSDL2JavaCall() { File tempFile = null; try { tempFile = File.createTempFile("wsdl2java", ".log"); getLogger().log(Level.INFO,"Location of wsdl2java log file : " + tempFile.getPath()); m_fileHandlerForWSDL2Java = new FileHandler(tempFile.getPath()); m_fileHandlerForWSDL2Java.setFormatter(new java.util.logging.SimpleFormatter()); LogManager.getGeneralLogger().addHandler(m_fileHandlerForWSDL2Java); } catch (IOException e) { getLogger().log(Level.INFO, "Exception while trying to create a temporary file for storing wsdl2java log : \n " + e); } return tempFile; } private void deleteAdditionalJavaTypeFiles(CodeGenContext codeGenCtx, String destLocation) throws Exception { getLogger().entering(); InputOptions inputOptions = codeGenCtx.getInputOptions(); String serviceName = inputOptions.getServiceAdminName(); SOATypeRegistry typeRegistry = SOAGlobalRegistryFactory.getSOATypeRegistryInstance(); Set<String> dependentLibraries = getAllDependentLibraryNames(codeGenCtx,serviceName); if(dependentLibraries.size() == 0) { getLogger().log(Level.SEVERE, "No Dependent libraries found, duplicate types might be generated! (Check your META-INF/" + serviceName + "/TypeDependencies.xml)"); return; } getLogger().log(Level.INFO, "Dependent libraries are : " + detail(dependentLibraries)); // for(String libraryName : dependentLibraries){ // try{ // typeRegistry.addTypeLibraryToRegistry(libraryName); // }catch(Exception e){ // getLogger().log(Level.WARNING, "Exception while trying to populate the registry for library : "+ libraryName +"\n" + // "Exception is : " + e.getMessage()); // } // } /* * Instead of populating registry library by library, populate at one go for entire list of libraries. * What difference it makes is, TypeInformation.xml is parsed for all libraries first and * then TypeDependecies.xml is parsed for all libraries. 
*/ List<String> dependentLibrariesList = new ArrayList<String>(dependentLibraries); try { typeRegistry.populateRegistryWithTypeLibraries(dependentLibrariesList); } catch (Exception e) { getLogger().log(Level.WARNING, "Exception while trying to populate the registry for dependent libaries : " + "Exception is : " + e.getMessage(), e); } // identify the possible java types to be deleted. // Map of <fully qualified java type name> to <type library name> Map<String,String> javaTypesToDelete = new HashMap<String,String>(); for(TypeLibraryType typeLibraryType : typeRegistry.getAllTypeLibraries()) { for(TypeInformationType typeInformationType : typeLibraryType.getType()){ javaTypesToDelete.put(typeInformationType.getJavaTypeName(), typeLibraryType.getLibraryName()); } } if(getLogger().isInfoEnabled()) { StringBuilder d = new StringBuilder(); d.append("Dependendant Library Java Types (to be deleted): "); d.append("(count: ").append(javaTypesToDelete.size()).append(")"); int maxTypeLength = 0; for(String typeclassname: javaTypesToDelete.keySet()) { int len = typeclassname.length(); if(len > maxTypeLength) { maxTypeLength = len; } } int idx=0; for(Map.Entry<String,String> entry : javaTypesToDelete.entrySet()) { d.append(String.format("%n %3d) %-" + maxTypeLength + "s - [%s]", idx++, entry.getKey(), entry.getValue())); } getLogger().log(Level.INFO, d.toString()); } String osDestDir = CodeGenUtil.toOSFilePath(destLocation); Set<String> searchPaths = new LinkedHashSet<String>(); searchPaths.add(osDestDir); // Standard Mode searchPaths.add(CodeGenUtil.toOSFilePath(codeGenCtx.getJavaSrcDestLocation())); // Standard Mode searchPaths.add(CodeGenUtil.toOSFilePath(osDestDir + "src")); // Legacy Mode String projectRoot = CodeGenUtil.toOSFilePath(inputOptions.getProjectRoot()); if(!CodeGenUtil.isEmptyString(projectRoot)) { searchPaths.add(CodeGenUtil.toOSFilePath(projectRoot + "src")); // Legacy Mode searchPaths.add(CodeGenUtil.toOSFilePath(projectRoot + "gen-src")); // Legacy Mode } 
getLogger().log(Level.INFO, "Search Paths: " + detail(searchPaths)); for(Map.Entry<String,String> entry : javaTypesToDelete.entrySet()) { String className = entry.getKey(); for(String searchPath: searchPaths) { File javaFile = new File(CodeGenUtil.toJavaSrcFilePath(searchPath,className)); getLogger().log(Level.FINE, "Java Type Path : " + javaFile); if(javaFile.exists()) { if(javaFile.delete()) { getLogger().log(Level.INFO, String.format("Deleted Generated Type [%s] (declared in dependent lib [%s]): %s", className, entry.getValue(), javaFile)); } else { getLogger().log(Level.WARNING, String.format("Unable to Delete Generated Type [%s] (declared in dependent lib [%s]): %s", className, entry.getValue(), javaFile)); } } } } getLogger().exiting(); } private String detail(Collection<String> coll) { if(coll == null) { return "<null>"; } StringBuilder d = new StringBuilder(); d.append(coll.getClass().getSimpleName()).append(" of size ["); d.append(coll.size()).append("]"); int idx=0; for(String s: coll) { d.append(String.format("%n %3d) %s", idx++, s)); } return d.toString(); } private Set<String> getAllDependentLibraryNames(CodeGenContext context, String libraryName) throws Exception { TypeLibraryParser parser = TypeLibraryParser.getInstance(); try { TypeLibraryInputOptions typelibInputOptions = new TypeLibraryInputOptions(); typelibInputOptions.setProjectRoot(context.getProjectRoot()); typelibInputOptions.setMetaSrcLocation(context.getInputOptions().getMetaSrcLocation()); TypeLibraryCodeGenContext tempTypeLibContext = new TypeLibraryCodeGenContext(typelibInputOptions, null); parser.processTypeDepXMLFileForGen(tempTypeLibContext, libraryName); // for (String libName : parser.getReferredTypeLibraries()) { // parser.processTypeDepXMLFile(libName); // } /* * Instead of parsing just second level, parse till nth level deep. 
*/ Set<String> orgReferedTypeLibraries = new HashSet<String>( parser.getReferredTypeLibraries() ); doRecursiveSearchForReferredLibs(parser, orgReferedTypeLibraries); } catch (Exception e) { getLogger().log(Level.SEVERE, e.getMessage(), e); } return parser.getReferredTypeLibraries(); } /** * This method parses all the TypeDependcies.xml in the tree and finds out all the libraries referred till nth level. * @param parser * @param typeLibrariesToBeParsed * @throws Exception */ private void doRecursiveSearchForReferredLibs(TypeLibraryParser parser, Set<String> typeLibrariesToBeParsed) throws Exception{ Set<String> orgReferedTypeLibraries = new HashSet<String>( parser.getReferredTypeLibraries() ); Set<String> currentReferedTypeLibraries = processTypeDepXMLForAllLibaries(parser, typeLibrariesToBeParsed); if(currentReferedTypeLibraries.size() != orgReferedTypeLibraries.size() ){ Set<String> deeplyReferedTypeLibraries = getDeeplyAddedLibraries( orgReferedTypeLibraries, currentReferedTypeLibraries ); doRecursiveSearchForReferredLibs(parser, deeplyReferedTypeLibraries); } } /** * This method compares two sets and finds out the extra ones added in the second set. 
* @param orgReferedTypeLibraries * @param curReferedTypeLibraries * @return */ private Set<String> getDeeplyAddedLibraries(Set<String> orgReferedTypeLibraries, Set<String> curReferedTypeLibraries){ Set<String> deeplyReferedTypeLibraries = new HashSet<String>(); for(String library : curReferedTypeLibraries){ if( !(orgReferedTypeLibraries.contains(library)) ){ deeplyReferedTypeLibraries.add(library); } } return deeplyReferedTypeLibraries; } /** * This method parses the TypeDependcies.xml for the given set of libraries * @param parser * @param referedTypeLibraries * @return * @throws Exception */ private Set<String> processTypeDepXMLForAllLibaries(TypeLibraryParser parser, Set<String> referedTypeLibraries) throws Exception{ Iterator it = referedTypeLibraries.iterator(); while (it.hasNext()) { String libName = (String) it.next(); parser.processTypeDepXMLFile(libName); } return new HashSet<String>( parser.getReferredTypeLibraries() ); } public String wsdl2JavaGenSrcLoc(String srcLocPrefix) { return CodeGenUtil.toOSFilePath(srcLocPrefix) + "src"; } private String getWSDLFileName(String svcName) { return svcName + ".wsdl"; } private String[] getJava2WSDLToolArgs( CodeGenContext codeGenCtx, String qualifiedIntfName, String destLocation) { List<String> argsList = new ArrayList<String>(); InputOptions inputOptions = codeGenCtx.getInputOptions(); PkgToNSMappingList pkgNsMapList = getPkgToNSMapList(inputOptions); List<String> pkgNSMapList = WSDLUtil.buildPkgNSMapList(pkgNsMapList, JAVA_2_WSDL_PKG_TO_NS_PATTERN); argsList.add("-o"); //output location argsList.add(destLocation); argsList.add("-tn"); // target namespace argsList.add(codeGenCtx.getNamespace()); argsList.add("-tp"); // target namespace prefix argsList.add("svc"); argsList.add("-stn"); // schema target namespace argsList.add(codeGenCtx.getNamespace()); argsList.add("-sn"); // service name argsList.add(codeGenCtx.getServiceAdminName()); argsList.add("-of"); // output file name 
argsList.add(getWSDLFileName(codeGenCtx.getServiceAdminName())); argsList.add("-efd"); // element form default argsList.add("qualified"); argsList.add("-afd"); // attribute form default argsList.add("unqualified"); argsList.add("-st"); // style of binding for the WSDL argsList.add("document"); argsList.add("-u"); // Binding use for the WSDL argsList.add("literal"); argsList.add("-l"); // location URL if (CodeGenUtil.isEmptyString( inputOptions.getServiceLocation())) { argsList.add(CodeGenConstants.DEFAULT_SERVICE_URL + codeGenCtx.getServiceAdminName()); } else { argsList.add(inputOptions.getServiceLocation()); } argsList.add("-cn"); // Class name argsList.add(qualifiedIntfName); if (codeGenCtx.isBinLocAddedToClasspath() == true) { argsList.add("-cp"); // Classpath Entries argsList.add(codeGenCtx.getBinLocation()); } if (!pkgNSMapList.isEmpty()) { for (String pkgNsMapEntry : pkgNSMapList) { argsList.add("-p2n"); argsList.add(pkgNsMapEntry); } } String[] args = argsList.toArray(new String[0]); return args; } private String[] getWSDL2JavaToolArgs( CodeGenContext codeGenCtx, String destLocation) throws CodeGenFailedException{ InputOptions inputOptions = codeGenCtx.getInputOptions(); String wsdlFileLoc = inputOptions.getInputFile(); List<String> argsList = new ArrayList<String>(); argsList.add("-o"); // all generated files location (paretn dir) argsList.add(destLocation); argsList.add("-s"); // sync style argsList.add("-l"); // language argsList.add("java"); argsList.add("-d"); // data binding argsList.add("jaxbri"); argsList.add("-ss"); // server side argsList.add("-ssi"); // Service skelton interface argsList.add("-noWSDL"); argsList.add("-noBuildXML"); argsList.add("-noMessageReceiver"); String interfacePkgName = inputOptions.getGenInterfacePackage(); if (CodeGenUtil.isEmptyString(interfacePkgName)) { Map<String, String> ns2PkgMap = WSDLUtil.getNS2PkgMappings(inputOptions); try { String className = WSDLUtil.getInterfaceName(wsdlFileLoc, null, ns2PkgMap,codeGenCtx); 
interfacePkgName = CodeGenUtil.getPackageName(className); inputOptions.setGenInterfacePackage(interfacePkgName); } catch (PreProcessFailedException ex) { throw new CodeGenFailedException( "Failed to derive interface package name. ", ex); } } argsList.add("-p"); // package name for interface argsList.add(interfacePkgName); String interfaceClassName = inputOptions.getGenInterfaceName(); if(CodeGenUtil.isEmptyString(interfaceClassName)){ Map<String, String> ns2PkgMap = WSDLUtil.getNS2PkgMappings(inputOptions); try { String className = WSDLUtil.getInterfaceName(wsdlFileLoc, interfacePkgName, ns2PkgMap,codeGenCtx); int indexOfLastDot = className.lastIndexOf("."); interfaceClassName = className.substring(indexOfLastDot+1); inputOptions.setGenInterfaceName(interfaceClassName); } catch (PreProcessFailedException ex) { throw new CodeGenFailedException( "Failed to derive interface class name. ", ex); } } argsList.add("-sin"); //name for the generated interface argsList.add(interfaceClassName); //argsList.add("-ep");//exclude package, deletes the packages mentioned thru this option. 
//argsList.add("org.ebayopensource.turmeric.runtime.types"); String derivedNS2PkgStr = getNS2PkgMappings(inputOptions); if(!CodeGenUtil.isEmptyString(derivedNS2PkgStr)) { argsList.add("-ns2p"); argsList.add(derivedNS2PkgStr); } for(String bindingFileName : inputOptions.getBindingFileNames()){ if(!CodeGenUtil.isEmptyString(bindingFileName)){ argsList.add("-EbindingFileName"); argsList.add(bindingFileName); } } String proxyHost = codeGenCtx.getInputOptions().getHttpProxyHost(); String proxyPort = codeGenCtx.getInputOptions().getHttpProxyPort(); if( !CodeGenUtil.isEmptyString(proxyHost) && !CodeGenUtil.isEmptyString(proxyPort)){ argsList.add("-http-proxy-host"); argsList.add(proxyHost); argsList.add("-http-proxy-port"); argsList.add(proxyPort); } argsList.add("-ebc"); argsList.add("org.ebayopensource.turmeric.runtime.common.exceptions.ServiceException"); argsList.add("-uri"); // wsdl file uri argsList.add(wsdlFileLoc); String[] args = argsList.toArray(new String[0]); return args; } private String getNS2PkgMappings(InputOptions inputOptions){ String derivedNS2PkgStr=""; Map<String,String> ns2PkgMap = WSDLUtil.getNS2PkgMappings(inputOptions); for(Map.Entry<String, String> keyValuePair : ns2PkgMap.entrySet()){ derivedNS2PkgStr += keyValuePair.getKey() + "=" + keyValuePair.getValue() + ","; } if(derivedNS2PkgStr.endsWith(",")) derivedNS2PkgStr = derivedNS2PkgStr.substring(0, derivedNS2PkgStr.length() - 1 ); return derivedNS2PkgStr; } private PkgToNSMappingList getPkgToNSMapList(InputOptions inputOptions) { return inputOptions.getPkgNSMappings(); } private void prettyFormatWSDL(String wsdlFilePath) { File wsdlFile = new File(wsdlFilePath); if (wsdlFile.exists()) { WSDLPrettyFormatter wsdlPrettyFormatter = new WSDLPrettyFormatter(); wsdlPrettyFormatter.prettyFormat(wsdlFile); } } private String getOpNameToCemcMapString(OpNameToCemcMappingList opNameToCemcMappings) { if (opNameToCemcMappings == null || opNameToCemcMappings.getOpNameCemcMap().size() == 0) { return null; } 
else { StringBuilder strBuilder = new StringBuilder(); for (OpNameCemcMappingType opNameCemcMapEntry : opNameToCemcMappings.getOpNameCemcMap()) { strBuilder.append(opNameCemcMapEntry.getOperationName()) .append("=") .append(opNameCemcMapEntry.getCustomErrMsgClass()) .append(","); } strBuilder.setLength(strBuilder.length()-1); return strBuilder.toString(); } } }
/* * Copyright 2001-2010 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.juddi.v3.client.config; import java.rmi.RemoteException; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.juddi.v3.client.embed.EmbeddedRegistry; import org.apache.juddi.v3.annotations.AnnotationProcessor; import org.apache.juddi.v3.client.ClassUtil; import org.apache.juddi.v3.client.transport.InVMTransport; import org.apache.juddi.v3.client.transport.Transport; import org.apache.juddi.v3.client.transport.TransportException; import org.uddi.api_v3.BindingTemplate; import org.uddi.api_v3.BusinessService; public class UDDIClerkManager { private static Log log = LogFactory.getLog(UDDIClerkManager.class); private ClientConfig clientConfig = null; private String CONFIG_FILE = "META-INF/uddi.xml"; private Properties properties = null; public UDDIClerkManager() throws ConfigurationException { super(); clientConfig = new ClientConfig(CONFIG_FILE, properties); } /** * Manages the clerks. Initiates reading the client configuration from the uddi.xml. 
* @throws ConfigurationException */ public UDDIClerkManager(String configurationFile) throws ConfigurationException { super(); clientConfig = new ClientConfig(configurationFile); } /** * Manages the clerks. Initiates reading the client configuration from the uddi.xml. * @throws ConfigurationException */ public UDDIClerkManager(String configurationFile, Properties properties) throws ConfigurationException { super(); clientConfig = new ClientConfig(configurationFile, properties); } /** * Stops the clerks. * @throws ConfigurationException */ public void stop() throws ConfigurationException { log.info("Stopping UDDI Clerks for manager " + clientConfig.getManagerName()); releaseResources(); UDDIClientContainer.removeClerkManager(getName()); //If running in embedded mode if (InVMTransport.class.getCanonicalName().equals(getClientConfig().getHomeNode().getProxyTransport())) { log.info("Shutting down embedded Server"); stopEmbeddedServer(); } log.info("UDDI Clerks shutdown completed for manager " + clientConfig.getManagerName()); } private void releaseResources() { unRegisterBindingsOfAnnotatedServices(true); } /** * Initializes the UDDI Clerk. 
* @throws ConfigurationException */ public void start() throws ConfigurationException { if (UDDIClientContainer.addClerkManager(this)) { //If running in embedded mode if (InVMTransport.class.getCanonicalName().equals(getClientConfig().getHomeNode().getProxyTransport())) { log.info("Starting embedded Server"); startEmbeddedServer(); } Runnable runnable = new BackGroundRegistration(this); Thread thread = new Thread(runnable); thread.start(); } } protected void startEmbeddedServer() throws ConfigurationException { try { String embeddedServerClass = getClientConfig().getHomeNode().getProperties().getProperty("embeddedServer","org.apache.juddi.v3.client.embed.JUDDIRegistry"); Class<?> clazz = ClassUtil.forName(embeddedServerClass, this.getClass()); EmbeddedRegistry embeddedRegistry = (EmbeddedRegistry) clazz.newInstance(); embeddedRegistry.start(); } catch (Exception e) { throw new ConfigurationException(e.getMessage(),e); } } protected void stopEmbeddedServer() throws ConfigurationException { try { String embeddedServerClass = getClientConfig().getHomeNode().getProperties().getProperty("embeddedServer","org.apache.juddi.v3.client.embed.JUDDIRegistry"); Class<?> clazz = ClassUtil.forName(embeddedServerClass, this.getClass()); EmbeddedRegistry embeddedRegistry = (EmbeddedRegistry) clazz.newInstance(); embeddedRegistry.stop(); } catch (Exception e) { throw new ConfigurationException(e.getMessage(),e); } } public void restart() throws ConfigurationException { stop(); start(); } /** * Saves the clerk and node info from the uddi.xml to the home jUDDI registry. * This info is needed if you want to JUDDI Server to do XRegistration/"replication". 
*/ public void saveClerkAndNodeInfo() { Map<String,UDDIClerk> uddiClerks = clientConfig.getUDDIClerks(); if (uddiClerks.size() > 0) { //obtaining a clerk that can write to the home registry UDDIClerk homeClerk=null; for (UDDIClerk clerk : uddiClerks.values()) { if (clerk.getUDDINode().isHomeJUDDI()) { homeClerk = clerk; } } //registering nodes and clerks if (homeClerk!=null) { int numberOfHomeJUDDIs=0; for (UDDINode uddiNode : clientConfig.getUDDINodes().values()) { if (uddiNode.isHomeJUDDI()) numberOfHomeJUDDIs++; homeClerk.saveNode(uddiNode.getApiNode()); } if (numberOfHomeJUDDIs==1) { for (UDDIClerk clerk : clientConfig.getUDDIClerks().values()) { homeClerk.saveClerk(clerk); } } else { log.error("The client config needs to have one homeJUDDI node and found " + numberOfHomeJUDDIs); } } else { log.debug("No home clerk found."); } } } /** * X-Register services listed in the uddi.xml */ public void xRegister() { log.debug("Starting cross registration..."); //XRegistration of listed businesses Set<XRegistration> xBusinessRegistrations = clientConfig.getXBusinessRegistrations(); for (XRegistration xRegistration : xBusinessRegistrations) { xRegistration.xRegisterBusiness(); } //XRegistration of listed serviceBindings Set<XRegistration> xServiceBindingRegistrations = clientConfig.getXServiceBindingRegistrations(); for (XRegistration xRegistration : xServiceBindingRegistrations) { xRegistration.xRegisterServiceBinding(); } log.debug("Cross registration completed"); } /** * Registers services to UDDI using a clerk, and the uddi.xml * configuration. 
*/ public void registerAnnotatedServices() { Map<String,UDDIClerk> uddiClerks = clientConfig.getUDDIClerks(); if (uddiClerks.size() > 0) { AnnotationProcessor ap = new AnnotationProcessor(); for (UDDIClerk uddiClerk : uddiClerks.values()) { Collection<BusinessService> services = ap.readServiceAnnotations( uddiClerk.getClassWithAnnotations(),uddiClerk.getUDDINode().getProperties()); for (BusinessService businessService : services) { log.info("Node=" + uddiClerk.getUDDINode().getApiNode().getName()); uddiClerk.register(businessService, uddiClerk.getUDDINode().getApiNode()); } } } } /** * Removes the service and all of its bindingTemplates of the annotated classes. * @throws TransportException * @throws RemoteException */ public void unRegisterAnnotatedServices() { Map<String,UDDIClerk> clerks = clientConfig.getUDDIClerks(); if (clerks.size() > 0) { AnnotationProcessor ap = new AnnotationProcessor(); for (UDDIClerk clerk : clerks.values()) { Collection<BusinessService> services = ap.readServiceAnnotations( clerk.getClassWithAnnotations(),clerk.getUDDINode().getProperties()); for (BusinessService businessService : services) { clerk.unRegisterService(businessService.getServiceKey(),clerk.getUDDINode().getApiNode()); } } } } /** * Removes the bindings of the services in the annotated classes. Multiple nodes may register * the same service using different BindingTempates. If the last BindingTemplate is removed * the service can be removed as well. * * @param removeServiceWithNoBindingTemplates - if set to true it will remove the service if there * are no other BindingTemplates. 
*/ public void unRegisterBindingsOfAnnotatedServices(boolean removeServiceWithNoBindingTemplates) { Map<String,UDDIClerk> clerks = clientConfig.getUDDIClerks(); if (clerks.size() > 0) { AnnotationProcessor ap = new AnnotationProcessor(); for (UDDIClerk clerk : clerks.values()) { Collection<BusinessService> services = ap.readServiceAnnotations( clerk.getClassWithAnnotations(),clerk.getUDDINode().getProperties()); for (BusinessService businessService : services) { if (businessService.getBindingTemplates() != null) { List<BindingTemplate> bindingTemplates = businessService.getBindingTemplates().getBindingTemplate(); for (BindingTemplate bindingTemplate : bindingTemplates) { clerk.unRegisterBinding(bindingTemplate.getBindingKey(), clerk.getUDDINode().getApiNode()); } } if (removeServiceWithNoBindingTemplates) { try { BusinessService existingService = clerk.findService(businessService.getServiceKey(), clerk.getUDDINode().getApiNode()); if (existingService.getBindingTemplates()==null || existingService.getBindingTemplates().getBindingTemplate().size()==0) { clerk.unRegisterService(businessService.getServiceKey(),clerk.getUDDINode().getApiNode()); } } catch (Exception e) { log.error(e.getMessage(),e); } } } } } } public ClientConfig getClientConfig() { return clientConfig; } public String getName() { return clientConfig.getManagerName(); } /** * @deprecated, use the getTransport(String nodeName) instead. * Returns the "default" jUDDI nodes Transport. * * @return * @throws ConfigurationException */ public Transport getTransport() throws ConfigurationException { return getTransport("default"); } /** * Returns the transport defined for the node with the given nodeName. 
* @param nodeName * @return * @throws ConfigurationException */ public Transport getTransport(String nodeName) throws ConfigurationException { try { String clazz = clientConfig.getHomeNode().getProxyTransport(); String managerName = clientConfig.getManagerName(); Class<?> transportClass = ClassUtil.forName(clazz, UDDIClerkManager.class); if (transportClass!=null) { Transport transport = (Transport) transportClass.getConstructor(String.class,String.class).newInstance(managerName,nodeName); return transport; } else { throw new ConfigurationException ("ProxyTransport was not defined in the " + clientConfig.getConfigurationFile()); } } catch (Exception e) { throw new ConfigurationException (e.getMessage(),e); } } }
// Copyright 2012 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.collide.client.util; import com.google.collide.json.shared.JsonArray; import com.google.collide.shared.util.JsonCollections; import com.google.gwt.user.client.Timer; import elemental.dom.Node; import elemental.events.Event; import elemental.events.EventListener; import elemental.events.EventRemover; import elemental.events.EventTarget; import elemental.events.MouseEvent; import elemental.html.Element; /** * Controller to manage a group of elements that are hovered and unhovered * together. For example, you can use this controller to link a button to its * submenu. * * When the user mouses over any of the "partner" elements, the controller calls * {@link HoverListener#onHover()}. When the user mouses out of all partner * elements and does not mouse over one of the elements within a fixed delay, * the controller calls {@link UnhoverListener#onUnhover()}. * * The default delay is 1300ms, but you can override this. We recommend using * one of the static values so that similar UI components use the same delay. */ public class HoverController { /** * The default unhover delay. */ public static final int DEFAULT_UNHOVER_DELAY = 1300; /** * The unhover delay used for dropdown UI components, such as button menus. */ public static final int DROP_DOWN_UNHOVER_DELAY = 300; /** * Listener interface to be notified of hover state changes. 
*/ public static interface HoverListener { /** * Handles the event when the user hovers one of the partner elements. */ public void onHover(); } /** * Listener interface to be notified of hover state changes. */ public static interface UnhoverListener { /** * Handles the event when the user unhovers one of the partner elements, and * does not hover another partner element within the fixed delay. */ public void onUnhover(); } /** * A helper class to store a partner element and it's event removers. */ private class PartnerHolder { private final Element element; private final EventRemover mouseOverRemover; private final EventRemover mouseOutRemover; PartnerHolder(final Element element) { this.element = element; mouseOverRemover = element.addEventListener(Event.MOUSEOVER, new EventListener() { @Override public void handleEvent(Event evt) { if (relatedTargetOutsideElement((MouseEvent) evt)) { hover(); } } }, false); mouseOutRemover = element.addEventListener(Event.MOUSEOUT, new EventListener() { @Override public void handleEvent(Event evt) { if (relatedTargetOutsideElement((MouseEvent) evt)) { unhover(); } } }, false); } Element getElement() { return element; } void teardown() { mouseOverRemover.remove(); mouseOutRemover.remove(); } /** * Checks if the related target of the MouseEvent (the "from" element for a * mouseover, the "to" element for a mouseout) is actually outside of the * partner element. If the target element contains children, we will receive * mouseover/mouseout events when the mouse moves over/out of the children, * even if the mouse is still within the partner element. These * intra-element events don't affect the hover state of the partner element, * so we want to ignore them. 
*/ private boolean relatedTargetOutsideElement(MouseEvent evt) { EventTarget relatedTarget = evt.getRelatedTarget(); return relatedTarget == null || !element.contains((Node) relatedTarget); } } private HoverListener hoverListener; private UnhoverListener unhoverListener; private boolean isHovering = false; private int unhoverDelay = DEFAULT_UNHOVER_DELAY; private Timer unhoverTimer; private final JsonArray<PartnerHolder> partners = JsonCollections.createArray(); /** * Adds a partner element to this controller. See class javadoc for * an explanation of the interaction between partner elements. */ public void addPartner(Element element) { if (!hasPartner(element)) { partners.add(new PartnerHolder(element)); } } /** * Removes a partner element from this controller. */ public void removePartner(Element element) { for (int i = 0, n = partners.size(); i < n; ++i) { PartnerHolder holder = partners.get(i); if (holder.getElement() == element) { holder.teardown(); partners.remove(i); break; } } } private boolean hasPartner(Element element) { for (int i = 0, n = partners.size(); i < n; ++i) { PartnerHolder holder = partners.get(i); if (holder.getElement() == element) { return true; } } return false; } /** * Sets the listener that will receive events when any of the partner elements * is hovered. */ public void setHoverListener(HoverListener listener) { this.hoverListener = listener; } /** * Sets the listener that will receive events when all of the partner elements * are unhovered. */ public void setUnhoverListener(UnhoverListener listener) { this.unhoverListener = listener; } /** * Sets the delay between the last native mouseout event and when * {@link UnhoverListener#onUnhover()} is called. If the user mouses out of * one partner element and over another partner element within the unhover * delay, the unhover event is not triggered. * * If the delay is zero, the unhover listener is called synchronously. If the * delay is less than zero, the unhover listener is never called. 
* * @param delay the delay in milliseconds */ public void setUnhoverDelay(int delay) { this.unhoverDelay = delay; } /** * Cancels the unhover timer if one is pending. This will prevent an unhover * listener from firing until the next time the user mouses out of a partner * element. */ public void cancelUnhoverTimer() { if (unhoverTimer != null) { unhoverTimer.cancel(); unhoverTimer = null; } } /** * Flushes the unhover timer if one is pending. This will reset the hover * controller to a state where it can fire a hover event the next time the * element is hovered. */ public void flushUnhoverTimer() { if (unhoverTimer != null) { cancelUnhoverTimer(); unhoverNow(); } } /** * Updates the state of the controller to indicate that the user is hovering * over one of the partner elements. */ private void hover() { cancelUnhoverTimer(); // Early exit if already hovering. if (isHovering) { return; } isHovering = true; if (hoverListener != null) { hoverListener.onHover(); } } /** * Starts a timer that will update the controller to the unhover state if the * user doesn't hover one of the partner elements within the specified unhover * delay. */ private void unhover() { // Early exit if already unhovering or if the delay is negative. if (!isHovering || unhoverDelay < 0) { return; } if (unhoverDelay == 0) { unhoverNow(); } else if (unhoverTimer == null) { // Wait a short time before unhovering so the user has a chance to move // the mouse from one partner to another. unhoverTimer = new Timer() { @Override public void run() { unhoverNow(); } }; unhoverTimer.schedule(unhoverDelay); } } /** * Updates the state of the controller to indicate that the user is no longer * hovering any of the partner elements. */ private void unhoverNow() { cancelUnhoverTimer(); isHovering = false; if (unhoverListener != null) { unhoverListener.onUnhover(); } } }
// ---------------------------------------------------------------------------- // Copyright 2007-2013, GeoTelematic Solutions, Inc. // All rights reserved // ---------------------------------------------------------------------------- // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // // ---------------------------------------------------------------------------- // Change History: // 2008/06/20 Martin D. Flynn // -Initial release // 2009/05/24 Martin D. Flynn // -Changed "addDBFields" method 'alwaysAdd' to 'defaultAdd' to only add fields // if not explicitly specified in the runtime conf file. // 2010/09/09 Martin D. Flynn // -Added unit conversion for "getOptionalEventField". // 2011/08/21 Martin D. Flynn // -Modified optional group/device map field specification. 
// ----------------------------------------------------------------------------

package org.opengts;

import java.util.Vector;
import java.util.Locale;

import org.opengts.util.*;
import org.opengts.dbtools.*;
import org.opengts.dbtypes.*;
import org.opengts.geocoder.*;
import org.opengts.db.*;
import org.opengts.db.tables.*;
import org.opengts.db.dmtp.*;

/**
*** Provides startup initialization.<br>
*** This class is loaded by <code>DBConfig.java</code> at startup initialization time, and
*** various methods are called within this class to allow custom DB initialization.<br>
*** The actual class loaded and executed by <code>DBConfig</code> can be overridden by placing
*** the following line in the system 'default.conf' and 'webapp.conf' files:
*** <pre>
***   startup.initClass=org.opengts.StartupInit
*** </pre>
*** Where 'org.opengts.opt.StartupInit' is the name of the class you wish to have loaded in
*** place of this class file.
**/

public class StartupInit
    // standard/parent StartupInit class
    implements DBConfig.DBInitialization, DBFactory.CustomFactoryHandler
{

    // ------------------------------------------------------------------------

    /* local property keys */
    // NOTE(review): these read like constants and could presumably be 'static final';
    // left as instance fields here (comment-only update).
    private String PROP_RuleFactory_class       = "RuleFactory.class";
    private String PROP_PasswordHandler_class   = "PasswordHandler.class";

    /* extra map data fields */
    // Runtime-config property keys naming the optional columns shown on the
    // fleet/group map and the device/vehicle map, respectively.
    private String PROP_OptionalEventFields_FleetMap[]  = {
        "OptionalEventFields.FleetMap",
        "OptionalEventFields.GroupMap",
        "OptionalEventFields.Device"
    };
    private String PROP_OptionalEventFields_DeviceMap[] = {
        "OptionalEventFields.DeviceMap",
        "OptionalEventFields.EventData"
    };

    // ------------------------------------------------------------------------

    // Lazy-init guard: set once _getRuleFactoryInstance() has made its one
    // initialization attempt (successful or not).
    private boolean didInitRuleFactory = false;
    // Cached RuleFactory instance (null if initialization failed or not yet run).
    private RuleFactory ruleFactoryInstance = null;

    // ------------------------------------------------------------------------

    /**
    *** Constructor.<br>
    *** (Created with the DBConfig db startup initialization)
    **/
    public StartupInit()
    {
        super(); // <-- Object

        /* set a default "User-Agent" in the config file properties (if not already present) */
        RTProperties cfgFileProps = RTConfig.getConfigFileProperties();
        String userAgent = cfgFileProps.getString(RTKey.HTTP_USER_AGENT, null, false);
        if (StringTools.isBlank(userAgent)) {
            // no default "http.userAgent" defined in the config-file properties
            cfgFileProps.setString(RTKey.HTTP_USER_AGENT, "OpenGTS/" + org.opengts.Version.getVersion());
        }
        //Print.logInfo("HTTP User-Agent set to '%s'", HTMLTools.getHttpUserAgent());

    }

    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------
    // DBConfig.DBInitialization interface

    /**
    *** Pre-DBInitialization.<br>
    *** This method is called just before the standard database factory classes are initialized/added.
    **/
    public void preInitialization()
    {
        // Only log memory usage when running inside a servlet container.
        if (RTConfig.isWebApp()) {
            OSTools.printMemoryUsage();
        }
    }

    // ------------------------------------------------------------------------

    /**
    *** Opportunity to add custom DBFactory classes.<br>
    *** This method is called just after all standard database factory classes have been intialized/added.
    *** Additional database factories that are needed for the custom installation may be added here.
    **/
    public void addTableFactories()
    {

        /* MUST add standard DBFactories */
        DBConfig.addTableFactories();

        /* add custom DBFactories here */
        //DBAdmin.addTableFactory("com.example.db.tables.MyCustomTable", true);

        /* add custom RuleFactory */
        // See "RuleFactoryExample.java" for more information
        if (!Device.hasRuleFactory()) {
            // To add the RuleFactoryExample module:
            //   Device.setRuleFactory(new RuleFactoryExample());
            // To add a different customized RuleFactory implementation:
            //   Device.setRuleFactory(new org.opengts.extra.rule.RuleFactoryLite());
            RuleFactory rf = this._getRuleFactoryInstance();
            if (rf != null) {
                Device.setRuleFactory(rf);
                Print.logInfo("RuleFactory installed: " + StringTools.className(rf));
            }
        }

        /* add custom map event data handler */
        EventUtil.OptionalEventFields optEvFlds = this.createOptionalEventFieldsHandler();
        EventUtil.setOptionalEventFieldHandler(optEvFlds);
        Print.logDebug("Installed OptionalEventFieldHandler: " + StringTools.className(optEvFlds));

    }

    /**
    *** Returns the configured RuleFactory instance, creating it on first call.<br>
    *** The class name is read from the "RuleFactory.class" property; if blank, the
    *** class "&lt;PACKAGE_EXTRA_&gt;rule.RuleFactoryLite" is attempted instead.
    *** Returns null if the class cannot be located or instantiated (only one
    *** initialization attempt is ever made).
    **/
    private RuleFactory _getRuleFactoryInstance()
    {

        /* already initialized? */
        if (this.ruleFactoryInstance != null) {
            return this.ruleFactoryInstance;
        } else if (this.didInitRuleFactory) {
            // a previous attempt failed; don't retry
            return null;
        }
        this.didInitRuleFactory = true;

        /* get RuleFactory class */
        Class rfClass = null;
        String rfClassName = RTConfig.getString(PROP_RuleFactory_class,null);
        try {
            String rfcName = !StringTools.isBlank(rfClassName)?
                rfClassName : (DBConfig.PACKAGE_EXTRA_ + "rule.RuleFactoryLite");
            rfClass = Class.forName(rfcName);
            rfClassName = rfcName;
        } catch (Throwable th) {
            // Only log when a class name was explicitly configured; a missing
            // default "RuleFactoryLite" is silently ignored.
            if (!StringTools.isBlank(rfClassName)) {
                Print.logException("Unable to locate RuleFactory class: " + rfClassName, th);
            }
            return null;
        }

        /* instantiate RuleFactory */
        try {
            this.ruleFactoryInstance = (RuleFactory)rfClass.newInstance();
            return this.ruleFactoryInstance;
        } catch (Throwable th) {
            Print.logException("Unable to instantiate RuleFactory: " + rfClassName, th);
            return null;
        }

    }

    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------

    /**
    *** Holder pairing an optional-event-field name with its resolved DBField
    *** (the DBField is null when the name did not match a column in the factory).
    **/
    private static class EVField
    {
        private String  name  = "";
        private DBField field = null;
        public EVField(String name) {
            // name-only entry; no backing DBField
            this.name  = StringTools.trim(name);
            this.field = null;
        }
        public EVField(DBField field) {
            this.name  = (field != null)? field._getName() : "";
            this.field = field;
        }
        public String getName() {
            return this.name; // not null
        }
        public boolean hasDBField() {
            return (this.field != null);
        }
        public DBField getDBField() {
            return this.field;
        }
    }

    /**
    *** Converts an array of field names into EVField holders, resolving each
    *** name against the specified DBFactory where possible.<br>
    *** Returns null if the factory is null, the name list is empty, or no
    *** non-blank names remain after trimming.
    **/
    protected EVField[] parseFields(DBFactory factory, String flda[])
    {
        if (factory == null) {
            return null;
        } else if (ListTools.isEmpty(flda)) {
            // no defined field names, return nothing
            return null;
        } else {
            //return factory.getFields(flda);
            java.util.List<EVField> fldList = new Vector<EVField>();
            for (int i = 0; i < flda.length; i++) {
                String n = StringTools.trim(flda[i]);
                if (!StringTools.isBlank(n)) {
                    // keep the name even if it doesn't resolve to a DBField
                    DBField dfld = factory.getField(n);
                    fldList.add((dfld != null)? new EVField(dfld) : new EVField(n));
                }
            }
            return !ListTools.isEmpty(fldList)? fldList.toArray(new EVField[fldList.size()]) : null;
        }
    }

    // ------------------------------------------------------------------------

    // Recognized optional-event-field key names.
    // NOTE(review): KEY_lastFuelLevel/KEY_lastFuelLevelVolume appear unused in
    // this class - possibly referenced by subclasses; verify before removing.
    private static final String KEY_fuelLevel           = "fuelLevel";
    private static final String KEY_fuelLevelVolume     = "fuelLevelVolume";
    private static final String KEY_elapsedTimeStopped  = "elapsedTimeStopped";
    private static final String KEY_geozoneName         = "geozoneName";
    private static final String KEY_lastFuelLevel       = "lastFuelLevel";
    private static final String KEY_lastFuelLevelVolume = "lastFuelLevelVolume";

    /**
    *** Creates a generic custom EventUtil.OptionalEventFields instance
    *** @return An EventUtil.OptionalEventFields instance
    **/
    protected EventUtil.OptionalEventFields createOptionalEventFieldsHandler()
    {

        /* always use EventData to resolve optionalEventFields? */
        final boolean useEventDataToResolveFields = true;

        /* Group/Fleet map fields (resolved against the Device table) */
        final EVField optFleetFields[] = this.parseFields(Device.getFactory()   ,
            RTConfig.getStringArray(PROP_OptionalEventFields_FleetMap , null));

        /* Device/Vehicle map fields (resolved against the EventData table) */
        final EVField optVehicFields[] = this.parseFields(EventData.getFactory(),
            RTConfig.getStringArray(PROP_OptionalEventFields_DeviceMap, null));

        /* return OptionalEventFields instance */
        return new EventUtil.OptionalEventFields() {

            // return number of 'optional' fields
            public int getOptionalEventFieldCount(boolean isFleet) {
                if (isFleet) {
                    // Group/Fleet map count
                    return ListTools.size(optFleetFields);
                } else {
                    // Device/Vehicle map count
                    return ListTools.size(optVehicFields);
                }
            }

            // return the title for a specific 'optional' field
            public String getOptionalEventFieldTitle(int ndx, boolean isFleetMap, Locale locale) {
                // invalid argument checks
                if (ndx < 0) { return ""; }
                // default vars
                I18N i18n = I18N.getI18N(StartupInit.class, locale);
                // check map type
                if (isFleetMap) {
                    // "Fleet" map title
                    if (ndx >= ListTools.size(optFleetFields)) { return ""; }
                    String name = optFleetFields[ndx].getName();
                    // try custom field names
                    if (name.equalsIgnoreCase(Device.FLD_linkURL)) {
                        // use the DBField's own localized title for the link column
                        DBField dbfld = optFleetFields[ndx].getDBField();
                        return dbfld.getTitle(locale);
                    }
                    // try EventData/Device.getKeyFieldValue(...)
                    if (useEventDataToResolveFields) {
                        // resolve all titles through EventData
                        Object val = EventData.getKeyFieldTitle(name, ""/*arg*/, locale);
                        return StringTools.trim(val);
                    } else
                    if (name.equalsIgnoreCase(KEY_fuelLevel)          ||
                        name.equalsIgnoreCase(KEY_fuelLevelVolume)    ||
                        name.equalsIgnoreCase(KEY_elapsedTimeStopped) ||
                        name.equalsIgnoreCase(KEY_geozoneName)          ) {
                        Object val = EventData.getKeyFieldTitle(name, ""/*arg*/, locale);
                        return StringTools.trim(val);
                    } else {
                        // prefer Device title, fall back to EventData
                        Object val = Device.getKeyFieldTitle(name, ""/*arg*/, locale);
                        if (val == null) { val = EventData.getKeyFieldTitle(name, ""/*arg*/, locale); }
                        return StringTools.trim(val);
                    }
                } else {
                    // "Device" map title
                    if (ndx >= ListTools.size(optVehicFields)) { return ""; }
                    String name = optVehicFields[ndx].getName();
                    // try custom field names
                    //   none ...
                    // try EventData.getKeyFieldValue(...)
                    Object val = EventData.getKeyFieldTitle(name, "", locale);
                    if (false) {
                        // dead code on purpose:
                        // keep these strings in the LocatString_XX.properties files
                        String s1 = i18n.getString("StartupInit.fuelLevelVolume"   , "Fuel Volume" );
                        String s2 = i18n.getString("StartupInit.elapsedTimeStopped", "Time Stopped");
                        String s3 = i18n.getString("StartupInit.geozoneName"       , "Geozone Name");
                        // i18n.getString("StartupInit.info.digInput", "Digital Input");
                    }
                    return StringTools.trim(val);
                }
            }

            // return the value for a specific 'optional' field
            public String getOptionalEventFieldValue(int ndx, boolean isFleetMap, Locale locale, EventDataProvider edp) {
                // invalid argument checks
                if (ndx < 0) { return ""; }
                else if (!(edp instanceof EventData)) { return ""; }
                // default vars
                EventData event = (EventData)edp; // non-null
                Account account = event.getAccount();
                Device  device  = event.getDevice();
                if ((account == null) || (device == null)) { return ""; }
                // check map type
                if (isFleetMap) {
                    // Group/Fleet map value
                    if (ndx >= ListTools.size(optFleetFields)) { return ""; }
                    String name = optFleetFields[ndx].getName();
                    // try custom field names
                    if (name.equalsIgnoreCase(Device.FLD_linkURL)) {
                        // NOTE! Enabling 'getLinkURL' and 'getLinkDescrption' requires
                        // that the following property be specified a '.conf' file:
                        //   startupInit.Device.LinkFieldInfo=true
                        String url = device.getLinkURL();
                        if (!StringTools.isBlank(url)) {
                            String desc = device.getLinkDescription();
                            if (StringTools.isBlank(desc)) {
                                // no description configured; use localized "Link"
                                BasicPrivateLabel bpl = Account.getPrivateLabel(account);
                                I18N i18n = I18N.getI18N(StartupInit.class, bpl.getLocale());
                                desc = i18n.getString("StartupInit.info.link", "Link");
                            }
                            String a = "<a href='"+url+"' target='_blank'>"+desc+"</a>";
                            return a;
                        }
                        return "";
                    }
                    // try EventData/Device.getKeyFieldValue(...)
                    BasicPrivateLabel bpl = Account.getPrivateLabel(account);
                    if (useEventDataToResolveFields) {
                        // resolve all values through the EventData record
                        Object val = event.getKeyFieldValue(name, ""/*arg*/, bpl);
                        return StringTools.trim(val);
                    } else
                    if (name.equalsIgnoreCase(KEY_fuelLevel)          ||
                        name.equalsIgnoreCase(KEY_fuelLevelVolume)    ||
                        name.equalsIgnoreCase(KEY_elapsedTimeStopped) ||
                        name.equalsIgnoreCase(KEY_geozoneName)          ) {
                        Object val = event.getKeyFieldValue(name, ""/*arg*/, bpl);
                        return StringTools.trim(val);
                    } else {
                        // prefer Device value, fall back to EventData
                        Object val = device.getKeyFieldValue(name, ""/*arg*/, bpl);
                        if (val == null) { val = event.getKeyFieldValue(name, ""/*arg*/, bpl); }
                        return StringTools.trim(val);
                    }
                } else {
                    // "Device" map value
                    if (ndx >= ListTools.size(optVehicFields)) { return ""; }
                    String name = optVehicFields[ndx].getName();
                    // try custom field names
                    //   none ...
                    // try EventData.getKeyFieldValue(...)
                    BasicPrivateLabel bpl = Account.getPrivateLabel(account);
                    Object val = event.getKeyFieldValue(name, "", bpl);
                    return StringTools.trim(val);
                }
            }

        };

    }

    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------

    /**
    *** Post-DBInitialization.<br>
    *** This method is called after all startup initialization has completed.
    **/
    public void postInitialization()
    {

        /* init StatusCode descriptions */
        StatusCodes.initStatusCodes(null); // include all status codes
        /*
        //The following specifies the list of specific status codes to include:
        StatusCodes.initStatusCodes(new int[] {
            StatusCodes.STATUS_LOCATION,
            StatusCodes.STATUS_MOTION_START,
            StatusCodes.STATUS_MOTION_IN_MOTION,
            StatusCodes.STATUS_MOTION_STOP,
            StatusCodes.STATUS_MOTION_DORMANT,
            ... include other StatusCodes here ...
        });
        */

        /* This sets the description for all accounts, all 'private.xml' domains, and all Localizations. */
        //StatusCodes.SetDescription(StatusCodes.STATUS_LOCATION    , "Marker");
        //StatusCodes.SetDescription(StatusCodes.STATUS_MOTION_START, "Start Point");
        //StatusCodes.SetDescription(StatusCodes.STATUS_MOTION_STOP , "Stop Point");

        /* Install custom PasswordHandler */
        this.initPasswordHandler();

    }

    /**
    *** Installs the default PasswordHandler based on the "PasswordHandler.class"
    *** property:<br>
    *** - blank: leave the existing handler in place<br>
    *** - "md5"/"md5plain"/"default": install a GeneralPasswordHandler with that encoding<br>
    *** - otherwise: treat the value as a class name and instantiate it reflectively
    ***   (failures are logged, not thrown).
    **/
    protected void initPasswordHandler()
    {
        String phClassName = RTConfig.getString(PROP_PasswordHandler_class,null);
        if (StringTools.isBlank(phClassName)) {
            // ignore (keep whatever handler is already installed)
        } else
        if (phClassName.equalsIgnoreCase("md5")) {
            RTProperties rtp = new RTProperties();
            rtp.setString(GeneralPasswordHandler.PROP_passwordEncoding, "md5");
            GeneralPasswordHandler pwh = new GeneralPasswordHandler(rtp);
            Account.setDefaultPasswordHandler(pwh);
        } else
        if (phClassName.equalsIgnoreCase("md5plain")) {
            RTProperties rtp = new RTProperties();
            rtp.setString(GeneralPasswordHandler.PROP_passwordEncoding, "md5plain");
            GeneralPasswordHandler pwh = new GeneralPasswordHandler(rtp);
            Account.setDefaultPasswordHandler(pwh);
        } else
        if (phClassName.equalsIgnoreCase("default")) {
            GeneralPasswordHandler pwh = new GeneralPasswordHandler();
            Account.setDefaultPasswordHandler(pwh);
        } else {
            try {
                Class phClass = Class.forName(phClassName);
                PasswordHandler pwh = (PasswordHandler)phClass.newInstance();
                Account.setDefaultPasswordHandler(pwh);
            } catch (Throwable th) { // ClassCastException, ClassNotFoundException, ...
                Print.logException("Unable to instantiate PasswordHandler: " + phClassName, th);
            }
        }
        //Print.logDebug("Default PasswordHandler: " + Account.getDefaultPasswordHandler());
    }

    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------
    // DBFactory.CustomFactoryHandler interface

    /**
    *** Create a DBFactory instance.  The DBFactory initialization process will call this method
    *** when creating a DBFactory for a given table, allowing this class to override/customize
    *** any specific table attributes.  If this method returns null, the default table DBFactory
    *** will be created.
    *** @param tableName  The name of the table
    *** @param field      The DBFields in the table
    *** @param keyType    The table key type
    *** @param rcdClass   The DBRecord subclass representing the table
    *** @param keyClass   The DBRecordKey subclass representing the table key
    *** @param editable   True if this table should be editable, false otherwise.
    ***                   This value is used by the GTSAdmin application.
    *** @param viewable   True if this table should be viewable, false otherwise.
    ***                   An 'editable' table is automatically considered viewable.
    ***                   This value is used by the GTSAdmin application.
    *** @return The DBFactory instance (or null to indicate that the default DBFactory should be created).
    ***/
    public <T extends DBRecord<T>> DBFactory<T> createDBFactory(
        String tableName,
        DBField field[],
        DBFactory.KeyType keyType,
        Class<T> rcdClass,
        Class<? extends DBRecordKey<T>> keyClass,
        boolean editable, boolean viewable)
    {
        //Print.logInfo("Intercept creation of DBFactory: %s", tableName);
        return null; // returning null indicates default behavior
    }

    /**
    *** Augment DBFactory fields.  This method is called before fields have been added to any
    *** given DBFactory.  This method may alter the list of DBFields by adding new fields, or
    *** altering/deleting existing fields.  However, deleting/altering fields that have other
    *** significant systems dependencies may cause unpredictable behavior.
    *** @param factory  The DBFactory
    *** @param fields   The list of fields scheduled to be added to the DBFactory
    *** @return The list of fields which will be added to the DBFactory
    **/
    public java.util.List<DBField> selectFields(DBFactory factory, java.util.List<DBField> fields)
    {
        String tblName = factory.getUntranslatedTableName();
        // These additional fields can be enabled by placing the appropriate/specified
        // property "<key>=true" in a 'custom.conf' file.

        /* Account */
        if (tblName.equalsIgnoreCase(Account.TABLE_NAME())) {
            // startupInit.Account.AddressFieldInfo=true
            addDBFields(tblName, fields, Account.OPTCOLS_AddressFieldInfo        , false, Account.AddressFieldInfo);
            // startupInit.Account.MapLegendFieldInfo=true
            addDBFields(tblName, fields, Account.OPTCOLS_MapLegendFieldInfo      , false, Account.MapLegendFieldInfo);
            // startupInit.Account.AccountManagerInfo=true
            addDBFields(tblName, fields, Account.OPTCOLS_AccountManagerInfo      , false, Account.AccountManagerInfo);
            // startupInit.Account.DataPushInfo=true
            addDBFields(tblName, fields, Account.OPTCOLS_DataPushInfo            , false, Account.DataPushInfo);
            return fields;
        }

        /* User */
        if (tblName.equalsIgnoreCase(User.TABLE_NAME())) {
            // startupInit.User.AddressFieldInfo=true
            addDBFields(tblName, fields, User.OPTCOLS_AddressFieldInfo           , false, User.AddressFieldInfo);
            // startupInit.User.ExtraFieldInfo=true
            addDBFields(tblName, fields, User.OPTCOLS_ExtraFieldInfo             , false, User.ExtraFieldInfo);
            return fields;
        }

        /* Device */
        if (tblName.equalsIgnoreCase(Device.TABLE_NAME())) {
            // startupInit.Device.NotificationFieldInfo=true
            addDBFields(tblName, fields, Device.OPTCOLS_NotificationFieldInfo    , false, Device.NotificationFieldInfo);
            // startupInit.Device.GeoCorridorFieldInfo=true
            // default-enabled when the rule package is present
            boolean devGC = DBConfig.hasRulePackage();
            addDBFields(tblName, fields, Device.OPTCOLS_GeoCorridorFieldInfo     , devGC, Device.GeoCorridorFieldInfo);
            // startupInit.Device.FixedLocationFieldInfo=true
            addDBFields(tblName, fields, Device.OPTCOLS_FixedLocationFieldInfo   , false, Device.FixedLocationFieldInfo);
            // startupInit.Device.LinkFieldInfo=true
            addDBFields(tblName, fields, Device.OPTCOLS_LinkFieldInfo            , false, Device.LinkFieldInfo);
            // startupInit.Device.BorderCrossingFieldInfo=true
            // default-enabled when the account supports border-crossing
            boolean devBC = Account.SupportsBorderCrossing();
            addDBFields(tblName, fields, Device.OPTCOLS_BorderCrossingFieldInfo  , devBC, Device.BorderCrossingFieldInfo);
            // startupInit.Device.MaintOdometerFieldInfo=true
            addDBFields(tblName, fields, Device.OPTCOLS_MaintOdometerFieldInfo   , false, Device.MaintOdometerFieldInfo);
            // startupInit.Device.WorkOrderInfo=true
            addDBFields(tblName, fields, Device.OPTCOLS_WorkOrderInfo            , false, Device.WorkOrderInfo);
            // startupInit.Device.DataPushInfo=true
            addDBFields(tblName, fields, Device.OPTCOLS_DataPushInfo             , false, Device.DataPushInfo);
            return fields;
        }

        /* DeviceGroup */
        if (tblName.equalsIgnoreCase(DeviceGroup.TABLE_NAME())) {
            // startupInit.DeviceGroup.WorkOrderInfo=true
            addDBFields(tblName, fields, DeviceGroup.OPTCOLS_WorkOrderInfo       , false, DeviceGroup.WorkOrderInfo);
            return fields;
        }

        /* EventData */
        if (tblName.equalsIgnoreCase(EventData.TABLE_NAME())) {
            // startupInit.EventData.AutoIncrementIndex=true
            addDBFields(tblName, fields, EventData.OPTCOLS_AutoIncrementIndex    , false, EventData.AutoIncrementIndex);
            // startupInit.EventData.CreationTimeMillisecond=true
            addDBFields(tblName, fields, EventData.OPTCOLS_CreationTimeMillisecond, false, EventData.CreationTimeMillisecond);
            // startupInit.EventData.AddressFieldInfo=true
            addDBFields(tblName, fields, EventData.OPTCOLS_AddressFieldInfo      , false, EventData.AddressFieldInfo);
            // startupInit.EventData.GPSFieldInfo=true
            addDBFields(tblName, fields, EventData.OPTCOLS_GPSFieldInfo          , false, EventData.GPSFieldInfo);
            // startupInit.EventData.CustomFieldInfo=true
            addDBFields(tblName, fields, EventData.OPTCOLS_CustomFieldInfo       , false, EventData.CustomFieldInfo);
            // startupInit.EventData.GarminFieldInfo=true
            addDBFields(tblName, fields, EventData.OPTCOLS_GarminFieldInfo       , false, EventData.GarminFieldInfo);
            // startupInit.EventData.CANBUSFieldInfo=true
            addDBFields(tblName, fields, EventData.OPTCOLS_CANBUSFieldInfo       , false, EventData.CANBUSFieldInfo);
            // startupInit.EventData.AtmosphereFieldInfo=true
            addDBFields(tblName, fields, EventData.OPTCOLS_AtmosphereFieldInfo   , false, EventData.AtmosphereFieldInfo);
            // startupInit.EventData.ThermoFieldInfo=true
            // note: limited to the first 4 thermo fields by default (see maxCount)
            addDBFields(tblName, fields, EventData.OPTCOLS_ThermoFieldInfo       , false, EventData.ThermoFieldInfo, 4);
            // startupInit.EventData.AnalogFieldInfo=true
            addDBFields(tblName, fields, EventData.OPTCOLS_AnalogFieldInfo       , false, EventData.AnalogFieldInfo);
            // startupInit.EventData.EndOfDaySummary=true
            addDBFields(tblName, fields, EventData.OPTCOLS_EndOfDaySummary       , false, EventData.EndOfDaySummary);
            // startupInit.EventData.ServingCellTowerData=true
            addDBFields(tblName, fields, EventData.OPTCOLS_ServingCellTowerData  , false, EventData.ServingCellTowerData);
            // startupInit.EventData.NeighborCellTowerData=true
            addDBFields(tblName, fields, EventData.OPTCOLS_NeighborCellTowerData , false, EventData.NeighborCellTowerData);
            // startupInit.EventData.WorkZoneGridData=true
            addDBFields(tblName, fields, EventData.OPTCOLS_WorkZoneGridData      , false, EventData.WorkZoneGridData);
            // startupInit.EventData.LeaseRentalInfo=true
            addDBFields(tblName, fields, EventData.OPTCOLS_LeaseRentalData       , false, EventData.LeaseRentalData);
            return fields;
        }

        /* Geozone */
        if (tblName.equalsIgnoreCase(Geozone.TABLE_NAME())) {
            // startupInit.Geozone.PriorityFieldInfo
            addDBFields(tblName, fields, "startupInit.Geozone.PriorityFieldInfo" , false, Geozone.PriorityFieldInfo);
            // startupInit.Geozone.CorridorFieldInfo
            addDBFields(tblName, fields, "startupInit.Geozone.CorridorFieldInfo" , false, Geozone.CorridorFieldInfo);
            return fields;
        }

        /* leave as-is */
        return fields;

    }

    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------

    /**
    *** Add the specified fields to the table
    *** @param tblName      The table name
    *** @param tblFields    The list of table fields
    *** @param key          The boolean key used to check for permission to add these fields
    *** @param defaultAdd   The default if the property is not explicitly specfified
    *** @param customFields The fields to add, assuming the boolean key returns true.
    **/
    protected void addDBFields(String tblName, java.util.List<DBField> tblFields, String key,
        boolean defaultAdd, DBField customFields[])
    {
        // delegate with no field-count limit
        this.addDBFields(tblName, tblFields, key, defaultAdd, customFields, -1);
    }

    /**
    *** Add the specified fields to the table
    *** @param tblName      The table name
    *** @param tblFields    The list of table fields
    *** @param key          The boolean key used to check for permission to add these fields
    *** @param defaultAdd   The default if the property is not explicitly specfified
    *** @param customFields The fields to add, assuming the boolean key returns true.
    *** @param maxCount     The maximum number of fields to add from the customFields array
    **/
    protected void addDBFields(String tblName, java.util.List<DBField> tblFields, String key,
        boolean defaultAdd, DBField customFields[], int maxCount)
    {

        /* add additional fields? */
        boolean addFields = false;
        if (StringTools.isBlank(key)) {
            // no permission key: fall back to the supplied default
            addFields = defaultAdd;
        } else
        if (maxCount >= 0) {
            // a field-count limit applies: the property value may be either a
            // boolean, or a positive integer overriding 'maxCount'
            String keyVal = RTConfig.getString(key,null); // yes|no|true|false|0|1|2|3|4|5|6|7|9
            int valInt = StringTools.isInt(keyVal,true)? StringTools.parseInt(keyVal,-1) : -1;
            if (valInt > 0) {
                // explicit positive integer: use it as the new field-count limit
                maxCount = valInt;
                addFields = true;
            } else {
                // NOTE(review): when maxCount==0 this always resolves to false,
                // regardless of the property value - presumably intentional
                addFields = (maxCount > 0)? RTConfig.getBoolean(key,defaultAdd) : false;
            }
            //if (addFields &&
            //    tblName.equalsIgnoreCase(EventData.TABLE_NAME()) &&
            //    key.equals(EventData.OPTCOLS_ThermoFieldInfo)      ) {
            //    EventData.setThermoCount(maxCount);
            //}
        } else {
            // no limit: the property is a plain boolean
            addFields = RTConfig.getBoolean(key,defaultAdd);
        }

        /* add fields */
        if (addFields) {
            // clamp to the array length when a limit is in effect
            int cnt = ((maxCount >= 0) && (maxCount <= customFields.length))? maxCount : customFields.length;
            for (int i = 0; i < cnt; i++) {
                tblFields.add(customFields[i]);
            }
        }

    }

    // ------------------------------------------------------------------------

}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.sdk.io.gcp.bigtable; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import com.google.bigtable.v2.MutateRowResponse; import com.google.bigtable.v2.Mutation; import com.google.bigtable.v2.Row; import com.google.bigtable.v2.RowFilter; import com.google.bigtable.v2.SampleRowKeysResponse; import com.google.cloud.bigtable.config.BigtableOptions; import com.google.cloud.bigtable.config.CredentialOptions; import com.google.cloud.bigtable.config.CredentialOptions.CredentialType; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.MoreObjects; import com.google.common.collect.ImmutableList; import com.google.common.util.concurrent.FutureCallback; import com.google.common.util.concurrent.Futures; import com.google.protobuf.ByteString; import java.io.IOException; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; import java.util.concurrent.ConcurrentLinkedQueue; import javax.annotation.Nullable; import 
org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.extensions.gcp.options.GcpOptions; import org.apache.beam.sdk.extensions.protobuf.ProtoCoder; import org.apache.beam.sdk.io.BoundedSource; import org.apache.beam.sdk.io.BoundedSource.BoundedReader; import org.apache.beam.sdk.io.range.ByteKey; import org.apache.beam.sdk.io.range.ByteKeyRange; import org.apache.beam.sdk.io.range.ByteKeyRangeTracker; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.runners.PipelineRunner; import org.apache.beam.sdk.transforms.DoFn; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.ParDo; import org.apache.beam.sdk.transforms.SerializableFunction; import org.apache.beam.sdk.transforms.display.DisplayData; import org.apache.beam.sdk.util.ReleaseInfo; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PBegin; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PDone; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A bounded source and sink for Google Cloud Bigtable. * * <p>For more information about Cloud Bigtable, see the online documentation at * <a href="https://cloud.google.com/bigtable/">Google Cloud Bigtable</a>. * * <h3>Reading from Cloud Bigtable</h3> * * <p>The Bigtable source returns a set of rows from a single table, returning a * {@code PCollection<Row>}. * * <p>To configure a Cloud Bigtable source, you must supply a table id and a {@link BigtableOptions} * or builder configured with the project and other information necessary to identify the * Bigtable instance. By default, {@link BigtableIO.Read} will read all rows in the table. The row * range to be read can optionally be restricted using {@link BigtableIO.Read#withKeyRange}, and * a {@link RowFilter} can be specified using {@link BigtableIO.Read#withRowFilter}. 
For example: * * <pre>{@code * BigtableOptions.Builder optionsBuilder = * new BigtableOptions.Builder() * .setProjectId("project") * .setInstanceId("instance"); * * Pipeline p = ...; * * // Scan the entire table. * p.apply("read", * BigtableIO.read() * .withBigtableOptions(optionsBuilder) * .withTableId("table")); * * // Scan a prefix of the table. * ByteKeyRange keyRange = ...; * p.apply("read", * BigtableIO.read() * .withBigtableOptions(optionsBuilder) * .withTableId("table") * .withKeyRange(keyRange)); * * // Scan a subset of rows that match the specified row filter. * p.apply("filtered read", * BigtableIO.read() * .withBigtableOptions(optionsBuilder) * .withTableId("table") * .withRowFilter(filter)); * }</pre> * * <h3>Writing to Cloud Bigtable</h3> * * <p>The Bigtable sink executes a set of row mutations on a single table. It takes as input a * {@link PCollection PCollection&lt;KV&lt;ByteString, Iterable&lt;Mutation&gt;&gt;&gt;}, where the * {@link ByteString} is the key of the row being mutated, and each {@link Mutation} represents an * idempotent transformation to that row. * * <p>To configure a Cloud Bigtable sink, you must supply a table id and a {@link BigtableOptions} * or builder configured with the project and other information necessary to identify the * Bigtable instance, for example: * * <pre>{@code * BigtableOptions.Builder optionsBuilder = * new BigtableOptions.Builder() * .setProjectId("project") * .setInstanceId("instance"); * * PCollection<KV<ByteString, Iterable<Mutation>>> data = ...; * * data.apply("write", * BigtableIO.write() * .withBigtableOptions(optionsBuilder) * .withTableId("table")); * }</pre> * * <h3>Experimental</h3> * * <p>This connector for Cloud Bigtable is considered experimental and may break or receive * backwards-incompatible changes in future versions of the Apache Beam SDK. Cloud Bigtable is * in Beta, and thus it may introduce breaking changes in future revisions of its service or APIs. 
* * <h3>Permissions</h3> * * <p>Permission requirements depend on the {@link PipelineRunner} that is used to execute the * pipeline. Please refer to the documentation of corresponding * {@link PipelineRunner PipelineRunners} for more details. */ @Experimental public class BigtableIO { private static final Logger LOG = LoggerFactory.getLogger(BigtableIO.class); /** * Creates an uninitialized {@link BigtableIO.Read}. Before use, the {@code Read} must be * initialized with a * {@link BigtableIO.Read#withBigtableOptions(BigtableOptions) BigtableOptions} that specifies * the source Cloud Bigtable instance, and a {@link BigtableIO.Read#withTableId tableId} that * specifies which table to read. A {@link RowFilter} may also optionally be specified using * {@link BigtableIO.Read#withRowFilter}. */ @Experimental public static Read read() { return new Read(null, "", ByteKeyRange.ALL_KEYS, null, null); } /** * Creates an uninitialized {@link BigtableIO.Write}. Before use, the {@code Write} must be * initialized with a * {@link BigtableIO.Write#withBigtableOptions(BigtableOptions) BigtableOptions} that specifies * the destination Cloud Bigtable instance, and a {@link BigtableIO.Write#withTableId tableId} * that specifies which table to write. */ @Experimental public static Write write() { return new Write(null, "", null); } /** * A {@link PTransform} that reads from Google Cloud Bigtable. See the class-level Javadoc on * {@link BigtableIO} for more information. * * @see BigtableIO */ @Experimental public static class Read extends PTransform<PBegin, PCollection<Row>> { /** * Returns a new {@link BigtableIO.Read} that will read from the Cloud Bigtable instance * indicated by the given options, and using any other specified customizations. * * <p>Does not modify this object. 
*/ public Read withBigtableOptions(BigtableOptions options) { checkNotNull(options, "options"); return withBigtableOptions(options.toBuilder()); } /** * Returns a new {@link BigtableIO.Read} that will read from the Cloud Bigtable instance * indicated by the given options, and using any other specified customizations. * * <p>Clones the given {@link BigtableOptions} builder so that any further changes * will have no effect on the returned {@link BigtableIO.Read}. * * <p>Does not modify this object. */ public Read withBigtableOptions(BigtableOptions.Builder optionsBuilder) { checkNotNull(optionsBuilder, "optionsBuilder"); // TODO: is there a better way to clone a Builder? Want it to be immune from user changes. BigtableOptions options = optionsBuilder.build(); BigtableOptions.Builder clonedBuilder = options.toBuilder() .setUseCachedDataPool(true); BigtableOptions optionsWithAgent = clonedBuilder.setUserAgent(getBeamSdkPartOfUserAgent()).build(); return new Read(optionsWithAgent, tableId, keyRange, filter, bigtableService); } /** * Returns a new {@link BigtableIO.Read} that will filter the rows read from Cloud Bigtable * using the given row filter. * * <p>Does not modify this object. */ public Read withRowFilter(RowFilter filter) { checkNotNull(filter, "filter"); return new Read(options, tableId, keyRange, filter, bigtableService); } /** * Returns a new {@link BigtableIO.Read} that will read only rows in the specified range. * * <p>Does not modify this object. */ public Read withKeyRange(ByteKeyRange keyRange) { checkNotNull(keyRange, "keyRange"); return new Read(options, tableId, keyRange, filter, bigtableService); } /** * Returns a new {@link BigtableIO.Read} that will read from the specified table. * * <p>Does not modify this object. 
*/ public Read withTableId(String tableId) { checkNotNull(tableId, "tableId"); return new Read(options, tableId, keyRange, filter, bigtableService); } /** * Returns the Google Cloud Bigtable instance being read from, and other parameters. */ public BigtableOptions getBigtableOptions() { return options; } /** * Returns the range of keys that will be read from the table. By default, returns * {@link ByteKeyRange#ALL_KEYS} to scan the entire table. */ public ByteKeyRange getKeyRange() { return keyRange; } /** * Returns the table being read from. */ public String getTableId() { return tableId; } @Override public PCollection<Row> expand(PBegin input) { BigtableSource source = new BigtableSource(new SerializableFunction<PipelineOptions, BigtableService>() { @Override public BigtableService apply(PipelineOptions options) { return getBigtableService(options); } }, tableId, filter, keyRange, null); return input.getPipeline().apply(org.apache.beam.sdk.io.Read.from(source)); } @Override public void validate(PBegin input) { checkArgument(options != null, "BigtableOptions not specified"); checkArgument(!tableId.isEmpty(), "Table ID not specified"); try { checkArgument( getBigtableService(input.getPipeline().getOptions()).tableExists(tableId), "Table %s does not exist", tableId); } catch (IOException e) { LOG.warn("Error checking whether table {} exists; proceeding.", tableId, e); } } @Override public void populateDisplayData(DisplayData.Builder builder) { super.populateDisplayData(builder); builder.add(DisplayData.item("tableId", tableId) .withLabel("Table ID")); if (options != null) { builder.add(DisplayData.item("bigtableOptions", options.toString()) .withLabel("Bigtable Options")); } builder.addIfNotDefault( DisplayData.item("keyRange", keyRange.toString()), ByteKeyRange.ALL_KEYS.toString()); if (filter != null) { builder.add(DisplayData.item("rowFilter", filter.toString()) .withLabel("Table Row Filter")); } } @Override public String toString() { return 
MoreObjects.toStringHelper(Read.class) .add("options", options) .add("tableId", tableId) .add("keyRange", keyRange) .add("filter", filter) .toString(); } ///////////////////////////////////////////////////////////////////////////////////////// /** * Used to define the Cloud Bigtable instance and any options for the networking layer. * Cannot actually be {@code null} at validation time, but may start out {@code null} while * source is being built. */ @Nullable private final BigtableOptions options; private final String tableId; private final ByteKeyRange keyRange; @Nullable private final RowFilter filter; @Nullable private final BigtableService bigtableService; private Read( @Nullable BigtableOptions options, String tableId, ByteKeyRange keyRange, @Nullable RowFilter filter, @Nullable BigtableService bigtableService) { this.options = options; this.tableId = checkNotNull(tableId, "tableId"); this.keyRange = checkNotNull(keyRange, "keyRange"); this.filter = filter; this.bigtableService = bigtableService; } /** * Returns a new {@link BigtableIO.Read} that will read using the given Cloud Bigtable * service implementation. * * <p>This is used for testing. * * <p>Does not modify this object. */ Read withBigtableService(BigtableService bigtableService) { checkNotNull(bigtableService, "bigtableService"); return new Read(options, tableId, keyRange, filter, bigtableService); } /** * Helper function that either returns the mock Bigtable service supplied by * {@link #withBigtableService} or creates and returns an implementation that talks to * {@code Cloud Bigtable}. * * <p>Also populate the credentials option from {@link GcpOptions#getGcpCredential()} if the * default credentials are being used on {@link BigtableOptions}. 
*/ @VisibleForTesting BigtableService getBigtableService(PipelineOptions pipelineOptions) { if (bigtableService != null) { return bigtableService; } BigtableOptions.Builder clonedOptions = options.toBuilder(); if (options.getCredentialOptions().getCredentialType() == CredentialType.DefaultCredentials) { clonedOptions.setCredentialOptions( CredentialOptions.credential( pipelineOptions.as(GcpOptions.class).getGcpCredential())); } return new BigtableServiceImpl(clonedOptions.build()); } } /** * A {@link PTransform} that writes to Google Cloud Bigtable. See the class-level Javadoc on * {@link BigtableIO} for more information. * * @see BigtableIO */ @Experimental public static class Write extends PTransform<PCollection<KV<ByteString, Iterable<Mutation>>>, PDone> { /** * Used to define the Cloud Bigtable instance and any options for the networking layer. * Cannot actually be {@code null} at validation time, but may start out {@code null} while * source is being built. */ @Nullable private final BigtableOptions options; private final String tableId; @Nullable private final BigtableService bigtableService; private Write( @Nullable BigtableOptions options, String tableId, @Nullable BigtableService bigtableService) { this.options = options; this.tableId = checkNotNull(tableId, "tableId"); this.bigtableService = bigtableService; } /** * Returns a new {@link BigtableIO.Write} that will write to the Cloud Bigtable instance * indicated by the given options, and using any other specified customizations. * * <p>Does not modify this object. */ public Write withBigtableOptions(BigtableOptions options) { checkNotNull(options, "options"); return withBigtableOptions(options.toBuilder()); } /** * Returns a new {@link BigtableIO.Write} that will write to the Cloud Bigtable instance * indicated by the given options, and using any other specified customizations. 
* * <p>Clones the given {@link BigtableOptions} builder so that any further changes * will have no effect on the returned {@link BigtableIO.Write}. * * <p>Does not modify this object. */ public Write withBigtableOptions(BigtableOptions.Builder optionsBuilder) { checkNotNull(optionsBuilder, "optionsBuilder"); // TODO: is there a better way to clone a Builder? Want it to be immune from user changes. BigtableOptions options = optionsBuilder.build(); // Set useBulkApi to true for enabling bulk writes BigtableOptions.Builder clonedBuilder = options.toBuilder() .setBulkOptions( options.getBulkOptions().toBuilder() .setUseBulkApi(true) .build()) .setUseCachedDataPool(true); BigtableOptions optionsWithAgent = clonedBuilder.setUserAgent(getBeamSdkPartOfUserAgent()).build(); return new Write(optionsWithAgent, tableId, bigtableService); } /** * Returns a new {@link BigtableIO.Write} that will write to the specified table. * * <p>Does not modify this object. */ public Write withTableId(String tableId) { checkNotNull(tableId, "tableId"); return new Write(options, tableId, bigtableService); } /** * Returns the Google Cloud Bigtable instance being written to, and other parameters. */ public BigtableOptions getBigtableOptions() { return options; } /** * Returns the table being written to. 
*/ public String getTableId() { return tableId; } @Override public PDone expand(PCollection<KV<ByteString, Iterable<Mutation>>> input) { input.apply(ParDo.of(new BigtableWriterFn(tableId, new SerializableFunction<PipelineOptions, BigtableService>() { @Override public BigtableService apply(PipelineOptions options) { return getBigtableService(options); } }))); return PDone.in(input.getPipeline()); } @Override public void validate(PCollection<KV<ByteString, Iterable<Mutation>>> input) { checkArgument(options != null, "BigtableOptions not specified"); checkArgument(!tableId.isEmpty(), "Table ID not specified"); try { checkArgument( getBigtableService(input.getPipeline().getOptions()).tableExists(tableId), "Table %s does not exist", tableId); } catch (IOException e) { LOG.warn("Error checking whether table {} exists; proceeding.", tableId, e); } } /** * Returns a new {@link BigtableIO.Write} that will write using the given Cloud Bigtable * service implementation. * * <p>This is used for testing. * * <p>Does not modify this object. */ Write withBigtableService(BigtableService bigtableService) { checkNotNull(bigtableService, "bigtableService"); return new Write(options, tableId, bigtableService); } @Override public void populateDisplayData(DisplayData.Builder builder) { super.populateDisplayData(builder); builder.add(DisplayData.item("tableId", tableId) .withLabel("Table ID")); if (options != null) { builder.add(DisplayData.item("bigtableOptions", options.toString()) .withLabel("Bigtable Options")); } } @Override public String toString() { return MoreObjects.toStringHelper(Write.class) .add("options", options) .add("tableId", tableId) .toString(); } /** * Helper function that either returns the mock Bigtable service supplied by * {@link #withBigtableService} or creates and returns an implementation that talks to * {@code Cloud Bigtable}. 
* * <p>Also populate the credentials option from {@link GcpOptions#getGcpCredential()} if the * default credentials are being used on {@link BigtableOptions}. */ @VisibleForTesting BigtableService getBigtableService(PipelineOptions pipelineOptions) { if (bigtableService != null) { return bigtableService; } BigtableOptions.Builder clonedOptions = options.toBuilder(); if (options.getCredentialOptions().getCredentialType() == CredentialType.DefaultCredentials) { clonedOptions.setCredentialOptions( CredentialOptions.credential( pipelineOptions.as(GcpOptions.class).getGcpCredential())); } return new BigtableServiceImpl(clonedOptions.build()); } private class BigtableWriterFn extends DoFn<KV<ByteString, Iterable<Mutation>>, Void> { public BigtableWriterFn(String tableId, SerializableFunction<PipelineOptions, BigtableService> bigtableServiceFactory) { this.tableId = checkNotNull(tableId, "tableId"); this.bigtableServiceFactory = checkNotNull(bigtableServiceFactory, "bigtableServiceFactory"); this.failures = new ConcurrentLinkedQueue<>(); } @StartBundle public void startBundle(Context c) throws IOException { if (bigtableWriter == null) { bigtableWriter = bigtableServiceFactory.apply( c.getPipelineOptions()).openForWriting(tableId); } recordsWritten = 0; } @ProcessElement public void processElement(ProcessContext c) throws Exception { checkForFailures(); Futures.addCallback( bigtableWriter.writeRecord(c.element()), new WriteExceptionCallback(c.element())); ++recordsWritten; } @FinishBundle public void finishBundle(Context c) throws Exception { bigtableWriter.flush(); checkForFailures(); LOG.info("Wrote {} records", recordsWritten); } @Teardown public void tearDown() throws Exception { bigtableWriter.close(); bigtableWriter = null; } @Override public void populateDisplayData(DisplayData.Builder builder) { builder.delegate(Write.this); } /////////////////////////////////////////////////////////////////////////////// private final String tableId; private final 
SerializableFunction<PipelineOptions, BigtableService> bigtableServiceFactory; private BigtableService.Writer bigtableWriter; private long recordsWritten; private final ConcurrentLinkedQueue<BigtableWriteException> failures; /** * If any write has asynchronously failed, fail the bundle with a useful error. */ private void checkForFailures() throws IOException { // Note that this function is never called by multiple threads and is the only place that // we remove from failures, so this code is safe. if (failures.isEmpty()) { return; } StringBuilder logEntry = new StringBuilder(); int i = 0; for (; i < 10 && !failures.isEmpty(); ++i) { BigtableWriteException exc = failures.remove(); logEntry.append("\n").append(exc.getMessage()); if (exc.getCause() != null) { logEntry.append(": ").append(exc.getCause().getMessage()); } } String message = String.format( "At least %d errors occurred writing to Bigtable. First %d errors: %s", i + failures.size(), i, logEntry.toString()); LOG.error(message); throw new IOException(message); } private class WriteExceptionCallback implements FutureCallback<MutateRowResponse> { private final KV<ByteString, Iterable<Mutation>> value; public WriteExceptionCallback(KV<ByteString, Iterable<Mutation>> value) { this.value = value; } @Override public void onFailure(Throwable cause) { failures.add(new BigtableWriteException(value, cause)); } @Override public void onSuccess(MutateRowResponse produced) {} } } } ////////////////////////////////////////////////////////////////////////////////////////// /** Disallow construction of utility class. 
*/ private BigtableIO() {} private static ByteKey makeByteKey(ByteString key) { return ByteKey.copyFrom(key.asReadOnlyByteBuffer()); } static class BigtableSource extends BoundedSource<Row> { public BigtableSource( SerializableFunction<PipelineOptions, BigtableService> serviceFactory, String tableId, @Nullable RowFilter filter, ByteKeyRange range, @Nullable Long estimatedSizeBytes) { this.serviceFactory = serviceFactory; this.tableId = tableId; this.filter = filter; this.range = range; this.estimatedSizeBytes = estimatedSizeBytes; } @Override public String toString() { return MoreObjects.toStringHelper(BigtableSource.class) .add("tableId", tableId) .add("filter", filter) .add("range", range) .add("estimatedSizeBytes", estimatedSizeBytes) .toString(); } ////// Private state and internal implementation details ////// private final SerializableFunction<PipelineOptions, BigtableService> serviceFactory; private final String tableId; @Nullable private final RowFilter filter; private final ByteKeyRange range; @Nullable private Long estimatedSizeBytes; @Nullable private transient List<SampleRowKeysResponse> sampleRowKeys; protected BigtableSource withStartKey(ByteKey startKey) { checkNotNull(startKey, "startKey"); return new BigtableSource( serviceFactory, tableId, filter, range.withStartKey(startKey), estimatedSizeBytes); } protected BigtableSource withEndKey(ByteKey endKey) { checkNotNull(endKey, "endKey"); return new BigtableSource( serviceFactory, tableId, filter, range.withEndKey(endKey), estimatedSizeBytes); } protected BigtableSource withEstimatedSizeBytes(Long estimatedSizeBytes) { checkNotNull(estimatedSizeBytes, "estimatedSizeBytes"); return new BigtableSource(serviceFactory, tableId, filter, range, estimatedSizeBytes); } /** * Makes an API call to the Cloud Bigtable service that gives information about tablet key * boundaries and estimated sizes. 
We can use these samples to ensure that splits are on * different tablets, and possibly generate sub-splits within tablets. */ private List<SampleRowKeysResponse> getSampleRowKeys(PipelineOptions pipelineOptions) throws IOException { return serviceFactory.apply(pipelineOptions).getSampleRowKeys(this); } @Override public List<BigtableSource> split( long desiredBundleSizeBytes, PipelineOptions options) throws Exception { // Update the desiredBundleSizeBytes in order to limit the // number of splits to maximumNumberOfSplits. long maximumNumberOfSplits = 4000; long sizeEstimate = getEstimatedSizeBytes(options); desiredBundleSizeBytes = Math.max(sizeEstimate / maximumNumberOfSplits, desiredBundleSizeBytes); // Delegate to testable helper. return splitBasedOnSamples(desiredBundleSizeBytes, getSampleRowKeys(options)); } /** Helper that splits this source into bundles based on Cloud Bigtable sampled row keys. */ private List<BigtableSource> splitBasedOnSamples( long desiredBundleSizeBytes, List<SampleRowKeysResponse> sampleRowKeys) { // There are no regions, or no samples available. Just scan the entire range. if (sampleRowKeys.isEmpty()) { LOG.info("Not splitting source {} because no sample row keys are available.", this); return Collections.singletonList(this); } LOG.info( "About to split into bundles of size {} with sampleRowKeys length {} first element {}", desiredBundleSizeBytes, sampleRowKeys.size(), sampleRowKeys.get(0)); // Loop through all sampled responses and generate splits from the ones that overlap the // scan range. The main complication is that we must track the end range of the previous // sample to generate good ranges. 
ByteKey lastEndKey = ByteKey.EMPTY; long lastOffset = 0; ImmutableList.Builder<BigtableSource> splits = ImmutableList.builder(); for (SampleRowKeysResponse response : sampleRowKeys) { ByteKey responseEndKey = makeByteKey(response.getRowKey()); long responseOffset = response.getOffsetBytes(); checkState( responseOffset >= lastOffset, "Expected response byte offset %s to come after the last offset %s", responseOffset, lastOffset); if (!range.overlaps(ByteKeyRange.of(lastEndKey, responseEndKey))) { // This region does not overlap the scan, so skip it. lastOffset = responseOffset; lastEndKey = responseEndKey; continue; } // Calculate the beginning of the split as the larger of startKey and the end of the last // split. Unspecified start is smallest key so is correctly treated as earliest key. ByteKey splitStartKey = lastEndKey; if (splitStartKey.compareTo(range.getStartKey()) < 0) { splitStartKey = range.getStartKey(); } // Calculate the end of the split as the smaller of endKey and the end of this sample. Note // that range.containsKey handles the case when range.getEndKey() is empty. ByteKey splitEndKey = responseEndKey; if (!range.containsKey(splitEndKey)) { splitEndKey = range.getEndKey(); } // We know this region overlaps the desired key range, and we know a rough estimate of its // size. Split the key range into bundle-sized chunks and then add them all as splits. long sampleSizeBytes = responseOffset - lastOffset; List<BigtableSource> subSplits = splitKeyRangeIntoBundleSizedSubranges( sampleSizeBytes, desiredBundleSizeBytes, ByteKeyRange.of(splitStartKey, splitEndKey)); splits.addAll(subSplits); // Move to the next region. lastEndKey = responseEndKey; lastOffset = responseOffset; } // We must add one more region after the end of the samples if both these conditions hold: // 1. we did not scan to the end yet (lastEndKey is concrete, not 0-length). // 2. we want to scan to the end (endKey is empty) or farther (lastEndKey < endKey). 
if (!lastEndKey.isEmpty() && (range.getEndKey().isEmpty() || lastEndKey.compareTo(range.getEndKey()) < 0)) { splits.add(this.withStartKey(lastEndKey).withEndKey(range.getEndKey())); } List<BigtableSource> ret = splits.build(); LOG.info("Generated {} splits. First split: {}", ret.size(), ret.get(0)); return ret; } @Override public long getEstimatedSizeBytes(PipelineOptions options) throws IOException { // Delegate to testable helper. if (estimatedSizeBytes == null) { estimatedSizeBytes = getEstimatedSizeBytesBasedOnSamples(getSampleRowKeys(options)); } return estimatedSizeBytes; } /** * Computes the estimated size in bytes based on the total size of all samples that overlap * the key range this source will scan. */ private long getEstimatedSizeBytesBasedOnSamples(List<SampleRowKeysResponse> samples) { long estimatedSizeBytes = 0; long lastOffset = 0; ByteKey currentStartKey = ByteKey.EMPTY; // Compute the total estimated size as the size of each sample that overlaps the scan range. // TODO: In future, Bigtable service may provide finer grained APIs, e.g., to sample given a // filter or to sample on a given key range. for (SampleRowKeysResponse response : samples) { ByteKey currentEndKey = makeByteKey(response.getRowKey()); long currentOffset = response.getOffsetBytes(); if (!currentStartKey.isEmpty() && currentStartKey.equals(currentEndKey)) { // Skip an empty region. 
lastOffset = currentOffset; continue; } else if (range.overlaps(ByteKeyRange.of(currentStartKey, currentEndKey))) { estimatedSizeBytes += currentOffset - lastOffset; } currentStartKey = currentEndKey; lastOffset = currentOffset; } return estimatedSizeBytes; } @Override public BoundedReader<Row> createReader(PipelineOptions options) throws IOException { return new BigtableReader(this, serviceFactory.apply(options)); } @Override public void validate() { checkArgument(!tableId.isEmpty(), "tableId cannot be empty"); } @Override public void populateDisplayData(DisplayData.Builder builder) { super.populateDisplayData(builder); builder.add(DisplayData.item("tableId", tableId) .withLabel("Table ID")); if (filter != null) { builder.add(DisplayData.item("rowFilter", filter.toString()) .withLabel("Table Row Filter")); } } @Override public Coder<Row> getDefaultOutputCoder() { return ProtoCoder.of(Row.class); } /** Helper that splits the specified range in this source into bundles. */ private List<BigtableSource> splitKeyRangeIntoBundleSizedSubranges( long sampleSizeBytes, long desiredBundleSizeBytes, ByteKeyRange range) { // Catch the trivial cases. Split is small enough already, or this is the last region. 
LOG.debug( "Subsplit for sampleSizeBytes {} and desiredBundleSizeBytes {}", sampleSizeBytes, desiredBundleSizeBytes); if (sampleSizeBytes <= desiredBundleSizeBytes) { return Collections.singletonList( this.withStartKey(range.getStartKey()).withEndKey(range.getEndKey())); } checkArgument( sampleSizeBytes > 0, "Sample size %s bytes must be greater than 0.", sampleSizeBytes); checkArgument( desiredBundleSizeBytes > 0, "Desired bundle size %s bytes must be greater than 0.", desiredBundleSizeBytes); int splitCount = (int) Math.ceil(((double) sampleSizeBytes) / (desiredBundleSizeBytes)); List<ByteKey> splitKeys = range.split(splitCount); ImmutableList.Builder<BigtableSource> splits = ImmutableList.builder(); Iterator<ByteKey> keys = splitKeys.iterator(); ByteKey prev = keys.next(); while (keys.hasNext()) { ByteKey next = keys.next(); splits.add( this .withStartKey(prev) .withEndKey(next) .withEstimatedSizeBytes(sampleSizeBytes / splitCount)); prev = next; } return splits.build(); } public ByteKeyRange getRange() { return range; } public RowFilter getRowFilter() { return filter; } public String getTableId() { return tableId; } } private static class BigtableReader extends BoundedReader<Row> { // Thread-safety: source is protected via synchronization and is only accessed or modified // inside a synchronized block (or constructor, which is the same). 
// Guarded by synchronization; may be replaced by splitAtFraction (see class comment above).
private BigtableSource source;
private BigtableService service;
private BigtableService.Reader reader;
private final ByteKeyRangeTracker rangeTracker;
// Count of rows handed to the pipeline; reported on close() for diagnostics.
private long recordsReturned;

public BigtableReader(BigtableSource source, BigtableService service) {
  this.source = source;
  this.service = service;
  rangeTracker = ByteKeyRangeTracker.of(source.getRange());
}

@Override
public boolean start() throws IOException {
  reader = service.createReader(getCurrentSource());
  // Precedence: (start() && tryReturnRecordAt(...)) || markDone().
  // If the service has no first row, or the tracker rejects its key (e.g. the range was
  // already split away), mark the tracker done; markDone() returns false.
  boolean hasRecord =
      reader.start()
          && rangeTracker.tryReturnRecordAt(true, makeByteKey(reader.getCurrentRow().getKey()))
          || rangeTracker.markDone();
  if (hasRecord) {
    ++recordsReturned;
  }
  return hasRecord;
}

@Override
public synchronized BigtableSource getCurrentSource() {
  return source;
}

@Override
public boolean advance() throws IOException {
  // Same (a && b) || markDone() shape as start(); see comment there.
  boolean hasRecord =
      reader.advance()
          && rangeTracker.tryReturnRecordAt(true, makeByteKey(reader.getCurrentRow().getKey()))
          || rangeTracker.markDone();
  if (hasRecord) {
    ++recordsReturned;
  }
  return hasRecord;
}

@Override
public Row getCurrent() throws NoSuchElementException {
  return reader.getCurrentRow();
}

@Override
public void close() throws IOException {
  LOG.info("Closing reader after reading {} records.", recordsReturned);
  // Null out after closing so a second close() is a no-op.
  if (reader != null) {
    reader.close();
    reader = null;
  }
}

@Override
public final Double getFractionConsumed() {
  return rangeTracker.getFractionConsumed();
}

@Override
public final long getSplitPointsConsumed() {
  return rangeTracker.getSplitPointsConsumed();
}

@Override
@Nullable
public final synchronized BigtableSource splitAtFraction(double fraction) {
  // Interpolate the fraction into a concrete key; failure means the split is not possible.
  ByteKey splitKey;
  try {
    splitKey = rangeTracker.getRange().interpolateKey(fraction);
  } catch (RuntimeException e) {
    LOG.info(
        "{}: Failed to interpolate key for fraction {}.", rangeTracker.getRange(), fraction, e);
    return null;
  }
  LOG.debug(
      "Proposing to split {} at fraction {} (key {})", rangeTracker, fraction, splitKey);
  // Build both halves BEFORE committing the split, so a bad key leaves state untouched.
  BigtableSource primary;
  BigtableSource residual;
  try {
    primary =
        source.withEndKey(splitKey);
    residual = source.withStartKey(splitKey);
  } catch (RuntimeException e) {
    LOG.info(
        "{}: Interpolating for fraction {} yielded invalid split key {}.",
        rangeTracker.getRange(),
        fraction,
        splitKey,
        e);
    return null;
  }
  // Commit point: only adopt the primary if the tracker accepts the split position.
  if (!rangeTracker.trySplitAtPosition(splitKey)) {
    return null;
  }
  this.source = primary;
  return residual;
}
}

/**
 * An exception that puts information about the failed record being written in its message.
 */
static class BigtableWriteException extends IOException {
  public BigtableWriteException(KV<ByteString, Iterable<Mutation>> record, Throwable cause) {
    super(
        String.format(
            "Error mutating row %s with mutations %s",
            record.getKey().toStringUtf8(),
            record.getValue()),
        cause);
  }
}

/**
 * A helper function to produce a Cloud Bigtable user agent string. This need only include
 * information about the Apache Beam SDK itself, because Bigtable will automatically append
 * other relevant system and Bigtable client-specific version information.
 *
 * @see com.google.cloud.bigtable.config.BigtableVersionInfo
 */
private static String getBeamSdkPartOfUserAgent() {
  ReleaseInfo info = ReleaseInfo.getReleaseInfo();
  // User-agent tokens must not contain spaces.
  return String.format("%s/%s", info.getName(), info.getVersion())
      .replace(" ", "_");
}
}
/* * For work developed by the HSQL Development Group: * * Copyright (c) 2001-2011, The HSQL Development Group * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * Neither the name of the HSQL Development Group nor the names of its * contributors may be used to endorse or promote products derived from this * software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG, * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * * * * For work originally developed by the Hypersonic SQL Group: * * Copyright (c) 1995-2000, The Hypersonic SQL Group. * All rights reserved. 
* * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * Neither the name of the Hypersonic SQL Group nor the names of its * contributors may be used to endorse or promote products derived from this * software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE HYPERSONIC SQL GROUP, * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * * This software consists of voluntary contributions made by many individuals * on behalf of the Hypersonic SQL Group. 
*/

package org.hsqldb.index;

import org.hsqldb.Row;
import org.hsqldb.RowAVL;
import org.hsqldb.RowAVLDisk;
import org.hsqldb.lib.LongLookup;
import org.hsqldb.persist.CachedObject;
import org.hsqldb.persist.PersistentStore;
import org.hsqldb.rowio.RowInputInterface;
import org.hsqldb.rowio.RowOutputInterface;

// fredt@users 20020221 - patch 513005 by sqlbob@users (RMP)
// fredt@users 20020920 - path 1.7.1 - refactoring to cut memory footprint
// fredt@users 20021205 - path 1.7.2 - enhancements
// fredt@users 20021215 - doc 1.7.2 - javadoc comments

/**
 * The parent for all AVL node implementations. Subclasses of Node vary
 * in the way they hold
 * references to other Nodes in the AVL tree, or to their Row data.<br>
 *
 * nNext links the Node objects belonging to different indexes for each
 * table row. It is used solely by Row to locate the node belonging to a
 * particular index.<br>
 *
 * New class derived from Hypersonic SQL code and enhanced in HSQLDB. <p>
 *
 * @author Thomas Mueller (Hypersonic SQL Group)
 * @version 1.9.0
 * @since Hypersonic SQL
 */
public class NodeAVL implements CachedObject {

    static final int NO_POS = RowAVLDisk.NO_POS;

    // AVL balance factor of this node (-1, 0, +1 in a balanced tree).
    public int iBalance;

    public NodeAVL nNext;    // node of next index (nNext==null || nNext.iId=iId+1)

    //
    protected NodeAVL nLeft;
    protected NodeAVL nRight;
    protected NodeAVL nParent;

    // The table row this node indexes; null only for the package-private sentinel ctor.
    protected final Row row;

    NodeAVL() {
        row = null;
    }

    public NodeAVL(Row r) {
        row = r;
    }

    /** Detaches this node from the tree and resets its balance. */
    public void delete() {
        iBalance = 0;
        nLeft    = nRight = nParent = null;
    }

    // The PersistentStore parameters below are unused in this in-memory implementation;
    // disk-based subclasses use them to resolve node references.
    NodeAVL getLeft(PersistentStore store) {
        return nLeft;
    }

    NodeAVL setLeft(PersistentStore persistentStore, NodeAVL n) {
        nLeft = n;

        return this;
    }

    public int getBalance(PersistentStore store) {
        return iBalance;
    }

    boolean isLeft(NodeAVL node) {
        return nLeft == node;
    }

    boolean isRight(NodeAVL node) {
        return nRight == node;
    }

    NodeAVL getRight(PersistentStore persistentStore) {
        return nRight;
    }

    NodeAVL setRight(PersistentStore persistentStore, NodeAVL n) {
        nRight = n;

        return this;
    }

    NodeAVL getParent(PersistentStore store) {
        return nParent;
    }

    boolean isRoot(PersistentStore store) {
        return nParent == null;
    }

    NodeAVL setParent(PersistentStore persistentStore, NodeAVL n) {
        nParent = n;

        return this;
    }

    public NodeAVL setBalance(PersistentStore store, int b) {
        iBalance = b;

        return this;
    }

    /** Returns true if this node is its parent's left child, or is the root. */
    boolean isFromLeft(PersistentStore store) {
        if (nParent == null) {
            return true;
        }

        return this == nParent.nLeft;
    }

    public NodeAVL child(PersistentStore store, boolean isleft) {
        return isleft ? getLeft(store)
                      : getRight(store);
    }

    /** Sets the given child link and fixes the child's parent pointer. */
    public NodeAVL set(PersistentStore store, boolean isLeft, NodeAVL n) {

        if (isLeft) {
            nLeft = n;
        } else {
            nRight = n;
        }

        if (n != null) {
            n.nParent = this;
        }

        return this;
    }

    /** Replaces this node with n in its parent (or as the index root if this is the root). */
    public void replace(PersistentStore store, Index index, NodeAVL n) {

        if (nParent == null) {
            if (n != null) {
                n = n.setParent(store, null);
            }

            store.setAccessor(index, n);
        } else {
            nParent.set(store, isFromLeft(store), n);
        }
    }

    // NOTE: intentionally an overload of Object.equals (identity compare), not an override.
    boolean equals(NodeAVL n) {
        return n == this;
    }

    // CachedObject methods below are no-ops / defaults for in-memory nodes; disk-based
    // subclasses override them with real persistence behaviour.
    public void setInMemory(boolean in) {}

    public int getDefaultCapacity() {
        return 0;
    }

    public void read(RowInputInterface in) {}

    public void write(RowOutputInterface out) {}

    public void write(RowOutputInterface out, LongLookup lookup) {}

    public long getPos() {
        return 0;
    }

    public RowAVL getRow(PersistentStore store) {
        return (RowAVL) row;
    }

    protected Object[] getData(PersistentStore store) {
        return row.getData();
    }

    public void updateAccessCount(int count) {}

    public int getAccessCount() {
        return 0;
    }

    public void setStorageSize(int size) {}

    public int getStorageSize() {
        return 0;
    }

    public final boolean isBlock() {
        return false;
    }

    public void setPos(long pos) {}

    public boolean isNew() {
        return false;
    }

    public boolean hasChanged() {
        return false;
    }

    public boolean isKeepInMemory() {
        return false;
    }

    public boolean keepInMemory(boolean keep) {
        return true;
    }

    public boolean isInMemory() {
        return false;
    }

    public void restore() {}

    public void destroy() {}

    public int getRealSize(RowOutputInterface out) {
        return 0;
    }

    public boolean isMemory() {
        return true;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.phoenix.expression.function;

import java.io.DataInput;
import java.io.IOException;
import java.util.List;

import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.phoenix.expression.Expression;
import org.apache.phoenix.expression.LiteralExpression;
import org.apache.phoenix.parse.FunctionParseNode.Argument;
import org.apache.phoenix.parse.FunctionParseNode.BuiltInFunction;
import org.apache.phoenix.schema.SortOrder;
import org.apache.phoenix.schema.tuple.Tuple;
import org.apache.phoenix.schema.types.PChar;
import org.apache.phoenix.schema.types.PDataType;
import org.apache.phoenix.schema.types.PLong;
import org.apache.phoenix.schema.types.PVarchar;
import org.apache.phoenix.util.StringUtil;

/**
 *
 * Implementation of the {@code SUBSTR(<string>,<offset>[,<length>]) } built-in function
 * where {@code <offset> } is the offset from the start of {@code <string> }. A positive offset
 * is treated as 1-based, a zero offset is treated as 0-based, and a negative
 * offset starts from the end of the string working backwards. The optional
 * {@code <length> } argument is the number of characters to return. In the absence of the
 * {@code <length> } argument, the rest of the string starting from {@code <offset> } is returned.
 * If {@code <length> } is less than 1, null is returned.
 *
 *
 * @since 0.1
 */
@BuiltInFunction(name=SubstrFunction.NAME,  args={
    @Argument(allowedTypes={PVarchar.class}),
    @Argument(allowedTypes={PLong.class}), // These are LONG because negative numbers end up as longs
    @Argument(allowedTypes={PLong.class},defaultValue="null")} )
public class SubstrFunction extends PrefixFunction {
    public static final String NAME = "SUBSTR";
    // Derived in init() from the (possibly constant) child expressions; recomputed on
    // deserialization via readFields().
    private boolean hasLengthExpression;
    private boolean isOffsetConstant;
    private boolean isLengthConstant;
    private boolean isFixedWidth;
    private Integer maxLength;

    public SubstrFunction() {
    }

    public SubstrFunction(List<Expression> children) {
        super(children);
        init();
    }

    // Precomputes constness flags and, when statically determinable, the max result length.
    private void init() {
        isOffsetConstant = getOffsetExpression() instanceof LiteralExpression;
        isLengthConstant = getLengthExpression() instanceof LiteralExpression;
        // The length argument defaults to a null literal, so "no length" shows up as a
        // constant literal whose value is null.
        hasLengthExpression = !isLengthConstant || ((LiteralExpression)getLengthExpression()).getValue() != null;
        // Result is fixed-width only if the input is fixed-width AND the slice bounds are
        // statically known (constant length, or constant offset with no length).
        isFixedWidth = getStrExpression().getDataType().isFixedWidth() && ((hasLengthExpression && isLengthConstant) || (!hasLengthExpression && isOffsetConstant));
        if (hasLengthExpression && isLengthConstant) {
            Integer maxLength = ((Number)((LiteralExpression)getLengthExpression()).getValue()).intValue();
            this.maxLength = maxLength >= 0 ? maxLength : 0;
        } else if (isOffsetConstant) {
            Number offsetNumber = (Number)((LiteralExpression)getOffsetExpression()).getValue();
            if (offsetNumber != null) {
                int offset = offsetNumber.intValue();
                PDataType type = getStrExpression().getDataType();
                if (type.isFixedWidth()) {
                    if (offset >= 0) {
                        Integer maxLength = getStrExpression().getMaxLength();
                        // NOTE(review): adjusts for the 1-based positive offset; for offset >= 2
                        // this gives maxLength - offset + 1 — confirm intended for offset > 1.
                        this.maxLength = maxLength - offset + (offset == 0 ? 0 : 1);
                    } else {
                        // Negative offset counts back from the end, so at most -offset chars remain.
                        this.maxLength = -offset;
                    }
                }
            }
        }
    }

    /**
     * Evaluates the substring into ptr. Returns false when the result is SQL null
     * (unevaluable child, out-of-range offset, or non-positive length); a true return
     * with an empty ptr propagates an empty/null input unchanged.
     */
    @Override
    public boolean evaluate(Tuple tuple, ImmutableBytesWritable ptr) {
        Expression offsetExpression = getOffsetExpression();
        if (!offsetExpression.evaluate(tuple,  ptr)) {
            return false;
        }
        if (ptr.getLength()==0) {
            return true;
        }
        int offset = offsetExpression.getDataType().getCodec().decodeInt(ptr, offsetExpression.getSortOrder());

        int length = -1;
        if (hasLengthExpression) {
            Expression lengthExpression = getLengthExpression();
            if (!lengthExpression.evaluate(tuple, ptr)) {
                return false;
            }
            if (ptr.getLength()==0) {
                return true;
            }
            length = lengthExpression.getDataType().getCodec().decodeInt(ptr, lengthExpression.getSortOrder());
            if (length <= 0) {
                return false;
            }
        }

        if (!getStrExpression().evaluate(tuple, ptr)) {
            return false;
        }
        if (ptr.getLength()==0) {
            return true;
        }

        // CHAR is fixed one byte per char; VARCHAR must be measured in UTF-8 code points.
        boolean isCharType = getStrExpression().getDataType() == PChar.INSTANCE;
        SortOrder sortOrder = getStrExpression().getSortOrder();
        int strlen = isCharType ? ptr.getLength() : StringUtil.calculateUTF8Length(ptr.get(), ptr.getOffset(), ptr.getLength(), sortOrder);

        // Account for 1 versus 0-based offset
        offset = offset - (offset <= 0 ? 0 : 1);
        if (offset < 0) { // Offset < 0 means get from end
            offset = strlen + offset;
        }
        if (offset < 0 || offset >= strlen) {
            return false;
        }
        int maxLength = strlen - offset;
        length = length == -1 ? maxLength : Math.min(length,maxLength);

        // Translate character positions into byte positions before slicing.
        int byteOffset = isCharType ? offset : StringUtil.getByteLengthForUtf8SubStr(ptr.get(), ptr.getOffset(), offset, sortOrder);
        int byteLength = isCharType ? length : StringUtil.getByteLengthForUtf8SubStr(ptr.get(), ptr.getOffset() + byteOffset, length, sortOrder);
        ptr.set(ptr.get(), ptr.getOffset() + byteOffset, byteLength);
        return true;
    }

    @Override
    public PDataType getDataType() {
        // If fixed width, then return child expression type.
        // If not fixed width, then we don't know how big this will be across the board
        return isFixedWidth ? getStrExpression().getDataType() : PVarchar.INSTANCE;
    }

    @Override
    public boolean isNullable() {
        return getStrExpression().isNullable() || !isFixedWidth || getOffsetExpression().isNullable();
    }

    @Override
    public Integer getMaxLength() {
        return maxLength;
    }

    @Override
    public SortOrder getSortOrder() {
        return getStrExpression().getSortOrder();
    }

    @Override
    public void readFields(DataInput input) throws IOException {
        super.readFields(input);
        // Derived fields are not serialized; rebuild them from the children.
        init();
    }

    private Expression getStrExpression() {
        return children.get(0);
    }

    private Expression getOffsetExpression() {
        return children.get(1);
    }

    // Always present: the parser supplies a null literal when no length is given.
    private Expression getLengthExpression() {
        return children.get(2);
    }

    @Override
    public OrderPreserving preservesOrder() {
        // SUBSTR preserves row ordering only when it is a prefix: offset 0 or 1 and a
        // constant (or absent) length.
        if (isOffsetConstant) {
            LiteralExpression literal = (LiteralExpression) getOffsetExpression();
            Number offsetNumber = (Number) literal.getValue();
            if (offsetNumber != null) {
                int offset = offsetNumber.intValue();
                if ((offset == 0 || offset == 1) && (!hasLengthExpression || isLengthConstant)) {
                    return OrderPreserving.YES_IF_LAST;
                }
            }
        }
        return OrderPreserving.NO;
    }

    @Override
    protected boolean extractNode() {
        return true;
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder(getName() + "(");
        if (children.size()==0)
            return buf.append(")").toString();
        if (hasLengthExpression) {
            buf.append(getStrExpression());
            buf.append(", ");
            buf.append(getOffsetExpression());
            buf.append(", ");
            buf.append(getLengthExpression());
        } else {
            buf.append(getStrExpression());
            buf.append(", ");
            buf.append(getOffsetExpression());
        }
        buf.append(")");
        return buf.toString();
    }
}
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.example.mapdemo;

import android.annotation.SuppressLint;
import com.google.android.libraries.maps.GoogleMap;
import com.google.android.libraries.maps.OnMapReadyCallback;
import com.google.android.libraries.maps.SupportMapFragment;
import android.Manifest;
import android.content.pm.PackageManager;
import android.os.Bundle;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import androidx.appcompat.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.ArrayAdapter;
import android.widget.CheckBox;
import android.widget.Spinner;
import android.widget.Toast;

import static com.google.android.libraries.maps.GoogleMap.MAP_TYPE_HYBRID;
import static com.google.android.libraries.maps.GoogleMap.MAP_TYPE_NONE;
import static com.google.android.libraries.maps.GoogleMap.MAP_TYPE_NORMAL;
import static com.google.android.libraries.maps.GoogleMap.MAP_TYPE_SATELLITE;
import static com.google.android.libraries.maps.GoogleMap.MAP_TYPE_TERRAIN;

/**
 * Demonstrates the different base layers of a map.
 */
public class LayersDemoActivity extends AppCompatActivity
        implements OnItemSelectedListener, OnMapReadyCallback,
        ActivityCompat.OnRequestPermissionsResultCallback {

    private static final int LOCATION_PERMISSION_REQUEST_CODE = 1;

    // Null until onMapReady() fires; every update* method guards on this via checkReady().
    private GoogleMap mMap;

    private CheckBox mTrafficCheckbox;
    private CheckBox mMyLocationCheckbox;
    private CheckBox mBuildingsCheckbox;
    private CheckBox mIndoorCheckbox;

    private Spinner mSpinner;

    /**
     * Flag indicating whether a requested permission has been denied after returning in
     * {@link #onRequestPermissionsResult(int, String[], int[])}.
     */
    private boolean mShowPermissionDeniedDialog = false;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.layers_demo);

        // Spinner drives the base-layer (map type) selection.
        mSpinner = (Spinner) findViewById(R.id.layers_spinner);
        ArrayAdapter<CharSequence> adapter = ArrayAdapter.createFromResource(
                this, R.array.layers_array, android.R.layout.simple_spinner_item);
        adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
        mSpinner.setAdapter(adapter);
        mSpinner.setOnItemSelectedListener(this);

        mTrafficCheckbox = (CheckBox) findViewById(R.id.traffic);
        mMyLocationCheckbox = (CheckBox) findViewById(R.id.my_location);
        mBuildingsCheckbox = (CheckBox) findViewById(R.id.buildings);
        mIndoorCheckbox = (CheckBox) findViewById(R.id.indoor);

        // Map loads asynchronously; the rest of the wiring happens in onMapReady().
        SupportMapFragment mapFragment =
                (SupportMapFragment) getSupportFragmentManager().findFragmentById(R.id.map);
        mapFragment.getMapAsync(this);
    }

    @Override
    public void onMapReady(GoogleMap map) {
        mMap = map;
        // Sync the map with the current state of every UI control.
        updateMapType();
        updateTraffic();
        updateMyLocation();
        updateBuildings();
        updateIndoor();
    }

    // Returns true when the map is usable; otherwise toasts and returns false.
    private boolean checkReady() {
        if (mMap == null) {
            Toast.makeText(this, R.string.map_not_ready, Toast.LENGTH_SHORT).show();
            return false;
        }
        return true;
    }

    /**
     * Called when the Traffic checkbox is clicked.
     */
    public void onTrafficToggled(View view) {
        updateTraffic();
    }

    private void updateTraffic() {
        if (!checkReady()) {
            return;
        }
        mMap.setTrafficEnabled(mTrafficCheckbox.isChecked());
    }

    /**
     * Called when the MyLocation checkbox is clicked.
     */
    public void onMyLocationToggled(View view) {
        updateMyLocation();
    }

    @SuppressLint("MissingPermission")
    private void updateMyLocation() {
        if (!checkReady()) {
            return;
        }

        if (!mMyLocationCheckbox.isChecked()) {
            mMap.setMyLocationEnabled(false);
            return;
        }

        // Enable the location layer. Request the location permission if needed.
        if (ContextCompat.checkSelfPermission(this, Manifest.permission.ACCESS_FINE_LOCATION)
                == PackageManager.PERMISSION_GRANTED) {
            mMap.setMyLocationEnabled(true);
        } else {
            // Uncheck the box until the layer has been enabled and request missing permission.
            mMyLocationCheckbox.setChecked(false);
            PermissionUtils.requestPermission(this, LOCATION_PERMISSION_REQUEST_CODE,
                    Manifest.permission.ACCESS_FINE_LOCATION, false);
        }
    }

    @SuppressLint("MissingPermission")
    @Override
    public void onRequestPermissionsResult(int requestCode, String[] permissions,
            int[] results) {
        if (requestCode != LOCATION_PERMISSION_REQUEST_CODE) {
            return;
        }

        if (PermissionUtils.isPermissionGranted(permissions, results,
                Manifest.permission.ACCESS_FINE_LOCATION)) {
            // Re-check the box now that the layer can actually be enabled.
            mMap.setMyLocationEnabled(true);
            mMyLocationCheckbox.setChecked(true);
        } else {
            // Defer the dialog: fragment transactions are unsafe inside this callback.
            mShowPermissionDeniedDialog = true;
        }
    }

    @Override
    protected void onResumeFragments() {
        super.onResumeFragments();
        if (mShowPermissionDeniedDialog) {
            // Safe to show the dialog fragment here (fragment state is restored by now).
            PermissionUtils.PermissionDeniedDialog
                    .newInstance(false).show(getSupportFragmentManager(), "dialog");
            mShowPermissionDeniedDialog = false;
        }
    }

    /**
     * Called when the Buildings checkbox is clicked.
     */
    public void onBuildingsToggled(View view) {
        updateBuildings();
    }

    private void updateBuildings() {
        if (!checkReady()) {
            return;
        }
        mMap.setBuildingsEnabled(mBuildingsCheckbox.isChecked());
    }

    /**
     * Called when the Indoor checkbox is clicked.
     */
    public void onIndoorToggled(View view) {
        updateIndoor();
    }

    private void updateIndoor() {
        if (!checkReady()) {
            return;
        }
        mMap.setIndoorEnabled(mIndoorCheckbox.isChecked());
    }

    @Override
    public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
        updateMapType();
    }

    private void updateMapType() {
        // No toast because this can also be called by the Android framework in onResume() at which
        // point mMap may not be ready yet.
        if (mMap == null) {
            return;
        }

        String layerName = ((String) mSpinner.getSelectedItem());
        if (layerName.equals(getString(R.string.normal))) {
            mMap.setMapType(MAP_TYPE_NORMAL);
        } else if (layerName.equals(getString(R.string.hybrid))) {
            mMap.setMapType(MAP_TYPE_HYBRID);
        } else if (layerName.equals(getString(R.string.satellite))) {
            mMap.setMapType(MAP_TYPE_SATELLITE);
        } else if (layerName.equals(getString(R.string.terrain))) {
            mMap.setMapType(MAP_TYPE_TERRAIN);
        } else if (layerName.equals(getString(R.string.none_map))) {
            mMap.setMapType(MAP_TYPE_NONE);
        } else {
            Log.i("LDA", "Error setting layer with name " + layerName);
        }
    }

    @Override
    public void onNothingSelected(AdapterView<?> parent) {
        // Do nothing.
    }
}
/*
 * Copyright 2016 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.handler.codec.http;

import io.netty.channel.embedded.EmbeddedChannel;
import io.netty.util.ReferenceCountUtil;
import io.netty.util.internal.StringUtil;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

import java.util.Arrays;
import java.util.Collection;

import static io.netty.handler.codec.http.HttpHeaderValues.CLOSE;
import static io.netty.handler.codec.http.HttpHeaderValues.KEEP_ALIVE;
import static io.netty.handler.codec.http.HttpHeaderValues.MULTIPART_MIXED;
import static io.netty.handler.codec.http.HttpResponseStatus.NO_CONTENT;
import static io.netty.handler.codec.http.HttpResponseStatus.OK;
import static io.netty.handler.codec.http.HttpUtil.isContentLengthSet;
import static io.netty.handler.codec.http.HttpUtil.isKeepAlive;
import static io.netty.handler.codec.http.HttpUtil.setContentLength;
import static io.netty.handler.codec.http.HttpUtil.setKeepAlive;
import static io.netty.handler.codec.http.HttpUtil.setTransferEncodingChunked;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

/**
 * Parameterized tests for {@link HttpServerKeepAliveHandler}: each parameter row pins
 * whether a given (HTTP version, status, request keep-alive, message-length style,
 * response Connection header) combination must keep the connection open.
 */
@RunWith(Parameterized.class)
public class HttpServerKeepAliveHandlerTest {
    // Sentinel for the sendKeepAlive parameter column.
    private static final String REQUEST_KEEP_ALIVE = "REQUEST_KEEP_ALIVE";
    // Message-length styles used by setupMessageLength().
    private static final int NOT_SELF_DEFINED_MSG_LENGTH = 0;
    private static final int SET_RESPONSE_LENGTH = 1;
    private static final int SET_MULTIPART = 2;
    private static final int SET_CHUNKED = 4;

    private final boolean isKeepAliveResponseExpected;
    private final HttpVersion httpVersion;
    private final HttpResponseStatus responseStatus;
    private final String sendKeepAlive;
    private final int setSelfDefinedMessageLength;
    private final String setResponseConnection;
    private EmbeddedChannel channel;

    // Columns: expected keep-alive, HTTP version, status, request keep-alive?,
    // message-length style, explicit response Connection header (or null).
    @Parameters
    public static Collection<Object[]> keepAliveProvider() {
        return Arrays.asList(new Object[][] {
                { true, HttpVersion.HTTP_1_0, OK, REQUEST_KEEP_ALIVE, SET_RESPONSE_LENGTH, KEEP_ALIVE },          //  0
                { true, HttpVersion.HTTP_1_0, OK, REQUEST_KEEP_ALIVE, SET_MULTIPART, KEEP_ALIVE },                //  1
                { false, HttpVersion.HTTP_1_0, OK, null, SET_RESPONSE_LENGTH, null },                             //  2
                { true, HttpVersion.HTTP_1_1, OK, REQUEST_KEEP_ALIVE, SET_RESPONSE_LENGTH, null },                //  3
                { false, HttpVersion.HTTP_1_1, OK, REQUEST_KEEP_ALIVE, SET_RESPONSE_LENGTH, CLOSE },              //  4
                { true, HttpVersion.HTTP_1_1, OK, REQUEST_KEEP_ALIVE, SET_MULTIPART, null },                      //  5
                { true, HttpVersion.HTTP_1_1, OK, REQUEST_KEEP_ALIVE, SET_CHUNKED, null },                        //  6
                { false, HttpVersion.HTTP_1_1, OK, null, SET_RESPONSE_LENGTH, null },                             //  7
                { false, HttpVersion.HTTP_1_0, OK, REQUEST_KEEP_ALIVE, NOT_SELF_DEFINED_MSG_LENGTH, null },       //  8
                { false, HttpVersion.HTTP_1_0, OK, null, NOT_SELF_DEFINED_MSG_LENGTH, null },                     //  9
                { false, HttpVersion.HTTP_1_1, OK, REQUEST_KEEP_ALIVE, NOT_SELF_DEFINED_MSG_LENGTH, null },       // 10
                { false, HttpVersion.HTTP_1_1, OK, null, NOT_SELF_DEFINED_MSG_LENGTH, null },                     // 11
                { false, HttpVersion.HTTP_1_0, OK, REQUEST_KEEP_ALIVE, SET_RESPONSE_LENGTH, null },               // 12
                { true, HttpVersion.HTTP_1_1, NO_CONTENT, REQUEST_KEEP_ALIVE, NOT_SELF_DEFINED_MSG_LENGTH, null}, // 13
                { false, HttpVersion.HTTP_1_0, NO_CONTENT, null, NOT_SELF_DEFINED_MSG_LENGTH, null}               // 14
        });
    }

    public HttpServerKeepAliveHandlerTest(boolean isKeepAliveResponseExpected, HttpVersion httpVersion,
                                          HttpResponseStatus responseStatus,
                                          String sendKeepAlive, int setSelfDefinedMessageLength,
                                          CharSequence setResponseConnection) {
        this.isKeepAliveResponseExpected = isKeepAliveResponseExpected;
        this.httpVersion = httpVersion;
        this.responseStatus = responseStatus;
        this.sendKeepAlive = sendKeepAlive;
        this.setSelfDefinedMessageLength = setSelfDefinedMessageLength;
        this.setResponseConnection = setResponseConnection == null? null : setResponseConnection.toString();
    }

    @Before
    public void setUp() {
        // Fresh channel per test so connection state never leaks between parameter rows.
        channel = new EmbeddedChannel(new HttpServerKeepAliveHandler());
    }

    // Single request/response exchange: the channel must stay open iff the parameter
    // row expects keep-alive, and the written response must advertise the same.
    @Test
    public void test_KeepAlive() throws Exception {
        FullHttpRequest request = new DefaultFullHttpRequest(httpVersion, HttpMethod.GET, "/v1/foo/bar");
        setKeepAlive(request, REQUEST_KEEP_ALIVE.equals(sendKeepAlive));
        HttpResponse response = new DefaultFullHttpResponse(httpVersion, responseStatus);
        if (!StringUtil.isNullOrEmpty(setResponseConnection)) {
            response.headers().set(HttpHeaderNames.CONNECTION, setResponseConnection);
        }
        setupMessageLength(response);

        assertTrue(channel.writeInbound(request));
        Object requestForwarded = channel.readInbound();
        assertEquals(request, requestForwarded);
        ReferenceCountUtil.release(requestForwarded);
        channel.writeAndFlush(response);
        HttpResponse writtenResponse = channel.readOutbound();

        assertEquals("channel.isOpen", isKeepAliveResponseExpected, channel.isOpen());
        assertEquals("response keep-alive", isKeepAliveResponseExpected, isKeepAlive(writtenResponse));
        ReferenceCountUtil.release(writtenResponse);
        assertFalse(channel.finishAndReleaseAll());
    }

    // An explicit "Connection: close" on the response must close the channel
    // regardless of the parameter row.
    @Test
    public void testConnectionCloseHeaderHandledCorrectly() throws Exception {
        HttpResponse response = new DefaultFullHttpResponse(httpVersion, responseStatus);
        response.headers().set(HttpHeaderNames.CONNECTION, HttpHeaderValues.CLOSE);
        setupMessageLength(response);

        channel.writeAndFlush(response);
        HttpResponse writtenResponse = channel.readOutbound();

        assertFalse(channel.isOpen());
        ReferenceCountUtil.release(writtenResponse);
        assertFalse(channel.finishAndReleaseAll());
    }

    // Same as above, but written with a void promise to exercise that write path.
    @Test
    public void testConnectionCloseHeaderHandledCorrectlyForVoidPromise() throws Exception {
        HttpResponse response = new DefaultFullHttpResponse(httpVersion, responseStatus);
        response.headers().set(HttpHeaderNames.CONNECTION, HttpHeaderValues.CLOSE);
        setupMessageLength(response);

        channel.writeAndFlush(response, channel.voidPromise());
        HttpResponse writtenResponse = channel.readOutbound();

        assertFalse(channel.isOpen());
        ReferenceCountUtil.release(writtenResponse);
        assertFalse(channel.finishAndReleaseAll());
    }

    // Pipelined exchange: keep-alive response, informational (1xx) response, then the
    // parameterized response; the channel must only close at the point the row dictates.
    @Test
    public void test_PipelineKeepAlive() {
        FullHttpRequest firstRequest = new DefaultFullHttpRequest(httpVersion, HttpMethod.GET, "/v1/foo/bar");
        setKeepAlive(firstRequest, true);
        FullHttpRequest secondRequest = new DefaultFullHttpRequest(httpVersion, HttpMethod.GET, "/v1/foo/bar");
        setKeepAlive(secondRequest, REQUEST_KEEP_ALIVE.equals(sendKeepAlive));
        FullHttpRequest finalRequest = new DefaultFullHttpRequest(httpVersion, HttpMethod.GET, "/v1/foo/bar");
        setKeepAlive(finalRequest, false);
        FullHttpResponse response = new DefaultFullHttpResponse(httpVersion, responseStatus);
        FullHttpResponse informationalResp = new DefaultFullHttpResponse(httpVersion, HttpResponseStatus.PROCESSING);
        setKeepAlive(response, true);
        setContentLength(response, 0);
        setKeepAlive(informationalResp, true);

        assertTrue(channel.writeInbound(firstRequest, secondRequest, finalRequest));

        // Phase 1: first request answered with an explicit keep-alive response.
        Object requestForwarded = channel.readInbound();
        assertEquals(firstRequest, requestForwarded);
        ReferenceCountUtil.release(requestForwarded);
        channel.writeAndFlush(response.retainedDuplicate());
        HttpResponse firstResponse = channel.readOutbound();
        assertTrue("channel.isOpen", channel.isOpen());
        assertTrue("response keep-alive", isKeepAlive(firstResponse));
        ReferenceCountUtil.release(firstResponse);

        // Phase 2: informational response must never close the connection.
        requestForwarded = channel.readInbound();
        assertEquals(secondRequest, requestForwarded);
        ReferenceCountUtil.release(requestForwarded);
        channel.writeAndFlush(informationalResp);
        HttpResponse writtenInfoResp = channel.readOutbound();
        assertTrue("channel.isOpen", channel.isOpen());
        assertTrue("response keep-alive", isKeepAlive(writtenInfoResp));
        ReferenceCountUtil.release(writtenInfoResp);

        // Phase 3: parameterized response decides whether the channel stays open.
        if (!StringUtil.isNullOrEmpty(setResponseConnection)) {
            response.headers().set(HttpHeaderNames.CONNECTION, setResponseConnection);
        } else {
            response.headers().remove(HttpHeaderNames.CONNECTION);
        }
        setupMessageLength(response);
        channel.writeAndFlush(response.retainedDuplicate());
        HttpResponse secondResponse = channel.readOutbound();
        assertEquals("channel.isOpen", isKeepAliveResponseExpected, channel.isOpen());
        assertEquals("response keep-alive", isKeepAliveResponseExpected, isKeepAlive(secondResponse));
        ReferenceCountUtil.release(secondResponse);

        requestForwarded = channel.readInbound();
        assertEquals(finalRequest, requestForwarded);
        ReferenceCountUtil.release(requestForwarded);

        // Phase 4: the final request asked to close, so a still-open channel must close now.
        if (isKeepAliveResponseExpected) {
            channel.writeAndFlush(response);
            HttpResponse finalResponse = channel.readOutbound();
            assertFalse("channel.isOpen", channel.isOpen());
            assertFalse("response keep-alive", isKeepAlive(finalResponse));
        }
        ReferenceCountUtil.release(response);
        assertFalse(channel.finishAndReleaseAll());
    }

    // Applies the parameter row's message-length style to the response headers.
    private void setupMessageLength(HttpResponse response) {
        switch (setSelfDefinedMessageLength) {
        case NOT_SELF_DEFINED_MSG_LENGTH:
            if (isContentLengthSet(response)) {
                response.headers().remove(HttpHeaderNames.CONTENT_LENGTH);
            }
            break;
        case SET_RESPONSE_LENGTH:
            setContentLength(response, 0);
            break;
        case SET_CHUNKED:
            setTransferEncodingChunked(response, true);
            break;
        case SET_MULTIPART:
            response.headers().set(HttpHeaderNames.CONTENT_TYPE, MULTIPART_MIXED.toUpperCase());
            break;
        default:
            throw new IllegalArgumentException("selfDefinedMessageLength: " + setSelfDefinedMessageLength);
        }
    }
}
/* * Copyright 2014 MovingBlocks * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.terasology.logic.inventory; import com.google.common.collect.Lists; import org.terasology.entitySystem.entity.EntityManager; import org.terasology.entitySystem.entity.EntityRef; import org.terasology.entitySystem.event.ReceiveEvent; import org.terasology.entitySystem.systems.BaseComponentSystem; import org.terasology.entitySystem.systems.RegisterMode; import org.terasology.entitySystem.systems.RegisterSystem; import org.terasology.logic.inventory.action.GiveItemAction; import org.terasology.logic.inventory.action.MoveItemAction; import org.terasology.logic.inventory.action.RemoveItemAction; import org.terasology.logic.inventory.action.SwitchItemAction; import org.terasology.logic.inventory.events.BeforeItemPutInInventory; import org.terasology.logic.inventory.events.BeforeItemRemovedFromInventory; import org.terasology.logic.inventory.events.InventoryChangeAcknowledgedRequest; import org.terasology.logic.inventory.events.MoveItemAmountRequest; import org.terasology.logic.inventory.events.MoveItemRequest; import org.terasology.logic.inventory.events.MoveItemToSlotsRequest; import org.terasology.registry.In; import org.terasology.registry.Share; import java.util.Arrays; import java.util.Collections; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; /** */ @RegisterSystem(RegisterMode.AUTHORITY) @Share(value = InventoryManager.class) 
/**
 * Authority-side implementation of {@link InventoryManager}: handles inventory action
 * events (give/remove/move/switch) and client move requests, delegating slot-level
 * mechanics to {@link InventoryUtils}. Registered as the shared InventoryManager.
 */
public class InventoryAuthoritySystem extends BaseComponentSystem implements InventoryManager {
    @In
    private EntityManager entityManager;

    /** Setter used by tests / manual wiring in place of {@code @In} injection. */
    public void setEntityManager(EntityManager entityManager) {
        this.entityManager = entityManager;
    }

    /** Event handler: delegates a switch action to the InventoryManager method below. */
    @ReceiveEvent(components = {InventoryComponent.class})
    public void switchItem(SwitchItemAction event, EntityRef entity) {
        switchItem(entity, event.getInstigator(), event.getSlotFrom(), event.getTo(), event.getSlotTo());
    }

    /** Event handler: delegates a move action (partial stack allowed via count). */
    @ReceiveEvent(components = {InventoryComponent.class})
    public void moveItem(MoveItemAction event, EntityRef entity) {
        moveItem(entity, event.getInstigator(), event.getSlotFrom(), event.getTo(), event.getSlotTo(), event.getCount());
    }

    /**
     * Event handler for item removal. A null result means the removal failed (event not
     * consumed); EntityRef.NULL means items were removed but destroyed/nothing survived.
     */
    @ReceiveEvent(components = {InventoryComponent.class})
    public void removeItem(RemoveItemAction event, EntityRef entity) {
        final EntityRef result = removeItemInternal(entity, event.getInstigator(), event.getItems(),
                event.isDestroyRemoved(), event.getCount());
        if (result != null) {
            if (result != EntityRef.NULL) {
                event.setRemovedItem(result);
            }
            event.consume();
        }
    }

    /**
     * Removes {@code toRemove} items spread across the given slots.
     * Phase 1 plans: which slots are fully drained and which single slot merely shrinks.
     * Phase 2 applies the plan. Returns null if not enough items could be removed
     * (e.g. a BeforeItemRemovedFromInventory veto), EntityRef.NULL if everything removed
     * was destroyed, otherwise one surviving item entity holding the removed count.
     */
    private EntityRef removeItemFromSlots(EntityRef instigator, boolean destroyRemoved, EntityRef entity,
                                          List<Integer> slotsWithItem, int toRemove) {
        int shrinkSlotNo = -1;       // at most one slot is only partially consumed
        int shrinkCountResult = 0;   // stack size that slot should end up with
        List<Integer> slotsTotallyConsumed = new LinkedList<>();

        int removesRemaining = toRemove;
        for (int slot : slotsWithItem) {
            EntityRef itemAtEntity = InventoryUtils.getItemAt(entity, slot);
            ItemComponent itemAt = itemAtEntity.getComponent(ItemComponent.class);
            if (itemAt.stackCount <= removesRemaining) {
                // Whole stack goes — but only if the inventory doesn't veto the removal.
                if (canRemoveItemFromSlot(instigator, entity, itemAtEntity, slot)) {
                    slotsTotallyConsumed.add(slot);
                    removesRemaining -= itemAt.stackCount;
                }
            } else {
                shrinkSlotNo = slot;
                shrinkCountResult = itemAt.stackCount - removesRemaining;
                removesRemaining = 0;
            }

            if (removesRemaining == 0) {
                break;
            }
        }

        if (removesRemaining > 0) {
            // Could not gather enough items; nothing has been mutated yet.
            return null;
        }

        EntityRef removed = null;
        int removedCount = 0;
        for (int slot : slotsTotallyConsumed) {
            EntityRef itemAt = InventoryUtils.getItemAt(entity, slot);
            removedCount += InventoryUtils.getStackCount(itemAt);

            if (destroyRemoved) {
                InventoryUtils.putItemIntoSlot(entity, EntityRef.NULL, slot);
                itemAt.destroy();
            } else {
                if (removed == null) {
                    // Keep the first drained stack entity alive as the result carrier.
                    InventoryUtils.putItemIntoSlot(entity, EntityRef.NULL, slot);
                    removed = itemAt;
                } else {
                    InventoryUtils.putItemIntoSlot(entity, EntityRef.NULL, slot);
                    itemAt.destroy();
                }
            }
        }

        if (shrinkSlotNo > -1) {
            EntityRef itemAt = InventoryUtils.getItemAt(entity, shrinkSlotNo);
            removedCount += InventoryUtils.getStackCount(itemAt) - shrinkCountResult;
            if (destroyRemoved) {
                InventoryUtils.adjustStackSize(entity, shrinkSlotNo, shrinkCountResult);
            } else {
                if (removed == null) {
                    // Nothing fully drained: copy the partially-consumed stack as the result.
                    removed = entityManager.copy(itemAt);
                }
                InventoryUtils.adjustStackSize(entity, shrinkSlotNo, shrinkCountResult);
            }
        }

        if (removed != null) {
            ItemComponent item = removed.getComponent(ItemComponent.class);
            if (item.stackCount != removedCount) {
                // NOTE(review): stackCount is a byte — removedCount > 127 would overflow
                // the cast; presumably max stack sizes keep this in range. TODO confirm.
                item.stackCount = (byte) removedCount;
                removed.saveComponent(item);
            }
            return removed;
        }
        return EntityRef.NULL;
    }

    /** Event handler: delegates a give action; consumes the event on success. */
    @ReceiveEvent(components = {InventoryComponent.class})
    public void giveItem(GiveItemAction event, EntityRef entity) {
        if (giveItem(entity, event.getInstigator(), event.getItem(), event.getSlots())) {
            event.consume();
        }
    }

    /**
     * Places {@code item} into the given candidate slots: merges into matching stacks
     * first, then falls back to the first empty, non-vetoed slot. Returns false (with no
     * mutation) if the item cannot be fully placed. When the item is absorbed entirely
     * by merging, its entity is destroyed.
     */
    private boolean giveItemToSlots(EntityRef instigator, EntityRef entity, EntityRef item, List<Integer> slots) {
        int toConsume = InventoryUtils.getStackCount(item);

        Map<Integer, Integer> consumableCount = new LinkedHashMap<>();

        // First: check which slots we can merge into
        for (int slot : slots) {
            EntityRef itemAtEntity = InventoryUtils.getItemAt(entity, slot);
            ItemComponent itemAt = itemAtEntity.getComponent(ItemComponent.class);
            if (itemAt != null && InventoryUtils.isSameItem(item, itemAtEntity)) {
                int spaceInSlot = itemAt.maxStackSize - itemAt.stackCount;
                int toAdd = Math.min(toConsume, spaceInSlot);
                if (toAdd > 0) {
                    consumableCount.put(slot, toAdd);
                    toConsume -= toAdd;
                    if (toConsume == 0) {
                        break;
                    }
                }
            }
        }

        int emptySlotNo = -1;
        int emptySlotCount = toConsume;
        if (toConsume > 0) {
            // Next: check which slots are empty and figure out where to add
            for (int slot : slots) {
                EntityRef itemAtEntity = InventoryUtils.getItemAt(entity, slot);
                ItemComponent itemAt = itemAtEntity.getComponent(ItemComponent.class);
                if (itemAt == null && canPutItemIntoSlot(instigator, entity, item, slot)) {
                    emptySlotNo = slot;
                    emptySlotCount = toConsume;
                    toConsume = 0;
                    break;
                }
            }
        }

        if (toConsume > 0) {
            // Not enough room; planning phase made no changes, so just fail.
            return false;
        }

        // Apply planned merges.
        for (Map.Entry<Integer, Integer> slotCount : consumableCount.entrySet()) {
            int slot = slotCount.getKey();
            int count = slotCount.getValue();

            EntityRef itemAtEntity = InventoryUtils.getItemAt(entity, slot);
            ItemComponent itemAt = itemAtEntity.getComponent(ItemComponent.class);
            InventoryUtils.adjustStackSize(entity, slot, itemAt.stackCount + count);
        }

        if (emptySlotNo > -1) {
            ItemComponent sourceItem = item.getComponent(ItemComponent.class);
            // NOTE(review): byte cast — see overflow caveat in removeItemFromSlots.
            sourceItem.stackCount = (byte) emptySlotCount;
            item.saveComponent(sourceItem);
            InventoryUtils.putItemIntoSlot(entity, item, emptySlotNo);
        } else {
            // Fully merged into existing stacks: the source entity is no longer needed.
            item.destroy();
        }

        return true;
    }

    /** True unless a BeforeItemPutInInventory listener vetoes the insertion. */
    private boolean canPutItemIntoSlot(EntityRef instigator, EntityRef entity, EntityRef item, int slot) {
        if (!item.exists()) {
            return true;
        }
        BeforeItemPutInInventory itemPut = new BeforeItemPutInInventory(instigator, item, slot);
        entity.send(itemPut);
        return !itemPut.isConsumed();
    }

    /** True unless a BeforeItemRemovedFromInventory listener vetoes the removal. */
    private boolean canRemoveItemFromSlot(EntityRef instigator, EntityRef entity, EntityRef item, int slot) {
        if (!item.exists()) {
            return true;
        }
        BeforeItemRemovedFromInventory itemRemoved = new BeforeItemRemovedFromInventory(instigator, item, slot);
        entity.send(itemRemoved);
        return !itemRemoved.isConsumed();
    }

    /** Client request: move an amount; always acknowledges, even if the move throws. */
    @ReceiveEvent
    public void moveItemAmountRequest(MoveItemAmountRequest request, EntityRef entity) {
        try {
            InventoryUtils.moveItemAmount(request.getInstigator(), request.getFromInventory(), request.getFromSlot(),
                    request.getToInventory(), request.getToSlot(), request.getAmount());
        } finally {
            entity.send(new InventoryChangeAcknowledgedRequest(request.getChangeId()));
        }
    }

    /** Client request: move a whole stack; always acknowledges. */
    @ReceiveEvent
    public void moveItemRequest(MoveItemRequest request, EntityRef entity) {
        try {
            InventoryUtils.moveItem(request.getInstigator(), request.getFromInventory(), request.getFromSlot(),
                    request.getToInventory(), request.getToSlot());
        } finally {
            entity.send(new InventoryChangeAcknowledgedRequest(request.getChangeId()));
        }
    }

    /** Client request: move a stack into a set of candidate slots; always acknowledges. */
    @ReceiveEvent
    public void moveItemToSlotsRequest(MoveItemToSlotsRequest request, EntityRef entity) {
        try {
            InventoryUtils.moveItemToSlots(request.getInstigator(), request.getFromInventory(), request.getFromSlot(),
                    request.getToInventory(), request.getToSlots());
        } finally {
            entity.send(new InventoryChangeAcknowledgedRequest(request.getChangeId()));
        }
    }

    @Override
    public boolean canStackTogether(EntityRef itemA, EntityRef itemB) {
        return InventoryUtils.canStackInto(itemA, itemB);
    }

    @Override
    public int getStackSize(EntityRef item) {
        return InventoryUtils.getStackCount(item);
    }

    @Override
    public EntityRef getItemInSlot(EntityRef inventoryEntity, int slot) {
        return InventoryUtils.getItemAt(inventoryEntity, slot);
    }

    @Override
    public int findSlotWithItem(EntityRef inventoryEntity, EntityRef item) {
        return InventoryUtils.getSlotWithItem(inventoryEntity, item);
    }

    @Override
    public int getNumSlots(EntityRef inventoryEntity) {
        return InventoryUtils.getSlotCount(inventoryEntity);
    }

    /** Give into any slot (null slot list means "all slots of the inventory"). */
    @Override
    public boolean giveItem(EntityRef inventory, EntityRef instigator, EntityRef item) {
        return giveItem(inventory, instigator, item, null);
    }

    /** Give into a single specific slot. */
    @Override
    public boolean giveItem(EntityRef inventory, EntityRef instigator, EntityRef item, int slot) {
        return giveItem(inventory, instigator, item, Arrays.asList(slot));
    }

    /**
     * Give into the listed slots. Returns true for non-items (nothing to do).
     * A null list expands to every slot of the inventory, in order.
     */
    @Override
    public boolean giveItem(EntityRef inventory, EntityRef instigator, EntityRef item, List<Integer> slots) {
        ItemComponent itemToGive = item.getComponent(ItemComponent.class);
        if (itemToGive == null) {
            return true;
        }
        List<Integer> fillSlots = slots;
        if (fillSlots == null) {
            int slotCount = InventoryUtils.getSlotCount(inventory);
            fillSlots = Lists.newArrayList();
            for (int slot = 0; slot < slotCount; slot++) {
                fillSlots.add(slot);
            }
        }
        return giveItemToSlots(instigator, inventory, item, fillSlots);
    }

    @Override
    public EntityRef removeItem(EntityRef inventory, EntityRef instigator, EntityRef item, boolean destroyRemoved) {
        return removeItemInternal(inventory, instigator, Arrays.asList(item), destroyRemoved, null);
    }

    @Override
    public EntityRef removeItem(EntityRef inventory, EntityRef instigator, EntityRef item, boolean destroyRemoved, int count) {
        return removeItemInternal(inventory, instigator, Arrays.asList(item), destroyRemoved, count);
    }

    @Override
    public EntityRef removeItem(EntityRef inventory, EntityRef instigator, List<EntityRef> items, boolean destroyRemoved) {
        return removeItemInternal(inventory, instigator, items, destroyRemoved, null);
    }

    @Override
    public EntityRef removeItem(EntityRef inventory, EntityRef instigator, List<EntityRef> items, boolean destroyRemoved, int count) {
        return removeItemInternal(inventory, instigator, items, destroyRemoved, count);
    }

    /** Remove by slot number; fails (null) if the slot holds fewer than {@code count}. */
    @Override
    public EntityRef removeItem(EntityRef inventory, EntityRef instigator, int slotNo, boolean destroyRemoved, int count) {
        EntityRef item = InventoryUtils.getItemAt(inventory, slotNo);
        if (InventoryUtils.getStackCount(item) < count) {
            return null;
        }
        return removeItemFromSlots(instigator, destroyRemoved, inventory, Collections.singletonList(slotNo), count);
    }

    /**
     * Shared removal path. Validates that all items are stackable with each other,
     * are real items, and are actually present in the inventory; a null {@code count}
     * means "remove everything". Returns null on validation/removal failure.
     */
    private EntityRef removeItemInternal(EntityRef inventory, EntityRef instigator, List<EntityRef> items,
                                         boolean destroyRemoved, Integer count) {
        final EntityRef firstItem = items.get(0);
        for (EntityRef item : items) {
            if (item != firstItem && !InventoryUtils.isSameItem(firstItem, item)) {
                return null;
            }
        }
        for (EntityRef item : items) {
            ItemComponent itemToRemove = item.getComponent(ItemComponent.class);
            if (itemToRemove == null) {
                return EntityRef.NULL;
            }
        }
        List<Integer> slotsWithItem = new LinkedList<>();
        for (EntityRef item : items) {
            int slotWithItem = InventoryUtils.getSlotWithItem(inventory, item);
            if (slotWithItem == -1) {
                return null;
            }
            slotsWithItem.add(slotWithItem);
        }
        Integer toRemove = count;
        if (toRemove == null) {
            // No explicit count: remove the full combined stack size.
            toRemove = 0;
            for (EntityRef item : items) {
                toRemove += InventoryUtils.getStackCount(item);
            }
        }
        return removeItemFromSlots(instigator, destroyRemoved, inventory, slotsWithItem, toRemove);
    }

    @Override
    public boolean moveItem(EntityRef fromInventory, EntityRef instigator, int slotFrom, EntityRef toInventory,
                            int slotTo, int count) {
        return InventoryUtils.moveItemAmount(instigator, fromInventory, slotFrom, toInventory, slotTo, count);
    }

    @Override
    public boolean moveItemToSlots(EntityRef instigator, EntityRef fromInventory, int slotFrom, EntityRef toInventory,
                                   List<Integer> toSlots) {
        return InventoryUtils.moveItemToSlots(instigator, fromInventory, slotFrom, toInventory, toSlots);
    }

    @Override
    public boolean switchItem(EntityRef fromInventory, EntityRef instigator, int slotFrom, EntityRef toInventory, int slotTo) {
        return InventoryUtils.moveItem(instigator, fromInventory, slotFrom, toInventory, slotTo);
    }
}
package org.apache.lucene.index; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.IOException; import java.util.Arrays; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.PriorityQueue; /** * Exposes {@link TermsEnum} API, merged from {@link TermsEnum} API of sub-segments. * This does a merge sort, by term text, of the sub-readers. 
 *
 * @lucene.experimental
 */
public final class MultiTermsEnum extends TermsEnum {

  private final TermMergeQueue queue;
  private final TermsEnumWithSlice[] subs;        // all of our subs (one per sub-reader)
  private final TermsEnumWithSlice[] currentSubs; // current subs that have at least one term for this field
  private final TermsEnumWithSlice[] top;         // subs positioned on the current (smallest) term
  private final MultiPostingsEnum.EnumWithSlice[] subDocs;

  private BytesRef lastSeek;                      // last seekCeil target, for the LUCENE-2130 optimization
  private boolean lastSeekExact;                  // true if the last positioning call was seekExact
  private final BytesRefBuilder lastSeekScratch = new BytesRefBuilder();

  private int numTop;                             // how many entries of top[] are valid
  private int numSubs;                            // how many entries of currentSubs[] are valid
  private BytesRef current;                       // the merged enum's current term

  /** Pairs a sub-reader's TermsEnum with its index into subs[]. */
  static class TermsEnumIndex {
    public final static TermsEnumIndex[] EMPTY_ARRAY = new TermsEnumIndex[0];
    final int subIndex;
    final TermsEnum termsEnum;

    public TermsEnumIndex(TermsEnum termsEnum, int subIndex) {
      this.termsEnum = termsEnum;
      this.subIndex = subIndex;
    }
  }

  /** Returns how many sub-reader slices contain the current
   *  term.  @see #getMatchArray */
  public int getMatchCount() {
    return numTop;
  }

  /** Returns sub-reader slices positioned to the current term. */
  public TermsEnumWithSlice[] getMatchArray() {
    return top;
  }

  /** Sole constructor.
   *  @param slices Which sub-reader slices we should
   *  merge. */
  public MultiTermsEnum(ReaderSlice[] slices) {
    queue = new TermMergeQueue(slices.length);
    top = new TermsEnumWithSlice[slices.length];
    subs = new TermsEnumWithSlice[slices.length];
    subDocs = new MultiPostingsEnum.EnumWithSlice[slices.length];
    for(int i=0;i<slices.length;i++) {
      subs[i] = new TermsEnumWithSlice(i, slices[i]);
      subDocs[i] = new MultiPostingsEnum.EnumWithSlice();
      subDocs[i].slice = slices[i];
    }
    currentSubs = new TermsEnumWithSlice[slices.length];
  }

  @Override
  public BytesRef term() {
    return current;
  }

  /** The terms array must be newly created TermsEnum, ie
   *  {@link TermsEnum#next} has not yet been called.
   *  Primes each sub on its first term and seeds the merge queue;
   *  returns {@link TermsEnum#EMPTY} if no sub has any terms. */
  public TermsEnum reset(TermsEnumIndex[] termsEnumsIndex) throws IOException {
    assert termsEnumsIndex.length <= top.length;
    numSubs = 0;
    numTop = 0;
    queue.clear();
    for(int i=0;i<termsEnumsIndex.length;i++) {

      final TermsEnumIndex termsEnumIndex = termsEnumsIndex[i];
      assert termsEnumIndex != null;

      final BytesRef term = termsEnumIndex.termsEnum.next();
      if (term != null) {
        final TermsEnumWithSlice entry = subs[termsEnumIndex.subIndex];
        entry.reset(termsEnumIndex.termsEnum, term);
        queue.add(entry);
        currentSubs[numSubs++] = entry;
      } else {
        // field has no terms
      }
    }

    if (queue.size() == 0) {
      return TermsEnum.EMPTY;
    } else {
      return this;
    }
  }

  @Override
  public boolean seekExact(BytesRef term) throws IOException {
    queue.clear();
    numTop = 0;

    // Only worth skipping a sub's re-seek if this seek target is >= the last one.
    boolean seekOpt = false;
    if (lastSeek != null && lastSeek.compareTo(term) <= 0) {
      seekOpt = true;
    }

    lastSeek = null;
    lastSeekExact = true;

    for(int i=0;i<numSubs;i++) {
      final boolean status;
      // LUCENE-2130: if we had just seek'd already, prior
      // to this seek, and the new seek term is after the
      // previous one, don't try to re-seek this sub if its
      // current term is already beyond this new seek term.
      // Doing so is a waste because this sub will simply
      // seek to the same spot.
      if (seekOpt) {
        final BytesRef curTerm = currentSubs[i].current;
        if (curTerm != null) {
          final int cmp = term.compareTo(curTerm);
          if (cmp == 0) {
            status = true;
          } else if (cmp < 0) {
            status = false;
          } else {
            status = currentSubs[i].terms.seekExact(term);
          }
        } else {
          status = false;
        }
      } else {
        status = currentSubs[i].terms.seekExact(term);
      }

      if (status) {
        top[numTop++] = currentSubs[i];
        current = currentSubs[i].current = currentSubs[i].terms.term();
        assert term.equals(currentSubs[i].current);
      }
    }

    // if at least one sub had exact match to the requested
    // term then we found match
    return numTop > 0;
  }

  @Override
  public SeekStatus seekCeil(BytesRef term) throws IOException {
    queue.clear();
    numTop = 0;
    lastSeekExact = false;

    // Only worth skipping a sub's re-seek if this seek target is >= the last one.
    boolean seekOpt = false;
    if (lastSeek != null && lastSeek.compareTo(term) <= 0) {
      seekOpt = true;
    }

    // Copy the target so next()/seekExact can compare against it later.
    lastSeekScratch.copyBytes(term);
    lastSeek = lastSeekScratch.get();

    for(int i=0;i<numSubs;i++) {
      final SeekStatus status;
      // LUCENE-2130: if we had just seek'd already, prior
      // to this seek, and the new seek term is after the
      // previous one, don't try to re-seek this sub if its
      // current term is already beyond this new seek term.
      // Doing so is a waste because this sub will simply
      // seek to the same spot.
      if (seekOpt) {
        final BytesRef curTerm = currentSubs[i].current;
        if (curTerm != null) {
          final int cmp = term.compareTo(curTerm);
          if (cmp == 0) {
            status = SeekStatus.FOUND;
          } else if (cmp < 0) {
            status = SeekStatus.NOT_FOUND;
          } else {
            status = currentSubs[i].terms.seekCeil(term);
          }
        } else {
          status = SeekStatus.END;
        }
      } else {
        status = currentSubs[i].terms.seekCeil(term);
      }

      if (status == SeekStatus.FOUND) {
        top[numTop++] = currentSubs[i];
        current = currentSubs[i].current = currentSubs[i].terms.term();
      } else {
        if (status == SeekStatus.NOT_FOUND) {
          currentSubs[i].current = currentSubs[i].terms.term();
          assert currentSubs[i].current != null;
          queue.add(currentSubs[i]);
        } else {
          // enum exhausted
          currentSubs[i].current = null;
        }
      }
    }

    if (numTop > 0) {
      // at least one sub had exact match to the requested term
      return SeekStatus.FOUND;
    } else if (queue.size() > 0) {
      // no sub had exact match, but at least one sub found
      // a term after the requested term -- advance to that
      // next term:
      pullTop();
      return SeekStatus.NOT_FOUND;
    } else {
      return SeekStatus.END;
    }
  }

  @Override
  public void seekExact(long ord) {
    // Ords are not supported by the merged view.
    throw new UnsupportedOperationException();
  }

  @Override
  public long ord() {
    // Ords are not supported by the merged view.
    throw new UnsupportedOperationException();
  }

  private void pullTop() {
    // extract all subs from the queue that have the same
    // top term
    assert numTop == 0;
    while(true) {
      top[numTop++] = queue.pop();
      if (queue.size() == 0 || !(queue.top()).current.bytesEquals(top[0].current)) {
        break;
      }
    }
    current = top[0].current;
  }

  private void pushTop() throws IOException {
    // call next() on each top, and put back into queue
    for(int i=0;i<numTop;i++) {
      top[i].current = top[i].terms.next();
      if (top[i].current != null) {
        queue.add(top[i]);
      } else {
        // no more fields in this reader
      }
    }
    numTop = 0;
  }

  @Override
  public BytesRef next() throws IOException {
    if (lastSeekExact) {
      // Must seekCeil at this point, so those subs that
      // didn't have the term can find the following term.
      // NOTE: we could save some CPU by only seekCeil the
      // subs that didn't match the last exact seek... but
      // most impls short-circuit if you seekCeil to term
      // they are already on.
      final SeekStatus status = seekCeil(current);
      assert status == SeekStatus.FOUND;
      lastSeekExact = false;
    }
    lastSeek = null;

    // restore queue
    pushTop();

    // gather equal top fields
    if (queue.size() > 0) {
      pullTop();
    } else {
      current = null;
    }

    return current;
  }

  @Override
  public int docFreq() throws IOException {
    // Sum over all subs positioned on the current term.
    int sum = 0;
    for(int i=0;i<numTop;i++) {
      sum += top[i].terms.docFreq();
    }
    return sum;
  }

  @Override
  public long totalTermFreq() throws IOException {
    long sum = 0;
    for(int i=0;i<numTop;i++) {
      final long v = top[i].terms.totalTermFreq();
      if (v == -1) {
        // -1 means "not stored"; one unknown sub makes the total unknown.
        return v;
      }
      sum += v;
    }
    return sum;
  }

  @Override
  public PostingsEnum postings(Bits liveDocs, PostingsEnum reuse, int flags) throws IOException {
    MultiPostingsEnum docsEnum;

    // Can only reuse if incoming enum is also a MultiDocsEnum
    if (reuse != null && reuse instanceof MultiPostingsEnum) {
      docsEnum = (MultiPostingsEnum) reuse;
      // ... and was previously created w/ this MultiTermsEnum:
      if (!docsEnum.canReuse(this)) {
        docsEnum = new MultiPostingsEnum(this, subs.length);
      }
    } else {
      docsEnum = new MultiPostingsEnum(this, subs.length);
    }

    final MultiBits multiLiveDocs;
    if (liveDocs instanceof MultiBits) {
      multiLiveDocs = (MultiBits) liveDocs;
    } else {
      multiLiveDocs = null;
    }

    int upto = 0;

    for(int i=0;i<numTop;i++) {

      final TermsEnumWithSlice entry = top[i];

      final Bits b;

      if (multiLiveDocs != null) {
        // optimize for common case: requested skip docs is a
        // congruent sub-slice of MultiBits: in this case, we
        // just pull the liveDocs from the sub reader, rather
        // than making the inefficient
        // Slice(Multi(sub-readers)):
        final MultiBits.SubResult sub = multiLiveDocs.getMatchingSub(entry.subSlice);
        if (sub.matches) {
          b = sub.result;
        } else {
          // custom case: requested skip docs is foreign:
          // must slice it on every access
          b = new BitsSlice(liveDocs, entry.subSlice);
        }
      } else if (liveDocs != null) {
        b = new BitsSlice(liveDocs, entry.subSlice);
      } else {
        // no deletions
        b = null;
      }

      assert entry.index < docsEnum.subPostingsEnums.length: entry.index + " vs " + docsEnum.subPostingsEnums.length + "; " + subs.length;
      final PostingsEnum subPostingsEnum = entry.terms.postings(b, docsEnum.subPostingsEnums[entry.index], flags);
      assert subPostingsEnum != null;
      docsEnum.subPostingsEnums[entry.index] = subPostingsEnum;
      subDocs[upto].postingsEnum = subPostingsEnum;
      subDocs[upto].slice = entry.subSlice;
      upto++;
    }

    return docsEnum.reset(subDocs, upto);
  }

  /** A sub-reader's TermsEnum together with the doc-id slice it covers. */
  final static class TermsEnumWithSlice {
    private final ReaderSlice subSlice;
    TermsEnum terms;
    public BytesRef current;
    final int index;

    public TermsEnumWithSlice(int index, ReaderSlice subSlice) {
      this.subSlice = subSlice;
      this.index = index;
      assert subSlice.length >= 0: "length=" + subSlice.length;
    }

    public void reset(TermsEnum terms, BytesRef term) {
      this.terms = terms;
      current = term;
    }

    @Override
    public String toString() {
      return subSlice.toString()+":"+terms;
    }
  }

  /** Orders subs by current term, breaking ties by slice start (doc order). */
  private final static class TermMergeQueue extends PriorityQueue<TermsEnumWithSlice> {
    TermMergeQueue(int size) {
      super(size);
    }

    @Override
    protected boolean lessThan(TermsEnumWithSlice termsA, TermsEnumWithSlice termsB) {
      final int cmp = termsA.current.compareTo(termsB.current);
      if (cmp != 0) {
        return cmp < 0;
      } else {
        return termsA.subSlice.start < termsB.subSlice.start;
      }
    }
  }

  @Override
  public String toString() {
    return "MultiTermsEnum(" + Arrays.toString(subs) + ")";
  }
}
package com.google.acre.DOM; /* * Copyright 2002-2008 Andy Clark * Copyright 2008 Metaweb Technologies, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.IOException; import org.apache.xerces.xni.XNIException; import org.apache.xerces.xni.parser.XMLConfigurationException; import org.apache.xerces.xni.parser.XMLDocumentFilter; import org.apache.xerces.xni.parser.XMLDocumentSource; import org.apache.xerces.xni.parser.XMLInputSource; import org.apache.xerces.xni.parser.XMLParseException; import org.cyberneko.html.HTMLComponent; import org.cyberneko.html.HTMLConfiguration; import org.cyberneko.html.xercesbridge.XercesBridge; public class AcreHTMLConfiguration extends HTMLConfiguration { AcreTagBalancer tagBalancer = new AcreTagBalancer(); AcreHTMLScanner scanner = new AcreHTMLScanner(); AcreErrorReporter errorReporter = new AcreErrorReporter(); public class AcreErrorReporter extends ErrorReporter { /** Reports a warning. */ public void reportWarning(String key, Object[] args) throws XMLParseException { if (fErrorHandler != null) { fErrorHandler.warning(ERROR_DOMAIN, key, createException(key, args)); } } // reportWarning(String,Object[]) /** Reports an error. */ public void reportError(String key, Object[] args) throws XMLParseException { if (fErrorHandler != null) { fErrorHandler.error(ERROR_DOMAIN, key, createException(key, args)); } } // reportError(String,Object[]) /** Creates parse exception. 
*/ protected XMLParseException createException(String key, Object[] args) { String message = formatMessage(key, args); return new XMLParseException(scanner, message); } // createException(String,Object[]):XMLParseException } public AcreHTMLConfiguration() { super(); // remove the default document scanner and add our own fHTMLComponents.remove(fDocumentScanner); fHTMLComponents.remove(fTagBalancer); addComponent(scanner); addComponent(tagBalancer); setProperty(ERROR_REPORTER, errorReporter); } /** * Pushes an input source onto the current entity stack. This * enables the scanner to transparently scan new content (e.g. * the output written by an embedded script). At the end of the * current entity, the scanner returns where it left off at the * time this entity source was pushed. * <p> * <strong>Hint:</strong> * To use this feature to insert the output of &lt;SCRIPT&gt; * tags, remember to buffer the <em>entire</em> output of the * processed instructions before pushing a new input source. * Otherwise, events may appear out of sequence. * * @param inputSource The new input source to start scanning. * @see #evaluateInputSource(XMLInputSource) */ public void pushInputSource(XMLInputSource inputSource) { scanner.pushInputSource(inputSource); } // pushInputSource(XMLInputSource) /** * <font color="red">EXPERIMENTAL: may change in next release</font><br/> * Immediately evaluates an input source and add the new content (e.g. * the output written by an embedded script). * * @param inputSource The new input source to start scanning. * @see #pushInputSource(XMLInputSource) */ public void evaluateInputSource(XMLInputSource inputSource) { scanner.evaluateInputSource(inputSource); } // evaluateInputSource(XMLInputSource) /** Parses a document. */ public void parse(XMLInputSource source) throws XNIException, IOException { setInputSource(source); parse(true); } // parse(XMLInputSource) /** * Sets the input source for the document to parse. 
* * @param inputSource The document's input source. * * @exception XMLConfigurationException Thrown if there is a * configuration error when initializing the * parser. * @exception IOException Thrown on I/O error. * * @see #parse(boolean) */ public void setInputSource(XMLInputSource inputSource) throws XMLConfigurationException, IOException { reset(); fCloseStream = inputSource.getByteStream() == null && inputSource.getCharacterStream() == null; scanner.setInputSource(inputSource); } // setInputSource(XMLInputSource) /** * Parses the document in a pull parsing fashion. * * @param complete True if the pull parser should parse the * remaining document completely. * * @return True if there is more document to parse. * * @exception XNIException Any XNI exception, possibly wrapping * another exception. * @exception IOException An IO exception from the parser, possibly * from a byte stream or character stream * supplied by the parser. * * @see #setInputSource */ public boolean parse(boolean complete) throws XNIException, IOException { try { boolean more = scanner.scanDocument(complete); if (!more) { cleanup(); } return more; } catch (XNIException e) { cleanup(); throw e; } catch (IOException e) { cleanup(); throw e; } } // parse(boolean):boolean /** * If the application decides to terminate parsing before the xml document * is fully parsed, the application should call this method to free any * resource allocated during parsing. For example, close all opened streams. */ public void cleanup() { scanner.cleanup(fCloseStream); } // cleanup() /** Resets the parser configuration. 
*/ protected void reset() throws XMLConfigurationException { // reset components int size = fHTMLComponents.size(); for (int i = 0; i < size; i++) { HTMLComponent component = (HTMLComponent) fHTMLComponents.elementAt(i); component.reset(this); } // configure pipeline XMLDocumentSource lastSource = scanner; if (getFeature(BALANCE_TAGS)) { lastSource.setDocumentHandler(tagBalancer); tagBalancer.setDocumentSource(scanner); lastSource = tagBalancer; } if (getFeature(NAMESPACES)) { lastSource.setDocumentHandler(fNamespaceBinder); fNamespaceBinder.setDocumentSource(fTagBalancer); lastSource = fNamespaceBinder; } XMLDocumentFilter[] filters = (XMLDocumentFilter[]) getProperty(FILTERS); if (filters != null) { for (int i = 0; i < filters.length; i++) { XMLDocumentFilter filter = filters[i]; XercesBridge.getInstance().XMLDocumentFilter_setDocumentSource(filter, lastSource); lastSource.setDocumentHandler(filter); lastSource = filter; } } lastSource.setDocumentHandler(fDocumentHandler); } // reset() }
/* * Copyright 2013 Thomas Bocek * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package net.tomp2p.connection2; import io.netty.channel.EventLoopGroup; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.concurrent.BlockingQueue; import java.util.concurrent.ExecutorService; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.Semaphore; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import net.tomp2p.futures.BaseFutureAdapter; import net.tomp2p.futures.FutureChannelCreator; import net.tomp2p.futures.FutureDone; import net.tomp2p.p2p.RequestP2PConfiguration; import net.tomp2p.p2p.RoutingConfiguration; import net.tomp2p.p2p.builder.DHTBuilder; /** * Reserves a block of connections. 
 *
 * @author Thomas Bocek
 *
 */
public class Reservation {

    private final int maxPermitsUDP;
    private final int maxPermitsTCP;
    private final int maxPermitsPermanentTCP;

    // NOTE(review): "UPD" in this field name looks like a typo for "UDP";
    // the name is kept unchanged here to avoid touching code in a doc pass.
    private final Semaphore semaphoreUPD;
    private final Semaphore semaphoreTCP;
    private final Semaphore semaphorePermanentTCP;

    private final ChannelClientConfiguration channelClientConfiguration;

    // Single-thread executor (see constructor) backed by "queue": reservation
    // requests are therefore executed strictly one at a time, in FIFO order.
    private final ExecutorService executor;

    private final BlockingQueue<Runnable> queue = new LinkedBlockingQueue<Runnable>();

    private final EventLoopGroup workerGroup;

    // we should be fair, otherwise we see connection timeouts due to unfairness if busy
    private final ReadWriteLock readWriteLock = new ReentrantReadWriteLock(true);
    private final Lock read = readWriteLock.readLock();
    private final Lock write = readWriteLock.writeLock();

    // Guarded by readWriteLock: read by the reservation tasks, written only by shutdown().
    private boolean shutdown = false;

    private final Collection<ChannelCreator> channelCreators = Collections
            .synchronizedList(new ArrayList<ChannelCreator>());

    // Completed once shutdown() has drained all permits; see complete().
    private final FutureDone<Void> futureReservationDone = new FutureDone<Void>();

    /**
     * Creates a new reservation class with the 3 permits.
     *
     * @param workerGroup
     *            The worker group for both UDP and TCP channels. This will not be shutdown in this class, you need to
     *            shutdown it outside.
     * @param channelClientConfiguration
     *            Sets maxPermitsUDP: the number of maximum short-lived UDP connections, maxPermitsTCP: the number of
     *            maximum short-lived TCP connections, maxPermitsPermanentTCP: the number of maximum permanent TCP
     *            connections
     */
    public Reservation(final EventLoopGroup workerGroup, final ChannelClientConfiguration channelClientConfiguration) {
        // single thread
        this.executor = new ThreadPoolExecutor(1, 1, 0L, TimeUnit.MILLISECONDS, queue);
        this.workerGroup = workerGroup;
        this.maxPermitsUDP = channelClientConfiguration.maxPermitsUDP();
        this.maxPermitsTCP = channelClientConfiguration.maxPermitsTCP();
        this.maxPermitsPermanentTCP = channelClientConfiguration.maxPermitsPermanentTCP();
        this.semaphoreUPD = new Semaphore(maxPermitsUDP);
        this.semaphoreTCP = new Semaphore(maxPermitsTCP);
        this.semaphorePermanentTCP = new Semaphore(maxPermitsPermanentTCP);
        this.channelClientConfiguration = channelClientConfiguration;
    }

    /**
     * @return The pending number of requests that are scheduled but not executed yet.
     */
    public int pendingRequests() {
        return queue.size();
    }

    /**
     * This will calculate the number of required connections for routing and request messages.
     *
     * @param routingConfiguration
     *            Contains the number of routing requests in parallel
     * @param requestP2PConfiguration
     *            Contains the number of requests for P2P operations in parallel
     * @param builder
     *            The builder that tells us if we should use TCP or UDP
     * @return The future channel creator
     */
    public FutureChannelCreator create(final RoutingConfiguration routingConfiguration,
            final RequestP2PConfiguration requestP2PConfiguration, final DHTBuilder<?> builder) {
        if (routingConfiguration == null && requestP2PConfiguration == null) {
            throw new IllegalArgumentException(
                    "Both routingConfiguration and requestP2PConfiguration cannot be null");
        }
        int nrConnectionsTCP = 0;
        int nrConnectionsUDP = 0;
        if (requestP2PConfiguration != null) {
            // P2P requests go over TCP unless the builder forces UDP.
            if (builder.isForceUDP()) {
                nrConnectionsUDP = requestP2PConfiguration.getParallel();
            } else {
                nrConnectionsTCP = requestP2PConfiguration.getParallel();
            }
        }
        if (routingConfiguration != null) {
            // Routing defaults to UDP unless TCP is forced; take the maximum so
            // one creator can serve both the routing and the request phase.
            if (!builder.isForceTCP()) {
                nrConnectionsUDP = Math.max(nrConnectionsUDP, routingConfiguration.getParallel());
            } else {
                nrConnectionsTCP = Math.max(nrConnectionsTCP, routingConfiguration.getParallel());
            }
        }
        return create(nrConnectionsUDP, nrConnectionsTCP);
    }

    /**
     * Create a connection creator for short-lived connections.
     *
     * @param permitsUDP
     *            The number of short-lived UDP connections
     * @param permitsTCP
     *            The number of short-lived TCP connections
     * @return The future channel creator
     */
    public FutureChannelCreator create(final int permitsUDP, final int permitsTCP) {
        if (permitsUDP > maxPermitsUDP) {
            throw new IllegalArgumentException("cannot aquire more UDP connections (" + permitsUDP
                    + ") than maximum " + maxPermitsUDP);
        }
        if (permitsTCP > maxPermitsTCP) {
            throw new IllegalArgumentException("cannot aquire more TCP connections (" + permitsTCP
                    + ") than maximum " + maxPermitsTCP);
        }
        FutureChannelCreator futureChannelCreator = new FutureChannelCreator();
        FutureDone<Void> futureChannelCreationDone = new FutureDone<Void>();
        futureChannelCreationDone.addListener(new BaseFutureAdapter<FutureDone<Void>>() {
            @Override
            public void operationComplete(final FutureDone<Void> future) throws Exception {
                // release the permits in all cases, otherwise we may see inconsistencies
                semaphoreUPD.release(permitsUDP);
                semaphoreTCP.release(permitsTCP);
            }
        }, false); // false is important, to be always the first listener
        executor.execute(new WaitReservation(futureChannelCreator, futureChannelCreationDone, permitsUDP,
                permitsTCP));
        return futureChannelCreator;
    }

    /**
     * Create a connection creator for permanent connections.
     *
     * @param permitsPermanentTCP
     *            The number of long-lived TCP connections
     * @return The future channel creator
     */
    public FutureChannelCreator createPermanent(final int permitsPermanentTCP) {
        if (permitsPermanentTCP > maxPermitsPermanentTCP) {
            throw new IllegalArgumentException("cannot aquire more TCP connections (" + permitsPermanentTCP
                    + ") than maximum " + maxPermitsPermanentTCP);
        }
        FutureChannelCreator futureChannelCreator = new FutureChannelCreator();
        FutureDone<Void> futureChannelCreationDone = new FutureDone<Void>();
        futureChannelCreationDone.addListener(new BaseFutureAdapter<FutureDone<Void>>() {
            @Override
            public void operationComplete(final FutureDone<Void> future) throws Exception {
                // release the permits in all cases, otherwise we may see inconsistencies
                semaphorePermanentTCP.release(permitsPermanentTCP);
            }
        }, false); // false is important, to be always the first listener
        executor.execute(new WaitReservationPermanent(futureChannelCreator, futureChannelCreationDone,
                permitsPermanentTCP));
        return futureChannelCreator;
    }

    /**
     * Shutdown all the channel creators out there.
     *
     * @return The future when the shutdown is complete
     */
    public FutureDone<Void> shutdown() {
        write.lock();
        try {
            if (shutdown) {
                shutdownFuture().setFailed("already shutting down");
                return shutdownFuture();
            }
            shutdown = true;
        } finally {
            write.unlock();
        }
        // A fast shutdown of the tasks still sitting in the queue is not
        // required: the shutdown flag is set, so each queued WaitReservation /
        // WaitReservationPermanent will fail its future with "shutting down"
        // when it eventually runs.

        // the channelCreators collection does not change anymore from here on
        final int size = channelCreators.size();
        if (size == 0) {
            complete();
        } else {
            final AtomicInteger completeCounter = new AtomicInteger(0);
            for (final ChannelCreator channelCreator : channelCreators) {
                // this is very important that we set first the listener and then call shutdown. Otherwise, the order of
                // the listener calls is not guaranteed and we may call this listener before the semaphore.release,
                // causing an exception.
                channelCreator.shutdownFuture().addListener(new BaseFutureAdapter<FutureDone<Void>>() {
                    @Override
                    public void operationComplete(final FutureDone<Void> future) throws Exception {
                        // Only the last creator to shut down completes the reservation.
                        if (completeCounter.incrementAndGet() == size) {
                            complete();
                        }
                    }
                });
                channelCreator.shutdown();
            }
        }
        return shutdownFuture();
    }

    /**
     * Drain all semaphores and set the future to done.
     */
    private void complete() {
        // If any permit cannot be acquired here, a connection is still alive --
        // that is a programming error, hence the RuntimeException.
        if (!semaphoreUPD.tryAcquire(maxPermitsUDP)) {
            throw new RuntimeException("Cannot shutdown, as connections (UDP) are still alive: " + semaphoreUPD);
        }
        if (!semaphoreTCP.tryAcquire(maxPermitsTCP)) {
            throw new RuntimeException("Cannot shutdown, as connections (TCP) are still alive: " + semaphoreTCP);
        }
        if (!semaphorePermanentTCP.tryAcquire(maxPermitsPermanentTCP)) {
            throw new RuntimeException("Cannot shutdown, as connections (pTCP) are still alive: "
                    + semaphorePermanentTCP);
        }
        futureReservationDone.setDone();
    }

    /**
     * @return The shutdown future that is used when calling {@link #shutdown()}
     */
    public FutureDone<Void> shutdownFuture() {
        return futureReservationDone;
    }

    /**
     * Adds a channel creator to the set and also adds it to the shutdown listener.
     *
     * @param channelCreator
     *            The channel creator
     */
    private void addToSet(final ChannelCreator channelCreator) {
        channelCreator.shutdownFuture().addListener(new BaseFutureAdapter<FutureDone<Void>>() {
            @Override
            public void operationComplete(final FutureDone<Void> future) throws Exception {
                read.lock();
                try {
                    if (shutdown) {
                        // Reservation.shutdown() is iterating over channelCreators;
                        // do not modify the collection underneath it.
                        return;
                    }
                    channelCreators.remove(channelCreator);
                } finally {
                    read.unlock();
                }
            }
        });
        channelCreators.add(channelCreator);
    }

    /**
     * Tries to reserve a channel creator. If too many channels are already created, wait until channels are closed.
     * This waiter is for the short-lived connections.
     *
     * @author Thomas Bocek
     *
     */
    private class WaitReservation implements Runnable {
        private final FutureChannelCreator futureChannelCreator;
        private final FutureDone<Void> futureChannelCreationShutdown;
        private final int permitsUDP;
        private final int permitsTCP;

        /**
         * Creates a reservation that returns a {@link ChannelCreator} in a future once we have the semaphore.
         *
         * @param futureChannelCreator
         *            The status of the creating
         * @param futureChannelCreationShutdown
         *            The {@link ChannelCreator} shutdown future needs to be passed since we need it for
         *            {@link Reservation#shutdown()}.
         * @param permitsUDP
         *            The number of permits for UDP
         * @param permitsTCP
         *            The number of permits for TCP
         */
        public WaitReservation(final FutureChannelCreator futureChannelCreator,
                final FutureDone<Void> futureChannelCreationShutdown, final int permitsUDP, final int permitsTCP) {
            this.futureChannelCreator = futureChannelCreator;
            this.futureChannelCreationShutdown = futureChannelCreationShutdown;
            this.permitsUDP = permitsUDP;
            this.permitsTCP = permitsTCP;
        }

        @Override
        public void run() {
            final ChannelCreator channelCreator;
            read.lock();
            try {
                if (shutdown) {
                    futureChannelCreator.setFailed("shutting down");
                    return;
                }

                // Block until the requested number of UDP permits is available.
                try {
                    semaphoreUPD.acquire(permitsUDP);
                } catch (InterruptedException e) {
                    futureChannelCreator.setFailed(e);
                    return;
                }
                try {
                    semaphoreTCP.acquire(permitsTCP);
                } catch (InterruptedException e) {
                    // Undo the UDP acquisition, otherwise those permits would leak.
                    semaphoreUPD.release(permitsUDP);
                    futureChannelCreator.setFailed(e);
                    return;
                }

                channelCreator = new ChannelCreator(workerGroup, futureChannelCreationShutdown, permitsUDP,
                        permitsTCP, channelClientConfiguration);
                addToSet(channelCreator);
            } finally {
                read.unlock();
            }
            // Notify the requester outside the lock.
            futureChannelCreator.reserved(channelCreator);
        }

    }

    /**
     * Tries to reserve a channel creator. If too many channels are already created, wait until channels are closed.
     * This waiter is for the long-lived connections.
     *
     * @author Thomas Bocek
     *
     */
    private final class WaitReservationPermanent implements Runnable {
        private final FutureChannelCreator futureChannelCreator;
        private final FutureDone<Void> futureChannelCreationShutdown;
        private final int permitsPermanentTCP;

        /**
         * Creates a reservation that returns a {@link ChannelCreator} in a future once we have the semaphore.
         *
         * @param futureChannelCreator
         *            The status of the creating
         * @param futureChannelCreationShutdown
         *            The {@link ChannelCreator} shutdown future needs to be passed since we need it for
         *            {@link Reservation#shutdown()}.
         * @param permitsPermanentTCP
         *            The number of permits
         */
        private WaitReservationPermanent(final FutureChannelCreator futureChannelCreator,
                final FutureDone<Void> futureChannelCreationShutdown, final int permitsPermanentTCP) {
            this.futureChannelCreator = futureChannelCreator;
            this.futureChannelCreationShutdown = futureChannelCreationShutdown;
            this.permitsPermanentTCP = permitsPermanentTCP;
        }

        @Override
        public void run() {
            ChannelCreator channelCreator;
            read.lock();
            try {
                if (shutdown) {
                    futureChannelCreator.setFailed("shutting down");
                    return;
                }

                try {
                    semaphorePermanentTCP.acquire(permitsPermanentTCP);
                } catch (InterruptedException e) {
                    futureChannelCreator.setFailed(e);
                    return;
                }

                // Permanent creators hold no UDP permits (0).
                channelCreator = new ChannelCreator(workerGroup, futureChannelCreationShutdown, 0,
                        permitsPermanentTCP, channelClientConfiguration);
                addToSet(channelCreator);
            } finally {
                read.unlock();
            }
            futureChannelCreator.reserved(channelCreator);
        }

    }

}
/*
 * Copyright 2011 Internet Archive
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you
 * may not use this file except in compliance with the License. You
 * may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package org.archive.jbs.util;

import java.io.*;
import java.net.*;
import java.util.*;
import java.util.regex.*;

/**
 * <p>
 * Helper class for handling (international) domain names which
 * determines which part of a fully-qualified hostname is the
 * domain, or "site".
 * </p>
 * <p>
 * It's designed to use the rules maintained by Mozilla and the Public
 * Suffix List project:
 * <ul>
 *   <li>http://mxr.mozilla.org/mozilla-central/source/netwerk/dns/effective_tld_names.dat?raw=1</li>
 *   <li>http://publicsuffix.org/index.html</li>
 * </ul>
 * </p>
 * <p>
 * Typically, it is instantiated with the rules from the
 * <code>effective_tld_names.dat</code> file, but the rules can be
 * augmented for custom domain name determination.
 * For example, if a project wanted to treat each subdomain
 * under <code>blogger.com</code> as a separate domain, then
 * the rule could be added:
 * <pre>blogger.com</pre>
 * which would yield:
 * <pre>foo.blogger.com
 * bar.blogger.com
 * baz.blogger.com</pre>
 * as separate domains.  Without this rule, they would all be
 * collapsed into just <code>blogger.com</code>.
 * </p>
 */
public class IDNHelper
{
  // Rule sets, public so callers may inspect or augment them directly.
  // All stored strings are in ASCII (punycode) form; see addRule().
  public Set<String>  exact   = new HashSet<String>();
  public Set<String>  exclude = new HashSet<String>();
  public Set<Pattern> wild    = new HashSet<Pattern>();

  /**
   * Adds a single rule, in effective_tld_names.dat syntax:
   * "*.&lt;suffix&gt;" wildcard, "!&lt;name&gt;" exclusion,
   * "~&lt;regex&gt;" raw regex (extension to the Mozilla syntax),
   * otherwise an exact suffix.
   */
  public void addRule( String rule )
  {
    // Handle simple wildcards rules
    if ( rule.startsWith( "*." ) )
      {
        // "*." alone is not a usable rule.
        if ( rule.length() < 3 ) return ;

        rule = IDN.toASCII( rule.substring( 2 ) );

        // Transform the rule string into regex syntax: two leading labels
        // (one for the wildcard suffix label, one for the registered name)
        // followed by the literal, dot-escaped suffix.
        rule = "[^.]+[.][^.]+[.]" + rule.replace( ".", "[.]" ) ;

        Pattern p = Pattern.compile( rule );

        wild.add( p );

        return ;
      }

    // Full-blown regex rules
    if ( rule.startsWith( "~" ) )
      {
        rule = rule.substring( 1 );

        Pattern p = Pattern.compile( rule );

        wild.add( p );

        return ;
      }

    // Exact and exclude rules.
    Set<String> rules = exact;
    if ( rule.startsWith( "!" ) )
      {
        // "!" alone is not a usable rule.
        if ( rule.length() == 1 ) return ;

        rules = exclude;
        rule  = rule.substring( 1 );
      }

    rules.add( IDN.toASCII( rule ) );
  }

  /**
   * Adds rules from the given Reader.  Rules are expected to conform
   * to syntax in Mozilla's effective_tld_names.txt document.
   */
  public void addRules( Reader r )
    throws IOException
  {
    BufferedReader reader = new BufferedReader( r );

    String line;
    while ( (line = reader.readLine() ) != null )
      {
        line = line.trim();

        // Skip blank lines and "//" comment lines.
        if ( line.length() == 0 || line.startsWith( "//" ) ) continue;

        this.addRule( line );
      }
  }

  /**
   * Return the domain of the given url, according to the rules added
   * to the IDNHelper.
   */
  public String getDomain( URL u )
  {
    return getDomain( u.getHost( ) );
  }

  /**
   * Return the domain of the given host string, according to the
   * rules added to the IDNHelper.  The input host string is expected
   * to be a valid fully-qualified hostname, such as those returned by
   * URL.getHost().
   *
   * Returns <code>null</code> if domain cannot be determined.
   */
  public String getDomain( String host )
  {
    // Normalize to ASCII (punycode) so comparisons match the stored rules.
    try
      {
        host = IDN.toASCII( host, IDN.ALLOW_UNASSIGNED );
      }
    catch ( Exception e )
      {
        // Malformed hostname: treat as "domain cannot be determined".
        host = null;
      }
    if ( host == null ) return null;

    // Strip leading labels one at a time; when the remainder ("test") is a
    // known public suffix, the current "host" is the registrable domain.
    int i;
    while ( (i = host.indexOf( '.' ) ) != -1 )
      {
        String test = host.substring( i + 1 );

        if ( exact.contains( test ) )
          {
            return host;
          }

        // Exclusion rules mark names that are *not* public suffixes.
        if ( exclude.contains( test ) )
          {
            return test;
          }

        if ( exclude.contains( host ) )
          {
            return host;
          }

        for ( Pattern p : wild )
          {
            Matcher m = p.matcher( host );
            if ( m.matches( ) )
              {
                // A capturing group, if present, selects the domain part
                // (only "~" regex rules can define groups).
                if ( m.groupCount() > 0 )
                  {
                    return m.group( 1 );
                  }
                return host;
              }
          }

        host = test;
      }

    // No rule matched (e.g. single-label host or unknown TLD).
    return null;
  }

  /**
   * Constructs a new IDNHelper object, populating it with rules from
   * given Reader.  Rules are expected to be in the same form as
   * Mozilla's effective_tld_names.dat file.
   */
  public static IDNHelper build( Reader reader )
    throws IOException
  {
    IDNHelper helper = new IDNHelper( );

    helper.addRules( reader );

    return helper;
  }

  /**
   * Command-line test driver.
   */
  public static void main( String[] args )
    throws Exception
  {
    if ( args.length < 2 || args[0].equals( "-h" ) || args[0].equals( "--help" ) )
      {
        usage();
        System.exit( 0 );
      }

    Reader reader = new InputStreamReader( new FileInputStream( args[0] ), "utf-8" );

    IDNHelper helper = build( reader );

    for ( int i = 1; i < args.length ; i++ )
      {
        if ( args[i].equals("-") )
          {
            // Read URLs from stdin, one per line; '#' and '//' start comments.
            BufferedReader r = new BufferedReader( new InputStreamReader( System.in, "utf-8" ) );
            String line;
            while ( ( line = r.readLine() ) != null )
              {
                line = line.trim();
                if ( line.length() == 0 || line.startsWith( "#" ) || line.startsWith( "//" ) ) continue;

                URL u = new URL( line );
                System.out.println( helper.getDomain( u.getHost( ) ) + "\t" + line );
              }
          }
        else
          {
            URL u = new URL( args[i] );
            System.out.println( helper.getDomain( u.getHost( ) ) + "\t" + args[i] );
          }
      }
  }

  public static void usage( )
  {
    System.out.println( "IDNHelper <rules> <url>..." );
    System.out.println( "  Load rules and emit domain for given URLs" );
    System.out.println( "  If <url> is '-' then URLs will be read from stdin." );
  }

}