gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package com.thaze.peakmatch;
import com.thaze.peakmatch.event.BasicEvent;
import com.thaze.peakmatch.event.Event;
import com.thaze.peakmatch.event.EventException;
import com.thaze.peakmatch.processors.AnalyseProcessor;
import com.thaze.peakmatch.processors.BruteForceProcessor;
import com.thaze.peakmatch.processors.ClusteringProcessor;
import com.thaze.peakmatch.processors.DominantFreqProcessor;
import com.thaze.peakmatch.processors.FFTPrecacheProcessor;
import com.thaze.peakmatch.processors.PeakMatchProcessor;
import com.thaze.peakmatch.processors.Plot1DProcessor;
import com.thaze.peakmatch.processors.Plot2DProcessor;
import com.thaze.peakmatch.processors.PostProcessProcessor;
import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Writer;
/**
* @author Simon Rodgers
*/
/**
 * Peakmatch entry point. Reads configuration from {@link #CONF_FILE}, then either
 * services a one-shot command-line request (plot / cross-correlate / dominant
 * frequency for named event files) or runs the batch processor selected by the
 * configured mode.
 *
 * @author Simon Rodgers
 */
public class XCorrProcessor {

	public static final String CONF_FILE = "xcorr.conf";
	public static final String XCORR_CANDIDATES_FILE = "xcorr.candidates";
	public static final String XCORR_POSTPROCESS_FILE = "xcorr.postprocess";
	public static final String XCORR_DOMINANTFREQ_FILE = "xcorr.dominantfreq";
	public static final String XCORR_BRUTEFORCE_FILE = "xcorr.bruteforce";
	public static final String XCORR_SAMPLE_SAVE_FILE = "xcorr.saved";

	public static void main(String[] args) {
		try {
			EventProcessorConf conf = new EventProcessorConf(CONF_FILE);

			// any command-line argument switches to one-shot CLI mode
			if (args.length > 0) {
				handleCommandLine(args, conf);
				return;
			}

			new XCorrProcessor().run(conf);
		} catch (Throwable e) {
			// print a short message for the user, but keep the stack trace too -
			// it was previously discarded, which made failures hard to diagnose
			System.err.println("error: " + e.getMessage());
			e.printStackTrace();
		}
	}

	/**
	 * Parses command-line options and executes the requested one-shot action:
	 * -plot (1d/2d/tiny1d), -xcorr (cross-correlate two events) or -fftdom
	 * (dominant frequency analysis). Prints usage if no recognised combination
	 * of options was supplied.
	 *
	 * @param args raw command-line arguments
	 * @param conf configuration used to load and process event files
	 * @throws EventException if an event file cannot be read or parsed
	 */
	private static void handleCommandLine(String[] args, EventProcessorConf conf) throws EventException {
		try {
			Options options = new Options();
			options.addOption(new Option("help", "print usage"));
			options.addOption(new Option("xcorr", "cross-correlate two events"));
			options.addOption(OptionBuilder.withArgName("filenames ...").hasArgs(Option.UNLIMITED_VALUES).withDescription("event filenames - absolute or relative to dataset.full").create("events"));
			options.addOption(OptionBuilder.withArgName("dimension").hasOptionalArg().withDescription("plot events, with dimension 1d/2d/*tiny1d").create("plot"));
			options.addOption(new Option("fftdom", "perform FFTDOMINANTFREQ on events"));

			CommandLineParser parser = new BasicParser();
			CommandLine cmd = parser.parse(options, args);

			if (cmd.hasOption("events") && cmd.hasOption("plot")) {
				// the dimension is the same for every event - read it once, outside the loop
				String dimension = cmd.getOptionValue("plot");
				for (String eventName : cmd.getOptionValues("events")) {
					Event event = new BasicEvent(new File(eventName), conf);
					if ("2d".equals(dimension)) {
						System.out.println(Plot2DProcessor.formatEvent(event, conf));
					} else if ("1d".equals(dimension)) {
						System.out.println(Plot1DProcessor.formatEvent(event, conf, false));
					} else {
						// default (including missing dimension argument): tiny 1d plot
						System.out.println(Plot1DProcessor.formatEvent(event, conf, true));
					}
				}
			} else if (cmd.hasOption("events") && cmd.hasOption("xcorr")) {
				String[] events = cmd.getOptionValues("events");
				if (events.length < 2) {
					System.err.println("expected two event files to cross-correlate");
					return;
				}
				Event e1 = new BasicEvent(new File(events[0]), conf);
				Event e2 = new BasicEvent(new File(events[1]), conf);
				double[] xcorr = Util.fftXCorr(e1, e2);
				// report only the peak correlation value
				double best = Util.getHighest(xcorr);
				System.out.println(Util.NF.format(best));
			} else if (cmd.hasOption("events") && cmd.hasOption("fftdom")) {
				// NOTE(review): closing this writer also closes System.out; acceptable
				// here because the JVM exits immediately after this method returns
				try (Writer w = new PrintWriter(System.out)) {
					for (String eventName : cmd.getOptionValues("events")) {
						Event event = new BasicEvent(new File(eventName), conf);
						DominantFreqProcessor.handleEvent(conf, event, w);
						w.flush();
					}
				}
			} else {
				HelpFormatter hf = new HelpFormatter();
				hf.printHelp(100, "java -jar peakmatch.jar", null, options, null, true);
			}
		} catch (ParseException e) {
			System.err.println("error parsing arguments");
			System.err.println(e.getMessage());
		} catch (IOException e) {
			System.err.println("error writing");
			System.err.println(e.getMessage());
		}
	}

	/**
	 * Runs the single batch processor selected by the configured mode and reports
	 * elapsed wall-clock time.
	 *
	 * @param conf configuration whose mode selects the processor
	 * @throws EventException if the selected processor fails
	 */
	private void run(EventProcessorConf conf) throws EventException {
		long t0 = System.currentTimeMillis();

		System.out.println();
		System.out.println("*** Peakmatch ***");
		System.out.println("read " + CONF_FILE + " ...");

		switch (conf.getMode()) {
		case ANALYSE:
			new AnalyseProcessor(conf).process();
			break;
		case PEAKMATCH:
			new PeakMatchProcessor(conf).process();
			break;
		case FFTPRECACHE:
			new FFTPrecacheProcessor(conf).process();
			break;
		case POSTPROCESS:
			new PostProcessProcessor(conf).process();
			break;
		case BRUTEFORCE:
			new BruteForceProcessor(conf).process();
			break;
		case FFTDOMINANTFREQ:
			new DominantFreqProcessor(conf).process();
			break;
		case PLOT2D:
			new Plot2DProcessor(conf).process();
			break;
		case PLOT1D:
			new Plot1DProcessor(conf).process();
			break;
		case CLUSTER:
			new ClusteringProcessor(conf).process();
			break;
		}

		System.out.println("*** done [" + (System.currentTimeMillis() - t0) + " ms] ***");
	}
}
| |
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.lb.wallpaper_picker_library;
import android.util.Log;
import java.util.WeakHashMap;
/**
 * BasicTexture is a {@link Texture} that corresponds to a real GL texture.
 * The state of a BasicTexture indicates whether its data is loaded into GL
 * memory; if it is loaded, the texture has a GL texture id.
 */
public abstract class BasicTexture implements Texture {
	@SuppressWarnings("unused")
	private static final String TAG = "BasicTexture";

	protected static final int UNSPECIFIED = -1;

	protected static final int STATE_UNLOADED = 0;
	protected static final int STATE_LOADED = 1;
	protected static final int STATE_ERROR = -1;

	// Log a warning if a texture is larger than this along a dimension.
	private static final int MAX_TEXTURE_SIZE = 4096;

	// GL texture id; -1 means no GL texture is currently associated.
	protected int mId = -1;
	protected int mState;

	// content size (may be smaller than the allocated texture size)
	protected int mWidth = UNSPECIFIED;
	protected int mHeight = UNSPECIFIED;

	// allocated texture size, rounded up to the next power of two
	protected int mTextureWidth;
	protected int mTextureHeight;

	private boolean mHasBorder;

	protected GLCanvas mCanvasRef = null;

	// All live textures, held weakly so they can be yielded/invalidated in bulk
	// without preventing garbage collection. Guarded by synchronizing on itself.
	private static final WeakHashMap<BasicTexture, Object> sAllTextures
			= new WeakHashMap<BasicTexture, Object>();
	// Non-null while the current thread is running finalize(); was a raw
	// ThreadLocal - now parameterized to avoid unchecked usage.
	private static final ThreadLocal<Class<?>> sInFinalizer = new ThreadLocal<Class<?>>();

	protected BasicTexture(GLCanvas canvas, int id, int state) {
		setAssociatedCanvas(canvas);
		mId = id;
		mState = state;
		synchronized (sAllTextures) {
			sAllTextures.put(this, null);
		}
	}

	protected BasicTexture() {
		this(null, 0, STATE_UNLOADED);
	}

	protected void setAssociatedCanvas(GLCanvas canvas) {
		mCanvasRef = canvas;
	}

	/**
	 * Sets the content size of this texture. In OpenGL, the actual texture
	 * size must be of power of 2, the size of the content may be smaller.
	 */
	public void setSize(int width, int height) {
		mWidth = width;
		mHeight = height;
		mTextureWidth = width > 0 ? Utils.nextPowerOf2(width) : 0;
		mTextureHeight = height > 0 ? Utils.nextPowerOf2(height) : 0;
		if (mTextureWidth > MAX_TEXTURE_SIZE || mTextureHeight > MAX_TEXTURE_SIZE) {
			Log.w(TAG, String.format("texture is too large: %d x %d",
					mTextureWidth, mTextureHeight), new Exception());
		}
	}

	public boolean isFlippedVertically() {
		return false;
	}

	public int getId() {
		return mId;
	}

	@Override
	public int getWidth() {
		return mWidth;
	}

	@Override
	public int getHeight() {
		return mHeight;
	}

	// Returns the width rounded to the next power of 2.
	public int getTextureWidth() {
		return mTextureWidth;
	}

	// Returns the height rounded to the next power of 2.
	public int getTextureHeight() {
		return mTextureHeight;
	}

	// Returns true if the texture has one pixel transparent border around the
	// actual content. This is used to avoid jigged edges.
	//
	// The jigged edges appear because we use GL_CLAMP_TO_EDGE for texture wrap
	// mode (GL_CLAMP is not available in OpenGL ES), so a pixel partially
	// covered by the texture will use the color of the edge texel. If we add
	// the transparent border, the color of the edge texel will be mixed with
	// appropriate amount of transparent.
	//
	// Currently our background is black, so we can draw the thumbnails without
	// enabling blending.
	public boolean hasBorder() {
		return mHasBorder;
	}

	protected void setBorder(boolean hasBorder) {
		mHasBorder = hasBorder;
	}

	@Override
	public void draw(GLCanvas canvas, int x, int y) {
		canvas.drawTexture(this, x, y, getWidth(), getHeight());
	}

	@Override
	public void draw(GLCanvas canvas, int x, int y, int w, int h) {
		canvas.drawTexture(this, x, y, w, h);
	}

	// onBind is called before GLCanvas binds this texture.
	// It should make sure the data is uploaded to GL memory.
	abstract protected boolean onBind(GLCanvas canvas);

	// Returns the GL texture target for this texture (e.g. GL_TEXTURE_2D).
	abstract protected int getTarget();

	public boolean isLoaded() {
		return mState == STATE_LOADED;
	}

	// recycle() is called when the texture will never be used again,
	// so it can free all resources.
	public void recycle() {
		freeResource();
	}

	// yield() is called when the texture will not be used temporarily,
	// so it can free some resources.
	// The default implementation unloads the texture from GL memory, so
	// the subclass should make sure it can reload the texture to GL memory
	// later, or it will have to override this method.
	public void yield() {
		freeResource();
	}

	private void freeResource() {
		GLCanvas canvas = mCanvasRef;
		if (canvas != null && mId != -1) {
			canvas.unloadTexture(this);
			mId = -1; // Don't free it again.
		}
		mState = STATE_UNLOADED;
		setAssociatedCanvas(null);
	}

	@Override
	protected void finalize() {
		sInFinalizer.set(BasicTexture.class);
		recycle();
		sInFinalizer.set(null);
	}

	// This is for deciding if we can call Bitmap's recycle().
	// We cannot call Bitmap's recycle() in finalizer because at that point
	// the finalizer of Bitmap may already be called so recycle() will crash.
	public static boolean inFinalizer() {
		return sInFinalizer.get() != null;
	}

	public static void yieldAllTextures() {
		synchronized (sAllTextures) {
			for (BasicTexture t : sAllTextures.keySet()) {
				t.yield();
			}
		}
	}

	public static void invalidateAllTextures() {
		synchronized (sAllTextures) {
			for (BasicTexture t : sAllTextures.keySet()) {
				t.mState = STATE_UNLOADED;
				t.setAssociatedCanvas(null);
			}
		}
	}
}
| |
package org.jenkins.plugins.statistics.listeners;
import org.jenkins.plugins.statistics.model.StatsQueue;
import org.jenkins.plugins.statistics.model.QueueCauase;
import org.jenkins.plugins.statistics.util.*;
import hudson.Extension;
import hudson.model.Cause;
import hudson.model.Queue;
import hudson.model.Queue.Item;
import hudson.model.queue.QueueListener;
import hudson.triggers.SCMTrigger;
import hudson.triggers.TimerTrigger;
import jenkins.model.Jenkins;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Created by hthakkallapally on 3/13/2015.
*/
/**
 * {@link QueueListener} that publishes Jenkins queue lifecycle events
 * (enter/leave waiting, blocked, buildable, and left) to an external
 * statistics REST endpoint.
 */
@Extension
public class StatsQueueListener extends QueueListener {

	private static final Logger LOGGER = Logger.getLogger(
			StatsQueueListener.class.getName());

	public StatsQueueListener() {
	}

	@Override
	public void onEnterWaiting(Queue.WaitingItem wi) {
		try {
			StatsQueue queue = getCiQueue(wi);
			addStartedBy(wi, queue);
			queue.setEntryTime(new Date());
			queue.setExitTime(null);
			queue.setStatus(Constants.ENTERED);
			if (wi.getCauseOfBlockage() != null) {
				addEntryQueueCause("waiting", wi, queue);
			}
			RestClientUtil.postToService(getRestUrl(), queue);
		} catch (Exception e) {
			logFailure(wi, e);
		}
	}

	/** Records an entry-time cause of blockage under the given name. */
	private void addEntryQueueCause(String name, Item wi,
									StatsQueue queue) {
		QueueCauase cause = new QueueCauase();
		cause.setEntryTime(new Date());
		cause.setExitTime(null);
		cause.setReasonForWaiting(wi.getCauseOfBlockage().getShortDescription());
		Map<String, QueueCauase> map = new HashMap<String, QueueCauase>();
		map.put(name, cause);
		queue.setQueueCauses(map);
	}

	@Override
	public void onLeaveWaiting(Queue.WaitingItem wi) {
		try {
			StatsQueue queue = getCiQueue(wi);
			if (wi.getCauseOfBlockage() != null) {
				addExitQueueCause("waiting", wi, queue);
			}
			//PUT URL is pointing to /api/queues instead of/api/queues/id
			// as id will be reset to 1 each time you restart the Jenkins CI.
			// Hence the logic to update the correct queue record should be handled
			// at client side.
			RestClientUtil.putToService(getRestUrl(), queue);
		} catch (Exception e) {
			logFailure(wi, e);
		}
	}

	/** Records an exit-time cause of blockage under the given name. */
	private void addExitQueueCause(String name, Item wi, StatsQueue queue) {
		QueueCauase cause = new QueueCauase();
		cause.setEntryTime(null);
		cause.setExitTime(new Date());
		cause.setReasonForWaiting(wi.getCauseOfBlockage().getShortDescription());
		Map<String, QueueCauase> map = new HashMap<String, QueueCauase>();
		map.put(name, cause);
		queue.setQueueCauses(map);
	}

	/**
	 * onEnterBlocked is used to update Reason for waiting in Queue.
	 * for ex. "Build #35 is already in progress (ETA:9 min 3 sec)"
	 *
	 * @param bi
	 */
	@Override
	public void onEnterBlocked(Queue.BlockedItem bi) {
		try {
			StatsQueue queue = getCiQueue(bi);
			if (bi.getCauseOfBlockage() != null) {
				addEntryQueueCause("blocked", bi, queue);
			}
			//PUT URL is pointing to /api/queues instead of/api/queues/id
			// as id will be reset to 1 each time you restart the Jenkins CI.
			// Hence the logic to update the correct queue record should be handled
			// at client side.
			RestClientUtil.putToService(getRestUrl(), queue);
		} catch (Exception e) {
			logFailure(bi, e);
		}
	}

	@Override
	public void onLeaveBlocked(Queue.BlockedItem bi) {
		try {
			StatsQueue queue = getCiQueue(bi);
			if (bi.getCauseOfBlockage() != null) {
				addExitQueueCause("blocked", bi, queue);
			}
			//PUT URL is pointing to /api/queues instead of/api/queues/id
			// as id will be reset to 1 each time you restart the Jenkins CI.
			// Hence the logic to update the correct queue record should be handled
			// at client side.
			RestClientUtil.putToService(getRestUrl(), queue);
		} catch (Exception e) {
			logFailure(bi, e);
		}
	}

	/**
	 * onEnterBuildable is used to update Reason for waiting in Queue.
	 * for ex. "Waiting for next available executor"
	 *
	 * @param bi
	 */
	@Override
	public void onEnterBuildable(Queue.BuildableItem bi) {
		try {
			StatsQueue queue = getCiQueue(bi);
			if (bi.getCauseOfBlockage() != null) {
				addEntryQueueCause("buildable", bi, queue);
			}
			//PUT URL is pointing to /api/queues instead of/api/queues/id
			// as id will be reset to 1 each time you restart the Jenkins CI.
			// Hence the logic to update the correct queue record should be handled
			// at client side.
			RestClientUtil.putToService(getRestUrl(), queue);
		} catch (Exception e) {
			logFailure(bi, e);
		}
	}

	@Override
	public void onLeaveBuildable(Queue.BuildableItem bi) {
		try {
			StatsQueue queue = getCiQueue(bi);
			if (bi.getCauseOfBlockage() != null) {
				addExitQueueCause("buildable", bi, queue);
			}
			//PUT URL is pointing to /api/queues instead of/api/queues/id
			// as id will be reset to 1 each time you restart the Jenkins CI.
			// Hence the logic to update the correct queue record should be handled
			// at client side.
			RestClientUtil.putToService(getRestUrl(), queue);
		} catch (Exception e) {
			logFailure(bi, e);
		}
	}

	/**
	 * Logs a queue-event handling failure. Replaces four duplicated catch
	 * bodies; always passes the exception as a throwable so the stack trace
	 * is kept (previously the blocked/buildable handlers concatenated the
	 * exception into the message, losing it).
	 */
	private void logFailure(Item item, Exception e) {
		LOGGER.log(Level.WARNING, "Failed to add Queue info for " +
				"job " + item.task.getFullDisplayName() +
				" with queue id " + item.id + " using " + getRestUrl(), e);
	}

	/**
	 * Construct REST API url for queue resource.
	 *
	 * @return
	 */
	private String getRestUrl() {
		String endPoint = PropertyLoader.getStatsEndPoint();
		String queueRes = PropertyLoader.getEnvironmentProperty(
				"statistics.resource.queue");
		return endPoint + queueRes;
	}

	/**
	 * Returns a queue model object from Jenkins queue.
	 *
	 * @param wi
	 * @return
	 */
	private StatsQueue getCiQueue(Item wi) {
		StatsQueue queue = new StatsQueue();
		String ciUrl = Jenkins.getInstance() != null
				? Jenkins.getInstance().getRootUrl()
				: "";
		queue.setCiUrl(ciUrl);
		queue.setJobName(wi.task.getFullDisplayName());
		queue.setJenkinsQueueId(wi.id);
		return queue;
	}

	/**
	 * Adds the Started By information to the Queue.
	 *
	 * @param wi
	 * @param queue
	 */
	private void addStartedBy(Item wi, StatsQueue queue) {
		List<Cause> causes = wi.getCauses();
		for (Cause cause : causes) {
			if (cause instanceof Cause.UserIdCause) {
				queue.setStartedBy(((Cause.UserIdCause) cause).getUserName());
				break;
			} else if (cause instanceof Cause.UserCause) {
				// BUG FIX: previously a Cause.UserCause was cast to
				// Cause.UserIdCause, throwing ClassCastException for legacy
				// UserCause triggers
				queue.setStartedBy(((Cause.UserCause) cause).getUserName());
				break;
			} else if (cause instanceof Cause.UpstreamCause) {
				queue.setStartedBy(JenkinsCauses.UPSTREAM);
				break;
			} else if (cause instanceof SCMTrigger.SCMTriggerCause) {
				queue.setStartedBy(JenkinsCauses.SCM);
				break;
			} else if (cause instanceof TimerTrigger.TimerTriggerCause) {
				queue.setStartedBy(JenkinsCauses.TIMER);
				break;
			}
		}
	}

	@Override
	public void onLeft(Queue.LeftItem li) {
		try {
			StatsQueue queue = getCiQueue(li);
			// We have already set entry time in onEnterWaiting(). No need
			// to set it again.
			queue.setEntryTime(null);
			queue.setExitTime(new Date());
			queue.setStatus(Constants.LEFT);
			queue.setDurationStr(li.getInQueueForString());
			queue.setDuration(System.currentTimeMillis() - li.getInQueueSince());
			//PUT URL is pointing to /api/queues instead of/api/queues/id
			// as id will be reset to 1 each time you restart the Jenkins CI.
			// Hence the logic to update the correct queue record should be handled
			// at client side.
			RestClientUtil.putToService(getRestUrl(), queue);
		} catch (Exception e) {
			logFailure(li, e);
		}
	}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.rest;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.Table;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.rest.action.cat.AbstractCatAction;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.rest.FakeRestChannel;
import org.elasticsearch.test.rest.FakeRestRequest;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.hamcrest.core.StringContains.containsString;
import static org.hamcrest.object.HasToString.hasToString;
import static org.mockito.Mockito.mock;
/**
 * Unit tests for BaseRestHandler's request-parameter bookkeeping: requests with
 * unrecognized (unconsumed) parameters are rejected with helpful messages
 * (including "did you mean" suggestions), while response-only, default, and
 * cat-action parameters are tolerated. Also covers request-body consumption.
 */
public class BaseRestHandlerTests extends ESTestCase {
// A single unrecognized parameter fails the request and is named in the error.
public void testOneUnconsumedParameters() throws Exception {
// flipped to true only if the handler's channel consumer actually runs
final AtomicBoolean executed = new AtomicBoolean();
BaseRestHandler handler = new BaseRestHandler(Settings.EMPTY) {
@Override
protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
request.param("consumed");
return channel -> executed.set(true);
}
@Override
public String getName() {
return "test_one_unconsumed_response_action";
}
};
final HashMap<String, String> params = new HashMap<>();
params.put("consumed", randomAlphaOfLength(8));
params.put("unconsumed", randomAlphaOfLength(8));
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build();
RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1);
final IllegalArgumentException e =
expectThrows(IllegalArgumentException.class, () -> handler.handleRequest(request, channel, mock(NodeClient.class)));
assertThat(e, hasToString(containsString("request [/] contains unrecognized parameter: [unconsumed]")));
// the consumer must not have run when validation fails
assertFalse(executed.get());
}
// Multiple unrecognized parameters are all listed, in sorted order.
public void testMultipleUnconsumedParameters() throws Exception {
final AtomicBoolean executed = new AtomicBoolean();
BaseRestHandler handler = new BaseRestHandler(Settings.EMPTY) {
@Override
protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
request.param("consumed");
return channel -> executed.set(true);
}
@Override
public String getName() {
return "test_multiple_unconsumed_response_action";
}
};
final HashMap<String, String> params = new HashMap<>();
params.put("consumed", randomAlphaOfLength(8));
params.put("unconsumed-first", randomAlphaOfLength(8));
params.put("unconsumed-second", randomAlphaOfLength(8));
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build();
RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1);
final IllegalArgumentException e =
expectThrows(IllegalArgumentException.class, () -> handler.handleRequest(request, channel, mock(NodeClient.class)));
assertThat(e, hasToString(containsString("request [/] contains unrecognized parameters: [unconsumed-first], [unconsumed-second]")));
assertFalse(executed.get());
}
// Misspelled parameters get "did you mean" suggestions drawn from both the
// consumed parameters and the declared response params; parameters far from
// any known name get no suggestion.
public void testUnconsumedParametersDidYouMean() throws Exception {
final AtomicBoolean executed = new AtomicBoolean();
BaseRestHandler handler = new BaseRestHandler(Settings.EMPTY) {
@Override
protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
request.param("consumed");
request.param("field");
request.param("tokenizer");
request.param("very_close_to_parameter_1");
request.param("very_close_to_parameter_2");
return channel -> executed.set(true);
}
@Override
protected Set<String> responseParams() {
return Collections.singleton("response_param");
}
@Override
public String getName() {
return "test_unconsumed_did_you_mean_response_action";
}
};
final HashMap<String, String> params = new HashMap<>();
params.put("consumed", randomAlphaOfLength(8));
// each of the following is a deliberate near-miss of a known parameter
params.put("flied", randomAlphaOfLength(8));
params.put("respones_param", randomAlphaOfLength(8));
params.put("tokenzier", randomAlphaOfLength(8));
params.put("very_close_to_parametre", randomAlphaOfLength(8));
params.put("very_far_from_every_consumed_parameter", randomAlphaOfLength(8));
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build();
RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1);
final IllegalArgumentException e =
expectThrows(IllegalArgumentException.class, () -> handler.handleRequest(request, channel, mock(NodeClient.class)));
assertThat(
e,
hasToString(containsString(
"request [/] contains unrecognized parameters: " +
"[flied] -> did you mean [field]?, " +
"[respones_param] -> did you mean [response_param]?, " +
"[tokenzier] -> did you mean [tokenizer]?, " +
"[very_close_to_parametre] -> did you mean any of [very_close_to_parameter_1, very_close_to_parameter_2]?, " +
"[very_far_from_every_consumed_parameter]")));
assertFalse(executed.get());
}
// Parameters declared in responseParams() are allowed even if prepareRequest
// never reads them.
public void testUnconsumedResponseParameters() throws Exception {
final AtomicBoolean executed = new AtomicBoolean();
BaseRestHandler handler = new BaseRestHandler(Settings.EMPTY) {
@Override
protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
request.param("consumed");
return channel -> executed.set(true);
}
@Override
protected Set<String> responseParams() {
return Collections.singleton("response_param");
}
@Override
public String getName() {
return "test_unconsumed_response_action";
}
};
final HashMap<String, String> params = new HashMap<>();
params.put("consumed", randomAlphaOfLength(8));
params.put("response_param", randomAlphaOfLength(8));
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build();
RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1);
handler.handleRequest(request, channel, mock(NodeClient.class));
assertTrue(executed.get());
}
// Globally-recognized parameters (format, filter_path, pretty, human) never
// trigger the unconsumed-parameter check.
public void testDefaultResponseParameters() throws Exception {
final AtomicBoolean executed = new AtomicBoolean();
BaseRestHandler handler = new BaseRestHandler(Settings.EMPTY) {
@Override
protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
return channel -> executed.set(true);
}
@Override
public String getName() {
return "test_default_response_action";
}
};
final HashMap<String, String> params = new HashMap<>();
params.put("format", randomAlphaOfLength(8));
params.put("filter_path", randomAlphaOfLength(8));
params.put("pretty", randomFrom("true", "false", "", null));
params.put("human", null);
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build();
RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1);
handler.handleRequest(request, channel, mock(NodeClient.class));
assertTrue(executed.get());
}
// Cat actions additionally accept their table-formatting parameters
// (h, v, ts, pri, bytes, size, time) without consuming them explicitly.
public void testCatResponseParameters() throws Exception {
final AtomicBoolean executed = new AtomicBoolean();
AbstractCatAction handler = new AbstractCatAction(Settings.EMPTY) {
@Override
protected RestChannelConsumer doCatRequest(RestRequest request, NodeClient client) {
return channel -> executed.set(true);
}
@Override
protected void documentation(StringBuilder sb) {
}
@Override
protected Table getTableWithHeader(RestRequest request) {
return null;
}
@Override
public String getName() {
return "test_cat_response_action";
}
};
final HashMap<String, String> params = new HashMap<>();
params.put("format", randomAlphaOfLength(8));
params.put("h", randomAlphaOfLength(8));
params.put("v", randomAlphaOfLength(8));
params.put("ts", randomAlphaOfLength(8));
params.put("pri", randomAlphaOfLength(8));
params.put("bytes", randomAlphaOfLength(8));
params.put("size", randomAlphaOfLength(8));
params.put("time", randomAlphaOfLength(8));
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build();
RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1);
handler.handleRequest(request, channel, mock(NodeClient.class));
assertTrue(executed.get());
}
// A request body read via request.content() counts as consumed: no warning.
public void testConsumedBody() throws Exception {
final AtomicBoolean executed = new AtomicBoolean();
final BaseRestHandler handler = new BaseRestHandler(Settings.EMPTY) {
@Override
protected RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
request.content();
return channel -> executed.set(true);
}
@Override
public String getName() {
return "test_consumed_body";
}
};
try (XContentBuilder builder = JsonXContent.contentBuilder().startObject().endObject()) {
final RestRequest request = new FakeRestRequest.Builder(xContentRegistry())
.withContent(new BytesArray(builder.toString()), XContentType.JSON)
.build();
final RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1);
handler.handleRequest(request, channel, mock(NodeClient.class));
assertTrue(executed.get());
}
}
// No body supplied and none consumed: handler runs without warnings.
public void testUnconsumedNoBody() throws Exception {
final AtomicBoolean executed = new AtomicBoolean();
final BaseRestHandler handler = new BaseRestHandler(Settings.EMPTY) {
@Override
protected RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
return channel -> executed.set(true);
}
@Override
public String getName() {
return "test_unconsumed_body";
}
};
final RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).build();
final RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1);
handler.handleRequest(request, channel, mock(NodeClient.class));
assertTrue(executed.get());
}
// A body supplied but never read by the handler emits a deprecation warning,
// though the request still executes.
public void testUnconsumedBody() throws Exception {
final AtomicBoolean executed = new AtomicBoolean();
final BaseRestHandler handler = new BaseRestHandler(Settings.EMPTY) {
@Override
protected RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
return channel -> executed.set(true);
}
@Override
public String getName() {
return "test_unconsumed_body";
}
};
try (XContentBuilder builder = JsonXContent.contentBuilder().startObject().endObject()) {
final RestRequest request = new FakeRestRequest.Builder(xContentRegistry())
.withContent(new BytesArray(builder.toString()), XContentType.JSON)
.build();
final RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1);
handler.handleRequest(request, channel, mock(NodeClient.class));
assertWarnings("request [GET /] does not support having a body; Elasticsearch 7.x+ will reject such requests");
assertTrue(executed.get());
}
}
}
| |
/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cli;
import com.facebook.buck.event.CompilerErrorEvent;
import com.facebook.buck.event.ProgressEvent;
import com.facebook.buck.event.ProjectGenerationEvent;
import com.facebook.buck.httpserver.WebServerBuckEventListener;
import com.facebook.buck.model.BuildId;
import com.facebook.buck.parser.ParseEvent;
import com.facebook.buck.rules.BuildEvent;
import com.facebook.buck.rules.IndividualTestEvent;
import com.facebook.buck.rules.TestRunEvent;
import com.facebook.buck.step.StepEvent;
import com.facebook.buck.testutil.integration.DebuggableTemporaryFolder;
import com.facebook.buck.testutil.integration.ProjectWorkspace;
import com.facebook.buck.testutil.integration.TestContext;
import com.facebook.buck.testutil.integration.TestDataHelper;

import org.easymock.EasyMock;
import org.junit.Rule;
import org.junit.Test;

import java.io.IOException;

import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.verify;
/**
* This tests capture the expectations of the intellij buck plugin.
* Upon modification, please inform the team that is currently maintaining the intellij buck plugin.
*/
public class WebServerBuckEventListenerTest {
@Rule
public DebuggableTemporaryFolder tmp = new DebuggableTemporaryFolder();
@Test
public void hasBuckBuildStartedThenEventsCalled() throws IOException, InterruptedException {
final ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(
this, "buck_events", tmp);
workspace.setUp();
WebServerBuckEventListener webServerBuckEventListener =
createMock(WebServerBuckEventListener.class);
//Build started
webServerBuckEventListener.buildStarted(anyObject(BuildEvent.Started.class));
EasyMock.expectLastCall().times(1);
//Build progress Event
webServerBuckEventListener
.buildProgressUpdated(anyObject(ProgressEvent.BuildProgressUpdated.class));
EasyMock.expectLastCall().atLeastOnce();
//Build finished
webServerBuckEventListener.buildFinished((BuildEvent.Finished) anyObject());
EasyMock.expectLastCall().times(1);
//Parse started
webServerBuckEventListener.parseStarted(anyObject(ParseEvent.Started.class));
EasyMock.expectLastCall().times(1);
//Parse progress Event
webServerBuckEventListener
.parsingProgressUpdated((ProgressEvent.ParsingProgressUpdated) anyObject());
EasyMock.expectLastCall().atLeastOnce();
//Parse finished
webServerBuckEventListener.parseFinished((ParseEvent.Finished) anyObject());
EasyMock.expectLastCall().times(1);
//Step started
// this project has only 1 step
webServerBuckEventListener.stepStarted(anyObject(StepEvent.Started.class));
EasyMock.expectLastCall().times(1);
//Step finished
webServerBuckEventListener.stepFinished((StepEvent.Finished) anyObject());
EasyMock.expectLastCall().times(1);
//Output trace
webServerBuckEventListener.outputTrace(anyObject(BuildId.class));
EasyMock.expectLastCall().times(1);
EasyMock.replay(webServerBuckEventListener);
ProjectWorkspace.ProcessResult build =
workspace.runBuckdCommand(
new TestContext(),
webServerBuckEventListener,
"build",
"//:foo");
build.assertSuccess();
verify(webServerBuckEventListener);
}
@Test
public void hasBuckTestStartedThenEventsCalled() throws IOException, InterruptedException {
final ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(
this, "buck_events/test", tmp);
workspace.setUp();
WebServerBuckEventListener webServerBuckEventListener =
createMock(WebServerBuckEventListener.class);
//Build started
webServerBuckEventListener.buildStarted(anyObject(BuildEvent.Started.class));
EasyMock.expectLastCall().times(1);
//Build progress Event
webServerBuckEventListener
.buildProgressUpdated(anyObject(ProgressEvent.BuildProgressUpdated.class));
EasyMock.expectLastCall().atLeastOnce();
//Build finished
webServerBuckEventListener.buildFinished(anyObject(BuildEvent.Finished.class));
EasyMock.expectLastCall().times(1);
//Parse started
webServerBuckEventListener.parseStarted(anyObject(ParseEvent.Started.class));
EasyMock.expectLastCall().times(1);
//Parse progress Event
webServerBuckEventListener
.parsingProgressUpdated(anyObject(ProgressEvent.ParsingProgressUpdated.class));
EasyMock.expectLastCall().atLeastOnce();
//Parse finished
webServerBuckEventListener.parseFinished(anyObject(ParseEvent.Finished.class));
EasyMock.expectLastCall().times(1);
//Step started
//This target has only 1 step
webServerBuckEventListener.stepStarted(anyObject(StepEvent.Started.class));
EasyMock.expectLastCall().times(1);
//Step finished
webServerBuckEventListener.stepFinished(anyObject(StepEvent.Finished.class));
EasyMock.expectLastCall().times(1);
//Individual test started
//This target has only 1 test
webServerBuckEventListener.testAwaitingResults(anyObject(IndividualTestEvent.Started.class));
EasyMock.expectLastCall().times(1);
//Individual test finished
webServerBuckEventListener.testResultsAvailable(anyObject(IndividualTestEvent.Finished.class));
EasyMock.expectLastCall().times(1);
//Test started
webServerBuckEventListener.testRunStarted(anyObject(TestRunEvent.Started.class));
EasyMock.expectLastCall().times(1);
//Test finished
webServerBuckEventListener.testRunCompleted(anyObject(TestRunEvent.Finished.class));
EasyMock.expectLastCall().times(1);
//Output trace
webServerBuckEventListener.outputTrace(anyObject(BuildId.class));
EasyMock.expectLastCall().times(1);
EasyMock.replay(webServerBuckEventListener);
ProjectWorkspace.ProcessResult build =
workspace.runBuckdCommand(
new TestContext(),
webServerBuckEventListener,
"test",
"//:simple_test");
build.assertSuccess();
verify(webServerBuckEventListener);
}
@Test
public void hasBuckCompilerErrorOccurredThenEventsCalled()
throws IOException, InterruptedException {
final ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(
this, "buck_events/compiler_error", tmp);
workspace.setUp();
WebServerBuckEventListener webServerBuckEventListener =
createMock(WebServerBuckEventListener.class);
//Build started
webServerBuckEventListener.buildStarted(anyObject(BuildEvent.Started.class));
EasyMock.expectLastCall().times(1);
//Build progress Event
webServerBuckEventListener
.buildProgressUpdated(anyObject(ProgressEvent.BuildProgressUpdated.class));
EasyMock.expectLastCall().atLeastOnce();
//Build finished
webServerBuckEventListener.buildFinished(anyObject(BuildEvent.Finished.class));
EasyMock.expectLastCall().times(1);
//Parse started
webServerBuckEventListener.parseStarted(anyObject(ParseEvent.Started.class));
EasyMock.expectLastCall().times(1);
//Parse progress Event
webServerBuckEventListener
.parsingProgressUpdated(anyObject(ProgressEvent.ParsingProgressUpdated.class));
EasyMock.expectLastCall().atLeastOnce();
//Parse finished
webServerBuckEventListener.parseFinished(anyObject(ParseEvent.Finished.class));
EasyMock.expectLastCall().times(1);
//Step started
//This target has only 1 step
webServerBuckEventListener.stepStarted(anyObject(StepEvent.Started.class));
EasyMock.expectLastCall().times(1);
//Step finished
webServerBuckEventListener.stepFinished(anyObject(StepEvent.Finished.class));
EasyMock.expectLastCall().times(1);
//Compiler error
webServerBuckEventListener.compilerErrorEvent(anyObject(CompilerErrorEvent.class));
EasyMock.expectLastCall().times(1);
//Output trace
webServerBuckEventListener.outputTrace(anyObject(BuildId.class));
EasyMock.expectLastCall().times(1);
EasyMock.replay(webServerBuckEventListener);
ProjectWorkspace.ProcessResult build =
workspace.runBuckdCommand(
new TestContext(),
webServerBuckEventListener,
"build",
"//:broken");
build.assertFailure();
verify(webServerBuckEventListener);
}
@Test
public void hasBuckProjectGenerationStartedThenEventsCalled()
throws IOException, InterruptedException {
final ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(
this, "buck_events", tmp);
workspace.setUp();
WebServerBuckEventListener webServerBuckEventListener =
createMock(WebServerBuckEventListener.class);
//Parse started
webServerBuckEventListener.parseStarted(anyObject(ParseEvent.Started.class));
EasyMock.expectLastCall().times(1);
//Parse progress Event
webServerBuckEventListener
.parsingProgressUpdated(anyObject(ProgressEvent.ParsingProgressUpdated.class));
EasyMock.expectLastCall().atLeastOnce();
//Parse finished
webServerBuckEventListener.parseFinished(anyObject(ParseEvent.Finished.class));
EasyMock.expectLastCall().times(1);
//Project generation started
webServerBuckEventListener
.projectGenerationStarted(
anyObject(ProjectGenerationEvent.Started.class));
EasyMock.expectLastCall().times(1);
//Project generation finished
webServerBuckEventListener
.projectGenerationFinished(
anyObject(ProjectGenerationEvent.Finished.class));
EasyMock.expectLastCall().times(1);
//Output trace
webServerBuckEventListener.outputTrace(anyObject(BuildId.class));
EasyMock.expectLastCall().times(1);
EasyMock.replay(webServerBuckEventListener);
ProjectWorkspace.ProcessResult build =
workspace.runBuckdCommand(
new TestContext(),
webServerBuckEventListener,
"project",
"//:foo");
build.assertSuccess();
verify(webServerBuckEventListener);
}
}
| |
/*
Derby - Class org.apache.derby.impl.sql.compile.ConstantNode
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.impl.sql.compile;
import org.apache.derby.iapi.types.DataValueDescriptor;
import org.apache.derby.iapi.types.TypeId;
import org.apache.derby.iapi.error.StandardException;
import org.apache.derby.iapi.services.compiler.MethodBuilder;
import org.apache.derby.iapi.services.compiler.LocalField;
import org.apache.derby.iapi.services.sanity.SanityManager;
import org.apache.derby.impl.sql.compile.ExpressionClassBuilder;
import org.apache.derby.iapi.store.access.Qualifier;
import org.apache.derby.iapi.util.ReuseFactory;
import java.util.Vector;
/**
* ConstantNode holds literal constants as well as nulls.
* <p>
* A NULL from the parser may not yet know its type; that
* must be set during binding, as it is for parameters.
* <p>
* the DataValueDescriptor methods want to throw exceptions
* when they are of the wrong type, but to do that they
* must check typeId when the value is null, rather than
* the instanceof check they do for returning a valid value.
* <p>
* For code generation, we generate a static field. Then we set the
* field be the proper constant expression (something like <code>
* getDatavalueFactory().getCharDataValue("hello", ...)) </code>)
* in the constructor of the generated method. Ideally
* we would have just
*/
abstract class ConstantNode extends ValueNode
{
    // The literal value.  A null reference, or a DataValueDescriptor whose
    // isNull() is true, represents a typed SQL NULL (see isNull() below).
    DataValueDescriptor value;

    /*
    ** In case generateExpression() is called twice (something
    ** that probably wont happen but might), we will cache
    ** our generated expression and just return a reference
    ** to the field that holds our value (instead of having
    ** two fields holding the same constant).
    */

    /**
     * Initializer for non-numeric types
     *
     * @param typeId		The Type ID of the datatype
     * @param nullable	True means the constant is nullable
     * @param maximumWidth	The maximum number of bytes in the data value
     *
     * @exception StandardException		Thrown on error setting the type
     */
    public void init(
            Object typeId,
            Object nullable,
            Object maximumWidth)
        throws StandardException
    {
        // Arguments arrive as Objects (node-factory convention); unbox them
        // before delegating to setType().
        setType((TypeId) typeId,
                ((Boolean) nullable).booleanValue(),
                ((Integer) maximumWidth).intValue());
    }

    /**
     * Constructor for untyped nodes, which contain little information
     * (the type is filled in later, during binding).
     */
    ConstantNode()
    {
        super();
    }

    /**
     * Set the value in this ConstantNode.
     *
     * @param value the literal value (may represent a SQL NULL)
     */
    void setValue(DataValueDescriptor value)
    {
        this.value = value;
    }

    /**
     * Get the value in this ConstantNode
     *
     * @return the literal value; may be null for a SQL NULL
     */
    public DataValueDescriptor getValue()
    {
        return value;
    }

    /**
     * Convert this object to a String.  See comments in QueryTreeNode.java
     * for how this should be done for tree printing.
     *
     * @return	This object as a String
     */
    public String toString()
    {
        if (SanityManager.DEBUG)
        {
            return "value: " + value + "\n" +
                super.toString();
        }
        else
        {
            // Tree printing is a debug-only facility; release builds
            // return an empty string.
            return "";
        }
    }

    /**
     * Return whether or not this expression tree is cloneable.
     *
     * @return boolean	Whether or not this expression tree is cloneable.
     */
    public boolean isCloneable()
    {
        return true;
    }

    /**
     * Return a clone of this node.
     *
     * @return ValueNode	A clone of this node.
     */
    public ValueNode getClone()
    {
        /* All constants can simply be reused */
        return this;
    }

    /**
     * Bind this expression.  This means binding the sub-expressions,
     * as well as figuring out what the return type is for this expression.
     * In this case, there are no sub-expressions, and the return type
     * is already known, so this is just a stub.
     *
     * @param fromList		The FROM list for the query this
     *				expression is in, for binding columns.
     * @param subqueryList		The subquery list being built as we find SubqueryNodes
     * @param aggregateVector	The aggregate vector being built as we find AggregateNodes
     *
     * @return	The new top of the expression tree.
     *
     * @exception StandardException		Thrown on error.  Although this class
     * doesn't throw this exception, it's subclasses do and hence this method
     * signature here needs to have throws StandardException
     */
    public ValueNode bindExpression(
            FromList fromList, SubqueryList subqueryList,
            Vector aggregateVector)
        throws StandardException
    {
        /*
        ** This has to be here for binding to work, but it doesn't
        ** have to do anything, because the datatypes of constant nodes
        ** are pre-generated by the parser.
        */
        return this;
    }

    /**
     * Return whether or not this expression tree represents a constant expression.
     *
     * @return	Whether or not this expression tree represents a constant expression.
     */
    public boolean isConstantExpression()
    {
        return true;
    }

    /** @see ValueNode#constantExpression */
    public boolean constantExpression(PredicateList whereClause)
    {
        return true;
    }

    /**
     * For a ConstantNode, we generate the equivalent literal value.
     * A null is generated as a Null value cast to the type of
     * the constant node.
     * The subtypes of ConstantNode generate literal expressions
     * for non-null values.
     *
     * @param acb	The ExpressionClassBuilder for the class being built
     * @param mb	The method the code to place the code
     *
     * @exception StandardException		Thrown on error
     */
    public void generateExpression
    (
        ExpressionClassBuilder acb,
        MethodBuilder mb
    ) throws StandardException
    {
        /* Are we generating a SQL null value? */
        if (isNull())
        {
            acb.generateNull(mb, getTypeCompiler(),
                    getTypeServices().getCollationType());
        }
        else
        {
            generateConstant(acb, mb);	// ask sub type to give a constant,
                                        // usually a literal like 'hello'
            acb.generateDataValue(mb, getTypeCompiler(),
                    getTypeServices().getCollationType(), (LocalField) null);
        }
    }

    /**
     * This generates the proper constant.  It is implemented
     * by every specific constant node (e.g. IntConstantNode).
     *
     * @param acb	The ExpressionClassBuilder for the class being built
     * @param mb	The method the code to place the code
     *
     * @exception StandardException		Thrown on error
     */
    abstract void generateConstant(ExpressionClassBuilder acb, MethodBuilder mb)
        throws StandardException;

    /**
     * Return whether or not this node represents a typed null constant.
     * True when either no value has been set or the value itself is a
     * SQL NULL.
     */
    boolean isNull()
    {
        return (value == null || value.isNull());
    }

    /**
     * Return the variant type for the underlying expression.
     * The variant type can be:
     *		VARIANT				- variant within a scan
     *							  (method calls and non-static field access)
     *		SCAN_INVARIANT		- invariant within a scan
     *							  (column references from outer tables)
     *		QUERY_INVARIANT		- invariant within the life of a query
     *		CONSTANT			- immutable
     *
     * @return	The variant type for the underlying expression.
     */
    protected int getOrderableVariantType()
    {
        // Constants are constant for the life of the query
        return Qualifier.CONSTANT;
    }

    // Two constant nodes are equivalent when they are the same node type and
    // hold equal (or both-null) values.
    protected boolean isEquivalent(ValueNode o) throws StandardException
    {
        if (isSameNodeType(o)) {
            ConstantNode other = (ConstantNode)o;
            // value can be null which represents a SQL NULL value.
            return ( (other.getValue() == null && getValue() == null) ||
                    (other.getValue() != null &&
                            other.getValue().compare(getValue()) == 0) );
        }
        return false;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kylin.common.persistence;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Collection;
import org.apache.commons.io.FileUtils;
import org.apache.kylin.common.KylinConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class FileResourceStore extends ResourceStore {

    private static final Logger logger = LoggerFactory.getLogger(FileResourceStore.class);

    // Root directory backing this store; every resource path is resolved
    // relative to it (see file()).
    File root;

    // Test hooks: when a countdown reaches zero the corresponding operation
    // throws IOException("for test").  Integer.MAX_VALUE effectively
    // disables the fault injection.
    int failPutResourceCountDown = Integer.MAX_VALUE;
    int failVisitFolderCountDown = Integer.MAX_VALUE;

    /**
     * Creates a file-backed resource store rooted at the directory named by
     * the metadata URL in the given config.
     *
     * @throws IllegalArgumentException if the root directory does not exist
     */
    public FileResourceStore(KylinConfig kylinConfig) {
        super(kylinConfig);
        root = new File(getPath(kylinConfig)).getAbsoluteFile();
        if (root.exists() == false)
            throw new IllegalArgumentException(
                    "File not exist by '" + kylinConfig.getMetadataUrl() + "': " + root.getAbsolutePath());
    }

    // Resolves the filesystem path from the config's metadata URL.
    protected String getPath(KylinConfig kylinConfig) {
        return kylinConfig.getMetadataUrl().getIdentifier();
    }

    @Override
    protected boolean existsImpl(String resPath) throws IOException {
        File f = file(resPath);
        return f.exists() && f.isFile(); // directory is not considered a resource
    }

    /**
     * Walks the files under folderPath (optionally recursive), passing each
     * one that matches the filter to the visitor.  Content streams are only
     * opened when loadContent is true, and are always closed after the visit.
     */
    @Override
    protected void visitFolderImpl(String folderPath, boolean recursive, VisitFilter filter, boolean loadContent,
            Visitor visitor) throws IOException {
        // Fault-injection hook for tests.
        if (--failVisitFolderCountDown == 0)
            throw new IOException("for test");

        File file = file(folderPath);
        if (!file.exists() || !file.isDirectory())
            return;

        // Prefix of every absolute child path; stripped to recover the
        // store-relative resource path below.
        String prefix = fixWinPath(file);
        Collection<File> files = FileUtils.listFiles(file, null, recursive);
        for (File f : files) {
            String path = fixWinPath(f);
            if (!path.startsWith(prefix))
                throw new IllegalStateException("File path " + path + " is supposed to start with " + prefix);

            String resPath = folderPath.equals("/") ? path.substring(prefix.length())
                    : folderPath + path.substring(prefix.length());

            if (filter.matches(resPath, f.lastModified())) {
                RawResource raw = loadContent ? new RawResource(resPath, f.lastModified(), new FileInputStream(f))
                        : new RawResource(resPath, f.lastModified());
                try {
                    visitor.visit(raw);
                } finally {
                    // Always release the underlying stream (if any).
                    raw.close();
                }
            }
        }
    }

    // Normalizes Windows drive-letter paths (e.g. "C:\a\b") to use forward
    // slashes so prefix comparison works uniformly across platforms.
    private String fixWinPath(File file) {
        String path = file.getAbsolutePath();
        if (path.length() > 2 && path.charAt(1) == ':' && path.charAt(2) == '\\')
            path = path.replace('\\', '/');
        return path;
    }

    /**
     * Returns the resource at resPath with an open input stream, or null if
     * no regular file exists there.  Caller is responsible for closing the
     * returned RawResource.
     */
    @Override
    protected RawResource getResourceImpl(String resPath) throws IOException {
        File f = file(resPath);
        if (f.exists() && f.isFile()) {
            if (f.length() == 0) {
                logger.warn("Zero length file: {}. ", f.getAbsolutePath());
            }
            return new RawResource(resPath, f.lastModified(), new FileInputStream(f));
        } else {
            return null;
        }
    }

    // Returns the file's last-modified time, or 0 when the resource is absent.
    @Override
    protected long getResourceTimestampImpl(String resPath) throws IOException {
        File f = file(resPath);
        if (f.exists() && f.isFile())
            return f.lastModified();
        else
            return 0;
    }

    /**
     * Writes a resource atomically: the content is first written to a temp
     * file, which is then renamed over the destination.  If the rename fails
     * (e.g. the destination is briefly held open), the destination is deleted
     * with a short retry loop and the temp file is moved into place.
     */
    @Override
    protected void putResourceImpl(String resPath, ContentWriter content, long ts) throws IOException {
        // Fault-injection hook for tests.
        if (--failPutResourceCountDown == 0)
            throw new IOException("for test");

        File tmp = File.createTempFile("kylin-fileresource-", ".tmp");
        try {
            try (FileOutputStream out = new FileOutputStream(tmp); DataOutputStream dout = new DataOutputStream(out)) {
                content.write(dout);
                dout.flush();
            }

            File f = file(resPath);
            f.getParentFile().mkdirs();
            if (!tmp.renameTo(f)) {
                // Rename can fail if the target exists/is locked; delete and
                // retry briefly before falling back to a copy-based move.
                // NOTE(review): the sleep/retry presumably works around
                // delayed deletes on Windows — confirm.
                f.delete();
                for (int i = 0; f.exists() && i < 3; i++) {
                    try {
                        Thread.sleep(10);
                    } catch (InterruptedException e) {
                        throw new RuntimeException(e);
                    }
                    f.delete();
                }
                FileUtils.moveFile(tmp, f);
            }

            f.setLastModified(ts);
        } finally {
            if (tmp.exists())
                FileUtils.forceDelete(tmp);
        }
    }

    /**
     * Optimistic-concurrency write: succeeds only when the on-disk timestamp
     * still equals oldTS (or the file is absent and oldTS is 0).
     *
     * @return the timestamp of the written file
     * @throws WriteConflictException when another writer changed the resource
     */
    @Override
    protected long checkAndPutResourceImpl(String resPath, byte[] content, long oldTS, long newTS)
            throws IOException, WriteConflictException {
        File f = file(resPath);
        if ((f.exists() && f.lastModified() != oldTS) || (f.exists() == false && oldTS != 0))
            throw new WriteConflictException(
                    "Overwriting conflict " + resPath + ", expect old TS " + oldTS + ", but found " + f.lastModified());

        putResourceImpl(resPath, ContentWriter.create(content), newTS);

        return f.lastModified();
    }

    @Override
    protected void deleteResourceImpl(String resPath) throws IOException {
        File f = file(resPath);
        try {
            if (f.exists())
                FileUtils.forceDelete(f);
        } catch (FileNotFoundException e) {
            // FileNotFoundException is not a problem in case of delete
        }
    }

    @Override
    protected String getReadableResourcePathImpl(String resPath) {
        return file(resPath).toString();
    }

    // Maps a store-relative resource path to its backing file; "/" maps to
    // the root directory itself.
    private File file(String resPath) {
        if (resPath.equals("/"))
            return root;
        else
            return new File(root, resPath);
    }

    @Override
    public String toString() {
        return root.getAbsolutePath();
    }
}
| |
package mpi.topo;
/****************************************************************************
MESSAGE PASSING INTERFACE TEST CASE SUITE
Copyright IBM Corp. 1995
IBM Corp. hereby grants a non-exclusive license to use, copy, modify, and
distribute this software for any purpose and without fee provided that the
above copyright notice and the following paragraphs appear in all copies.
IBM Corp. makes no representation that the test cases comprising this
suite are correct or are an accurate representation of any standard.
In no event shall IBM be liable to any party for direct, indirect, special
incidental, or consequential damage arising out of the use of this software
even if IBM Corp. has been advised of the possibility of such damage.
IBM CORP. SPECIFICALLY DISCLAIMS ANY WARRANTIES INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS AND IBM
CORP. HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES,
ENHANCEMENTS, OR MODIFICATIONS.
****************************************************************************
These test cases reflect an interpretation of the MPI Standard. They are
are, in most cases, unit tests of specific MPI behaviors. If a user of any
test case from this set believes that the MPI Standard requires behavior
different than that implied by the test case we would appreciate feedback.
Comments may be sent to:
Richard Treumann
treumann@kgn.ibm.com
****************************************************************************
MPI-Java version :
Sung-Hoon Ko(shko@npac.syr.edu)
Northeast Parallel Architectures Center at Syracuse University
03/22/98
****************************************************************************
*/
import mpi.*;
/**
 * Exercises MPI Cartesian-topology operations (Create_cart, Rank, Coords,
 * Shift, Topo_test) on 2-D non-periodic and periodic grids, comparing the
 * results against hand-computed expectations.  Must run with exactly 6 tasks.
 */
public class cart {

    /** Entry point: runs the cartesian-topology test. */
    static public void main(String[] args) throws Exception {
        try {
            new cart(args);
        } catch (Exception e) {
            // Report failures instead of silently swallowing them; the
            // previous empty catch block hid every error from the caller.
            e.printStackTrace();
        }
    }

    public cart() {
    }

    /**
     * Runs the test: builds a 3x2 non-periodic cartesian communicator and a
     * 2x3 periodic one, validating rank/coordinate/shift arithmetic on each.
     */
    public cart(String[] args) throws Exception {
        int tasks, me, type, ndims;
        int rank, src, dest;
        int cnt = 0;

        MPI.Init(args);
        rank = MPI.COMM_WORLD.Rank();

        Comm comms[] = new Comm[20];
        Group gid = MPI.COMM_WORLD.Group();
        tasks = gid.Size();
        if (tasks != 6) {
            if (rank == 0)
                System.out.println("topo->cart: MUST RUN WITH 6 TASKS");
            MPI.COMM_WORLD.Barrier();
            MPI.Finalize();
            return;
        }

        /* test non-periodic topology */
        int dims2[] = new int[2];
        dims2[0] = 0;
        dims2[1] = 0;
        // Cartcomm.Dims_create(tasks,dims2);  -- disabled; dimensions are
        // assigned explicitly below, so the check that follows is a no-op
        // guard kept for parity with the C test suite.
        dims2[0] = 3;
        dims2[1] = 2;
        if (dims2[0] != 3 || dims2[1] != 2)
            System.out.println("ERROR in MPI_Dims_create, dims = " + dims2[0] + ","
                + dims2[1] + ", should be 3, 2");

        boolean periods2[] = new boolean[2];
        periods2[0] = false;
        periods2[1] = false;
        Cartcomm comm = MPI.COMM_WORLD.Create_cart(dims2, periods2, false);
        comms[cnt++] = comm;
        me = comm.Rank();

        // Topology type must report CART.
        type = comm.Topo_test();
        if (type != MPI.CART)
            System.out.println("ERROR in MPI_Topo_test, type = " + type
                + ", should be " + MPI.CART);

        ndims = comm.Get().dims.length;
        if (ndims != 2)
            System.out.println("ERROR in MPI_Cartdim_get, ndims = " + ndims
                + ", should be 2");

        // Row-major layout: rank me maps to coordinates (me/2, me%2).
        int coords2[] = new int[2];
        dims2 = comm.Get().dims;
        periods2 = comm.Get().periods;
        coords2 = comm.Get().coords;
        if (dims2[0] != 3 || dims2[1] != 2)
            System.out.println("ERROR in MPI_Cart_get, dims = " + dims2[0] + ","
                + dims2[1] + ", should be 3, 2");
        if (periods2[0] != false || periods2[1] != false)
            System.out.println("WRONG PERIODS!");
        if (coords2[0] != me / 2 || coords2[1] != me % 2) {
            System.out.println("ERROR in MPI_Cart_get, coords = " + coords2[0] + ","
                + coords2[1] + ", should be " + (me / 2) + "," + (me % 2));
            System.exit(0);
        }

        rank = comm.Rank(coords2);
        if (rank != me)
            System.out.println("ERROR in MPI_Cart_rank, rank = " + rank
                + ", should be " + me);

        coords2 = comm.Coords(rank);
        if (coords2[0] != me / 2 || coords2[1] != me % 2) {
            System.out.println("ERROR in MPI_Cart_coords, coords = " + coords2[0]
                + "," + coords2[1] + ", should be " + (me / 2) + ", " + (me % 2));
            System.exit(0);
        }

        // Shifting 5 steps in a non-periodic 3-row dimension falls off both
        // ends: source and destination must both be PROC_NULL.
        src = comm.Shift(0, 5).rank_source;
        dest = comm.Shift(0, 5).rank_dest;
        if (src != MPI.PROC_NULL || dest != MPI.PROC_NULL)
            System.out.println("ERROR in MPI_Cart_shift, src/dest = " + src + ","
                + dest + ", should be " + MPI.PROC_NULL + ", " + MPI.PROC_NULL);

        src = comm.Shift(0, 1).rank_source;
        dest = comm.Shift(0, 1).rank_dest;
        if (me / 2 < 2 && dest != me + 2)
            System.out.println("ERROR in MPI_Cart_shift, dest = " + dest
                + ", should be " + (me + 2));
        if (me / 2 > 0 && src != me - 2)
            System.out.println("ERROR in MPI_Cart_shift, src = " + src
                + ", should be " + (me - 2));

        src = comm.Shift(1, -1).rank_source;
        dest = comm.Shift(1, -1).rank_dest;
        if ((me % 2 == 1) && (dest != me - 1))
            System.out.println("ERROR in MPI_Cart_shift, dest = " + dest
                + ", should be " + (me - 1));
        if (me % 2 == 1 && src != MPI.PROC_NULL)
            System.out.println("ERROR in MPI_Cart_shift, src = " + src
                + ", should be " + MPI.PROC_NULL);
        if (me % 2 == 0 && src != me + 1)
            System.out.println("ERROR in MPI_Cart_shift, src = " + src
                + ", should be " + (me + 1));
        if (me % 2 == 0 && dest != MPI.PROC_NULL)
            System.out.println("ERROR in MPI_Cart_shift, dest = " + dest
                + ", should be " + MPI.PROC_NULL);

        /* test periodic topology */
        dims2[0] = 2;
        dims2[1] = 0;
        Cartcomm.Dims_create(tasks, dims2);
        dims2[0] = 2;
        dims2[1] = 3;
        if (dims2[0] != 2 || dims2[1] != 3)
            System.out.println("ERROR in MPI_Dims_create, dims = " + dims2[0] + ","
                + dims2[1] + ", should be 2, 3");

        periods2[0] = true;
        periods2[1] = true;
        comm = MPI.COMM_WORLD.Create_cart(dims2, periods2, false);
        comms[cnt++] = comm;
        me = comm.Rank();

        // Row-major layout on the 2x3 grid: rank me maps to (me/3, me%3).
        coords2[0] = me / 3;
        coords2[1] = me % 3;
        rank = comm.Rank(coords2);
        if (rank != me)
            System.out.println("ERROR in MPI_Cart_rank, rank = " + rank
                + ", should be " + me);

        coords2 = comm.Coords(rank);
        if (coords2[0] != me / 3 || coords2[1] != me % 3)
            System.out.println("ERROR in MPI_Cart_coords, coords = " + coords2[0]
                + "," + coords2[1] + ", should be " + (me / 3) + "," + (me % 3));

        // With periodic wraparound, a shift of 5 in a dimension of size 2
        // wraps both endpoints to (me+3) % 6.
        src = comm.Shift(0, 5).rank_source;
        dest = comm.Shift(0, 5).rank_dest;
        if (src != (me + 3) % 6 || dest != (me + 3) % 6)
            System.out.println("ERROR in MPI_Cart_shift, src/dest = " + src + ", "
                + dest + ", should be " + (me + 3) + ", " + (me + 3));

        src = comm.Shift(1, -1).rank_source;
        dest = comm.Shift(1, -1).rank_dest;
        int k = (me % 3 == 0) ? 1 : 0;
        if (dest != (me - 1) + 3 * k)
            System.out.println("ERROR in MPI_Cart_shift, dest = " + dest
                + ", should be " + ((me - 1 + 3) % 3));
        k = (me % 3 == 2) ? 1 : 0;
        if (src != (me + 1) - 3 * k)
            System.out.println("ERROR in MPI_Cart_shift, src = " + src
                + ", should be " + ((me + 1 + 3) % 3));

        // 1-D degenerate case: just make sure creation succeeds.
        dims2[0] = 1;
        comm = MPI.COMM_WORLD.Create_cart(dims2, periods2, false);
        comms[cnt++] = comm;

        MPI.COMM_WORLD.Barrier();
        if (me == 0)
            System.out.println("Cart TEST COMPLETE\n");
        MPI.Finalize();
    }
}
| |
package japicmp.compat;
import japicmp.cmp.ClassesHelper;
import japicmp.cmp.JarArchiveComparatorOptions;
import japicmp.model.AccessModifier;
import japicmp.model.JApiChangeStatus;
import japicmp.model.JApiClass;
import japicmp.model.JApiCompatibilityChange;
import japicmp.model.JApiConstructor;
import japicmp.model.JApiField;
import japicmp.model.JApiMethod;
import japicmp.model.JApiSuperclass;
import japicmp.util.CtClassBuilder;
import japicmp.util.CtConstructorBuilder;
import japicmp.util.CtFieldBuilder;
import japicmp.util.CtInterfaceBuilder;
import japicmp.util.CtMethodBuilder;
import javassist.ClassPool;
import javassist.CtClass;
import org.hamcrest.core.Is;
import org.junit.Test;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static japicmp.util.Helper.getJApiClass;
import static japicmp.util.Helper.getJApiConstructor;
import static japicmp.util.Helper.getJApiField;
import static japicmp.util.Helper.getJApiMethod;
import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
public class CompatibilityChangesTest {
@Test
public void testClassRemoved() throws Exception {
    // Compare an archive containing japicmp.Test against one where the
    // class has been deleted.
    JarArchiveComparatorOptions comparatorOptions = new JarArchiveComparatorOptions();
    comparatorOptions.setIncludeSynthetic(true);
    List<JApiClass> comparedClasses = ClassesHelper.compareClasses(comparatorOptions, new ClassesHelper.ClassesGenerator() {
        @Override
        public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
            CtClass oldClass = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
            return Collections.singletonList(oldClass);
        }

        @Override
        public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
            return Collections.emptyList();
        }
    });
    // A removed class is a binary-incompatible change.
    JApiClass removedClass = getJApiClass(comparedClasses, "japicmp.Test");
    assertThat(removedClass.getChangeStatus(), is(JApiChangeStatus.REMOVED));
    assertThat(removedClass.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.CLASS_REMOVED));
    assertThat(removedClass.isBinaryCompatible(), is(false));
}
@Test
public void testClassNowAbstract() throws Exception {
	JarArchiveComparatorOptions comparatorOptions = new JarArchiveComparatorOptions();
	comparatorOptions.setIncludeSynthetic(true);
	List<JApiClass> comparedClasses = ClassesHelper.compareClasses(comparatorOptions, new ClassesHelper.ClassesGenerator() {
		@Override
		public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
			// Old API: a concrete class.
			return Collections.singletonList(CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool));
		}
		@Override
		public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
			// New API: the same class, now declared abstract.
			return Collections.singletonList(CtClassBuilder.create().abstractModifier().name("japicmp.Test").addToClassPool(classPool));
		}
	});
	JApiClass classUnderTest = getJApiClass(comparedClasses, "japicmp.Test");
	// Making a class abstract breaks clients that instantiate it.
	assertThat(classUnderTest.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
	assertThat(classUnderTest.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.CLASS_NOW_ABSTRACT));
	assertThat(classUnderTest.isBinaryCompatible(), is(false));
}
@Test
public void testClassNowFinal() throws Exception {
	JarArchiveComparatorOptions comparatorOptions = new JarArchiveComparatorOptions();
	comparatorOptions.setIncludeSynthetic(true);
	List<JApiClass> comparedClasses = ClassesHelper.compareClasses(comparatorOptions, new ClassesHelper.ClassesGenerator() {
		@Override
		public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
			// Old API: a non-final class.
			return Collections.singletonList(CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool));
		}
		@Override
		public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
			// New API: the same class, now declared final.
			return Collections.singletonList(CtClassBuilder.create().finalModifier().name("japicmp.Test").addToClassPool(classPool));
		}
	});
	JApiClass classUnderTest = getJApiClass(comparedClasses, "japicmp.Test");
	// Making a class final breaks existing subclasses.
	assertThat(classUnderTest.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
	assertThat(classUnderTest.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.CLASS_NOW_FINAL));
	assertThat(classUnderTest.isBinaryCompatible(), is(false));
}
@Test
public void testClassNoLongerPublic() throws Exception {
	JarArchiveComparatorOptions comparatorOptions = new JarArchiveComparatorOptions();
	comparatorOptions.setIncludeSynthetic(true);
	List<JApiClass> comparedClasses = ClassesHelper.compareClasses(comparatorOptions, new ClassesHelper.ClassesGenerator() {
		@Override
		public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
			// Old API: a public class.
			return Collections.singletonList(CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool));
		}
		@Override
		public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
			// New API: visibility reduced to private.
			return Collections.singletonList(CtClassBuilder.create().privateModifier().name("japicmp.Test").addToClassPool(classPool));
		}
	});
	JApiClass classUnderTest = getJApiClass(comparedClasses, "japicmp.Test");
	// Narrowing class visibility is a binary-incompatible change.
	assertThat(classUnderTest.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
	assertThat(classUnderTest.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.CLASS_NO_LONGER_PUBLIC));
	assertThat(classUnderTest.isBinaryCompatible(), is(false));
}
@Test
public void testClassTypeChanged() throws Exception {
	JarArchiveComparatorOptions comparatorOptions = new JarArchiveComparatorOptions();
	comparatorOptions.setIncludeSynthetic(true);
	List<JApiClass> comparedClasses = ClassesHelper.compareClasses(comparatorOptions, new ClassesHelper.ClassesGenerator() {
		@Override
		public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
			// Old API: japicmp.Test is a class.
			return Collections.singletonList(CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool));
		}
		@Override
		public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
			// New API: japicmp.Test is now an interface.
			return Collections.singletonList(CtInterfaceBuilder.create().name("japicmp.Test").addToClassPool(classPool));
		}
	});
	JApiClass classUnderTest = getJApiClass(comparedClasses, "japicmp.Test");
	// Class-to-interface conversion breaks binary compatibility.
	assertThat(classUnderTest.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
	assertThat(classUnderTest.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.CLASS_TYPE_CHANGED));
	assertThat(classUnderTest.isBinaryCompatible(), is(false));
}
@Test
public void testSuperclassRemoved() throws Exception {
	JarArchiveComparatorOptions comparatorOptions = new JarArchiveComparatorOptions();
	comparatorOptions.setIncludeSynthetic(true);
	List<JApiClass> comparedClasses = ClassesHelper.compareClasses(comparatorOptions, new ClassesHelper.ClassesGenerator() {
		@Override
		public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
			// Old API: japicmp.Test extends japicmp.Superclass.
			CtClass base = CtClassBuilder.create().name("japicmp.Superclass").addToClassPool(classPool);
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").withSuperclass(base).addToClassPool(classPool);
			return Arrays.asList(clazz, base);
		}
		@Override
		public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
			// New API: japicmp.Test is an interface with no superclass.
			return Collections.singletonList(CtInterfaceBuilder.create().name("japicmp.Test").addToClassPool(classPool));
		}
	});
	JApiClass classUnderTest = getJApiClass(comparedClasses, "japicmp.Test");
	assertThat(classUnderTest.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
	// The lost superclass is reported on the superclass element of the class diff.
	assertThat(classUnderTest.getSuperclass().getCompatibilityChanges(), hasItem(JApiCompatibilityChange.SUPERCLASS_REMOVED));
	assertThat(classUnderTest.isBinaryCompatible(), is(false));
}
@Test
public void testSuperclassChanged() throws Exception {
	JarArchiveComparatorOptions comparatorOptions = new JarArchiveComparatorOptions();
	comparatorOptions.setIncludeSynthetic(true);
	List<JApiClass> comparedClasses = ClassesHelper.compareClasses(comparatorOptions, new ClassesHelper.ClassesGenerator() {
		@Override
		public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
			// Old API: japicmp.Test extends japicmp.Superclass.
			CtClass base = CtClassBuilder.create().name("japicmp.Superclass").addToClassPool(classPool);
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").withSuperclass(base).addToClassPool(classPool);
			return Arrays.asList(clazz, base);
		}
		@Override
		public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
			// New API: japicmp.Test now extends a different class, japicmp.Superclass2.
			CtClass base = CtClassBuilder.create().name("japicmp.Superclass2").addToClassPool(classPool);
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").withSuperclass(base).addToClassPool(classPool);
			return Arrays.asList(clazz, base);
		}
	});
	JApiClass classUnderTest = getJApiClass(comparedClasses, "japicmp.Test");
	assertThat(classUnderTest.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
	// NOTE(review): a swapped superclass is asserted here as SUPERCLASS_REMOVED
	// (removal of the old parent) — confirm this matches the comparator's reporting model.
	assertThat(classUnderTest.getSuperclass().getCompatibilityChanges(), hasItem(JApiCompatibilityChange.SUPERCLASS_REMOVED));
	assertThat(classUnderTest.isBinaryCompatible(), is(false));
}
@Test
public void testSuperclassAdded() throws Exception {
	JarArchiveComparatorOptions comparatorOptions = new JarArchiveComparatorOptions();
	comparatorOptions.setIncludeSynthetic(true);
	List<JApiClass> comparedClasses = ClassesHelper.compareClasses(comparatorOptions, new ClassesHelper.ClassesGenerator() {
		@Override
		public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
			// Old API: japicmp.Test with no explicit superclass.
			return Collections.singletonList(CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool));
		}
		@Override
		public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
			// New API: japicmp.Test now extends japicmp.Superclass.
			CtClass base = CtClassBuilder.create().name("japicmp.Superclass").addToClassPool(classPool);
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").withSuperclass(base).addToClassPool(classPool);
			return Arrays.asList(clazz, base);
		}
	});
	JApiClass classUnderTest = getJApiClass(comparedClasses, "japicmp.Test");
	assertThat(classUnderTest.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
	assertThat(classUnderTest.getSuperclass().getCompatibilityChanges(), hasItem(JApiCompatibilityChange.SUPERCLASS_ADDED));
	// Gaining a superclass keeps existing clients working: still binary compatible.
	assertThat(classUnderTest.isBinaryCompatible(), is(true));
	JApiSuperclass superclassDiff = classUnderTest.getSuperclass();
	assertThat(superclassDiff.isBinaryCompatible(), is(true));
	assertThat(superclassDiff.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.SUPERCLASS_ADDED));
}
@Test
public void testSuperclassUnchangedObject() throws Exception {
	JarArchiveComparatorOptions comparatorOptions = new JarArchiveComparatorOptions();
	comparatorOptions.setIncludeSynthetic(true);
	List<JApiClass> comparedClasses = ClassesHelper.compareClasses(comparatorOptions, new ClassesHelper.ClassesGenerator() {
		@Override
		public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
			// Both versions declare the identical class with the implicit Object superclass.
			return Collections.singletonList(CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool));
		}
		@Override
		public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
			return Collections.singletonList(CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool));
		}
	});
	JApiClass classUnderTest = getJApiClass(comparedClasses, "japicmp.Test");
	// Nothing changed: no compatibility findings anywhere, including the superclass element.
	assertThat(classUnderTest.getChangeStatus(), is(JApiChangeStatus.UNCHANGED));
	assertThat(classUnderTest.getCompatibilityChanges().size(), is(0));
	assertThat(classUnderTest.isBinaryCompatible(), is(true));
	JApiSuperclass superclassDiff = classUnderTest.getSuperclass();
	assertThat(superclassDiff.isBinaryCompatible(), is(true));
	assertThat(superclassDiff.getCompatibilityChanges().size(), is(0));
}
@Test
public void testMethodRemovedInSuperclass() throws Exception {
	JarArchiveComparatorOptions comparatorOptions = new JarArchiveComparatorOptions();
	comparatorOptions.setIncludeSynthetic(true);
	List<JApiClass> comparedClasses = ClassesHelper.compareClasses(comparatorOptions, new ClassesHelper.ClassesGenerator() {
		@Override
		public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
			// Old API: superclass provides getInstance(); subclass inherits it.
			CtClass base = CtClassBuilder.create().name("japicmp.Superclass").addToClassPool(classPool);
			CtMethodBuilder.create().publicAccess().returnType(base).name("getInstance").addToClass(base);
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").withSuperclass(base).addToClassPool(classPool);
			return Arrays.asList(clazz, base);
		}
		@Override
		public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
			// New API: the inherited method is gone from the superclass.
			CtClass base = CtClassBuilder.create().name("japicmp.Superclass").addToClassPool(classPool);
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").withSuperclass(base).addToClassPool(classPool);
			return Arrays.asList(clazz, base);
		}
	});
	JApiClass classUnderTest = getJApiClass(comparedClasses, "japicmp.Test");
	// The subclass itself is textually unchanged but loses an inherited method.
	assertThat(classUnderTest.getChangeStatus(), is(JApiChangeStatus.UNCHANGED));
	assertThat(classUnderTest.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.METHOD_REMOVED_IN_SUPERCLASS));
	assertThat(classUnderTest.isBinaryCompatible(), is(false));
}
@Test
public void testMethodRemovedInSuperclassButOverriddenInSubclass() throws Exception {
	JarArchiveComparatorOptions comparatorOptions = new JarArchiveComparatorOptions();
	comparatorOptions.setIncludeSynthetic(true);
	List<JApiClass> comparedClasses = ClassesHelper.compareClasses(comparatorOptions, new ClassesHelper.ClassesGenerator() {
		@Override
		public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
			// Old API: getInstance() exists in both the superclass and the subclass.
			CtClass base = CtClassBuilder.create().name("japicmp.Superclass").addToClassPool(classPool);
			CtMethodBuilder.create().publicAccess().returnType(base).name("getInstance").addToClass(base);
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").withSuperclass(base).addToClassPool(classPool);
			CtMethodBuilder.create().publicAccess().returnType(base).name("getInstance").addToClass(clazz);
			return Arrays.asList(clazz, base);
		}
		@Override
		public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
			// New API: the superclass copy is gone, but the subclass still declares it.
			CtClass base = CtClassBuilder.create().name("japicmp.Superclass").addToClassPool(classPool);
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").withSuperclass(base).addToClassPool(classPool);
			CtMethodBuilder.create().publicAccess().returnType(base).name("getInstance").addToClass(clazz);
			return Arrays.asList(clazz, base);
		}
	});
	JApiClass classUnderTest = getJApiClass(comparedClasses, "japicmp.Test");
	// Because the subclass keeps its own declaration, callers are unaffected.
	assertThat(classUnderTest.getChangeStatus(), is(JApiChangeStatus.UNCHANGED));
	assertThat(classUnderTest.isBinaryCompatible(), is(true));
	assertThat(classUnderTest.isSourceCompatible(), is(true));
}
@Test
public void testFieldRemovedInSuperclass() throws Exception {
	JarArchiveComparatorOptions comparatorOptions = new JarArchiveComparatorOptions();
	comparatorOptions.setIncludeSynthetic(true);
	List<JApiClass> comparedClasses = ClassesHelper.compareClasses(comparatorOptions, new ClassesHelper.ClassesGenerator() {
		@Override
		public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
			// Old API: superclass declares an int field inherited by the subclass.
			CtClass base = CtClassBuilder.create().name("japicmp.Superclass").addToClassPool(classPool);
			CtFieldBuilder.create().type(CtClass.intType).name("field").addToClass(base);
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").withSuperclass(base).addToClassPool(classPool);
			return Arrays.asList(clazz, base);
		}
		@Override
		public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
			// New API: the inherited field has been removed from the superclass.
			CtClass base = CtClassBuilder.create().name("japicmp.Superclass").addToClassPool(classPool);
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").withSuperclass(base).addToClassPool(classPool);
			return Arrays.asList(clazz, base);
		}
	});
	JApiClass classUnderTest = getJApiClass(comparedClasses, "japicmp.Test");
	// The subclass is textually unchanged yet loses an inherited field.
	assertThat(classUnderTest.getChangeStatus(), is(JApiChangeStatus.UNCHANGED));
	assertThat(classUnderTest.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.FIELD_REMOVED_IN_SUPERCLASS));
	assertThat(classUnderTest.isBinaryCompatible(), is(false));
}
@Test
public void testFieldRemovedInSuperclassButOverriddenInSubclass() throws Exception {
	JarArchiveComparatorOptions comparatorOptions = new JarArchiveComparatorOptions();
	comparatorOptions.setIncludeSynthetic(true);
	List<JApiClass> comparedClasses = ClassesHelper.compareClasses(comparatorOptions, new ClassesHelper.ClassesGenerator() {
		@Override
		public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
			// Old API: both superclass and subclass declare an int field named "field".
			CtClass base = CtClassBuilder.create().name("japicmp.Superclass").addToClassPool(classPool);
			CtFieldBuilder.create().type(CtClass.intType).name("field").addToClass(base);
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").withSuperclass(base).addToClassPool(classPool);
			CtFieldBuilder.create().type(CtClass.intType).name("field").addToClass(clazz);
			return Arrays.asList(clazz, base);
		}
		@Override
		public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
			// New API: the superclass copy is gone; the subclass still shadows it.
			CtClass base = CtClassBuilder.create().name("japicmp.Superclass").addToClassPool(classPool);
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").withSuperclass(base).addToClassPool(classPool);
			CtFieldBuilder.create().type(CtClass.intType).name("field").addToClass(clazz);
			return Arrays.asList(clazz, base);
		}
	});
	JApiClass classUnderTest = getJApiClass(comparedClasses, "japicmp.Test");
	// The subclass's own declaration keeps clients working.
	assertThat(classUnderTest.getChangeStatus(), is(JApiChangeStatus.UNCHANGED));
	assertThat(classUnderTest.isBinaryCompatible(), is(true));
	assertThat(classUnderTest.isSourceCompatible(), is(true));
}
@Test
public void testMethodRemoved() throws Exception {
	JarArchiveComparatorOptions comparatorOptions = new JarArchiveComparatorOptions();
	comparatorOptions.setIncludeSynthetic(true);
	List<JApiClass> comparedClasses = ClassesHelper.compareClasses(comparatorOptions, new ClassesHelper.ClassesGenerator() {
		@Override
		public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
			// Old API: the class declares a public method isRemoved().
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
			CtMethodBuilder.create().publicAccess().returnType(CtClass.booleanType).name("isRemoved").body("return true;").addToClass(clazz);
			return Collections.singletonList(clazz);
		}
		@Override
		public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
			// New API: the method has been deleted.
			return Collections.singletonList(CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool));
		}
	});
	JApiClass classUnderTest = getJApiClass(comparedClasses, "japicmp.Test");
	assertThat(classUnderTest.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
	assertThat(classUnderTest.isBinaryCompatible(), is(false));
	JApiMethod removedMethod = getJApiMethod(classUnderTest.getMethods(), "isRemoved");
	assertThat(removedMethod.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.METHOD_REMOVED));
	assertThat(removedMethod.isBinaryCompatible(), is(false));
	// The untouched superclass element must stay clean.
	JApiSuperclass superclassDiff = classUnderTest.getSuperclass();
	assertThat(superclassDiff.isBinaryCompatible(), is(true));
	assertThat(superclassDiff.getCompatibilityChanges().size(), is(0));
}
@Test
public void testMethodLessAccessiblePublicToPrivate() throws Exception {
	JarArchiveComparatorOptions comparatorOptions = new JarArchiveComparatorOptions();
	comparatorOptions.setIncludeSynthetic(true);
	List<JApiClass> comparedClasses = ClassesHelper.compareClasses(comparatorOptions, new ClassesHelper.ClassesGenerator() {
		@Override
		public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
			// Old API: isRemoved() is public.
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
			CtMethodBuilder.create().publicAccess().returnType(CtClass.booleanType).name("isRemoved").body("return true;").addToClass(clazz);
			return Collections.singletonList(clazz);
		}
		@Override
		public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
			// New API: the same method is now private.
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
			CtMethodBuilder.create().privateAccess().returnType(CtClass.booleanType).name("isRemoved").body("return true;").addToClass(clazz);
			return Collections.singletonList(clazz);
		}
	});
	JApiClass classUnderTest = getJApiClass(comparedClasses, "japicmp.Test");
	assertThat(classUnderTest.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
	assertThat(classUnderTest.isBinaryCompatible(), is(false));
	// Narrowing method access is a binary-incompatible change on the method element.
	JApiMethod changedMethod = getJApiMethod(classUnderTest.getMethods(), "isRemoved");
	assertThat(changedMethod.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.METHOD_LESS_ACCESSIBLE));
	assertThat(changedMethod.isBinaryCompatible(), is(false));
}
@Test
public void testMethodLessAccessibleThanInSuperclass() throws Exception {
	JarArchiveComparatorOptions comparatorOptions = new JarArchiveComparatorOptions();
	comparatorOptions.setIncludeSynthetic(true);
	List<JApiClass> comparedClasses = ClassesHelper.compareClasses(comparatorOptions, new ClassesHelper.ClassesGenerator() {
		@Override
		public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
			// Old API: the superclass declares a public isRemoved(); the subclass inherits it.
			CtClass base = CtClassBuilder.create().name("japicmp.Superclass").addToClassPool(classPool);
			CtMethodBuilder.create().publicAccess().returnType(CtClass.booleanType).name("isRemoved").body("return true;").addToClass(base);
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").withSuperclass(base).addToClassPool(classPool);
			return Arrays.asList(base, clazz);
		}
		@Override
		public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
			// New API: the subclass now declares the method with only protected access.
			CtClass base = CtClassBuilder.create().name("japicmp.Superclass").addToClassPool(classPool);
			CtMethodBuilder.create().publicAccess().returnType(CtClass.booleanType).name("isRemoved").body("return true;").addToClass(base);
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").withSuperclass(base).addToClassPool(classPool);
			CtMethodBuilder.create().protectedAccess().returnType(CtClass.booleanType).name("isRemoved").body("return true;").addToClass(clazz);
			return Arrays.asList(base, clazz);
		}
	});
	JApiClass classUnderTest = getJApiClass(comparedClasses, "japicmp.Test");
	assertThat(classUnderTest.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
	assertThat(classUnderTest.isBinaryCompatible(), is(false));
	JApiMethod changedMethod = getJApiMethod(classUnderTest.getMethods(), "isRemoved");
	assertThat(changedMethod.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.METHOD_LESS_ACCESSIBLE_THAN_IN_SUPERCLASS));
	assertThat(changedMethod.isBinaryCompatible(), is(false));
}
@Test
public void testMethodStaticOverridesNonStatic() throws Exception {
	JarArchiveComparatorOptions comparatorOptions = new JarArchiveComparatorOptions();
	comparatorOptions.setIncludeSynthetic(true);
	List<JApiClass> comparedClasses = ClassesHelper.compareClasses(comparatorOptions, new ClassesHelper.ClassesGenerator() {
		@Override
		public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
			// Old API: the superclass has an instance method isOverridden().
			CtClass base = CtClassBuilder.create().name("japicmp.Superclass").addToClassPool(classPool);
			CtMethodBuilder.create().publicAccess().returnType(CtClass.booleanType).name("isOverridden").body("return true;").addToClass(base);
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").withSuperclass(base).addToClassPool(classPool);
			return Arrays.asList(base, clazz);
		}
		@Override
		public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
			// New API: the subclass hides the instance method with a static one.
			CtClass base = CtClassBuilder.create().name("japicmp.Superclass").addToClassPool(classPool);
			CtMethodBuilder.create().publicAccess().returnType(CtClass.booleanType).name("isOverridden").body("return true;").addToClass(base);
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").withSuperclass(base).addToClassPool(classPool);
			CtMethodBuilder.create().publicAccess().staticAccess().returnType(CtClass.booleanType).name("isOverridden").body("return true;").addToClass(clazz);
			return Arrays.asList(base, clazz);
		}
	});
	JApiClass classUnderTest = getJApiClass(comparedClasses, "japicmp.Test");
	assertThat(classUnderTest.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
	assertThat(classUnderTest.isBinaryCompatible(), is(false));
	JApiMethod changedMethod = getJApiMethod(classUnderTest.getMethods(), "isOverridden");
	assertThat(changedMethod.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.METHOD_IS_STATIC_AND_OVERRIDES_NOT_STATIC));
	assertThat(changedMethod.isBinaryCompatible(), is(false));
}
@Test
public void testMethodReturnTypeChanges() throws Exception {
	JarArchiveComparatorOptions comparatorOptions = new JarArchiveComparatorOptions();
	comparatorOptions.setIncludeSynthetic(true);
	List<JApiClass> comparedClasses = ClassesHelper.compareClasses(comparatorOptions, new ClassesHelper.ClassesGenerator() {
		@Override
		public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
			// Old API: the method returns int.
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
			CtMethodBuilder.create().publicAccess().returnType(CtClass.intType).name("returnTypeChanges").body("return 42;").addToClass(clazz);
			return Collections.singletonList(clazz);
		}
		@Override
		public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
			// New API: the same method now returns boolean.
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
			CtMethodBuilder.create().publicAccess().returnType(CtClass.booleanType).name("returnTypeChanges").body("return true;").addToClass(clazz);
			return Collections.singletonList(clazz);
		}
	});
	JApiClass classUnderTest = getJApiClass(comparedClasses, "japicmp.Test");
	assertThat(classUnderTest.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
	assertThat(classUnderTest.isBinaryCompatible(), is(false));
	// The return type is part of the binary method descriptor, so this breaks linkage.
	JApiMethod changedMethod = getJApiMethod(classUnderTest.getMethods(), "returnTypeChanges");
	assertThat(changedMethod.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.METHOD_RETURN_TYPE_CHANGED));
	assertThat(changedMethod.isBinaryCompatible(), is(false));
}
@Test
public void testMethodNowAbstract() throws Exception {
	JarArchiveComparatorOptions comparatorOptions = new JarArchiveComparatorOptions();
	comparatorOptions.setIncludeSynthetic(true);
	List<JApiClass> comparedClasses = ClassesHelper.compareClasses(comparatorOptions, new ClassesHelper.ClassesGenerator() {
		@Override
		public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
			// Old API: a concrete method.
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
			CtMethodBuilder.create().publicAccess().returnType(CtClass.booleanType).name("methodBecomesAbstract").body("return true;").addToClass(clazz);
			return Collections.singletonList(clazz);
		}
		@Override
		public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
			// New API: the method is now declared abstract.
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
			CtMethodBuilder.create().publicAccess().abstractMethod().returnType(CtClass.booleanType).name("methodBecomesAbstract").body("return true;").addToClass(clazz);
			return Collections.singletonList(clazz);
		}
	});
	JApiClass classUnderTest = getJApiClass(comparedClasses, "japicmp.Test");
	assertThat(classUnderTest.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
	assertThat(classUnderTest.isBinaryCompatible(), is(false));
	JApiMethod changedMethod = getJApiMethod(classUnderTest.getMethods(), "methodBecomesAbstract");
	assertThat(changedMethod.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.METHOD_NOW_ABSTRACT));
	assertThat(changedMethod.isBinaryCompatible(), is(false));
}
@Test
public void testMethodNowFinal() throws Exception {
	JarArchiveComparatorOptions comparatorOptions = new JarArchiveComparatorOptions();
	comparatorOptions.setIncludeSynthetic(true);
	List<JApiClass> comparedClasses = ClassesHelper.compareClasses(comparatorOptions, new ClassesHelper.ClassesGenerator() {
		@Override
		public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
			// Old API: an overridable method.
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
			CtMethodBuilder.create().publicAccess().returnType(CtClass.booleanType).name("methodBecomesFinal").body("return true;").addToClass(clazz);
			return Collections.singletonList(clazz);
		}
		@Override
		public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
			// New API: the method is now final.
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
			CtMethodBuilder.create().publicAccess().finalMethod().returnType(CtClass.booleanType).name("methodBecomesFinal").body("return true;").addToClass(clazz);
			return Collections.singletonList(clazz);
		}
	});
	JApiClass classUnderTest = getJApiClass(comparedClasses, "japicmp.Test");
	assertThat(classUnderTest.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
	assertThat(classUnderTest.isBinaryCompatible(), is(false));
	JApiMethod changedMethod = getJApiMethod(classUnderTest.getMethods(), "methodBecomesFinal");
	assertThat(changedMethod.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.METHOD_NOW_FINAL));
	assertThat(changedMethod.isBinaryCompatible(), is(false));
}
@Test
public void testMethodNowStatic() throws Exception {
	JarArchiveComparatorOptions comparatorOptions = new JarArchiveComparatorOptions();
	comparatorOptions.setIncludeSynthetic(true);
	List<JApiClass> comparedClasses = ClassesHelper.compareClasses(comparatorOptions, new ClassesHelper.ClassesGenerator() {
		@Override
		public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
			// Old API: an instance method.
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
			CtMethodBuilder.create().publicAccess().returnType(CtClass.booleanType).name("methodBecomesStatic").body("return true;").addToClass(clazz);
			return Collections.singletonList(clazz);
		}
		@Override
		public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
			// New API: the method is now static.
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
			CtMethodBuilder.create().publicAccess().staticAccess().returnType(CtClass.booleanType).name("methodBecomesStatic").body("return true;").addToClass(clazz);
			return Collections.singletonList(clazz);
		}
	});
	JApiClass classUnderTest = getJApiClass(comparedClasses, "japicmp.Test");
	assertThat(classUnderTest.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
	assertThat(classUnderTest.isBinaryCompatible(), is(false));
	JApiMethod changedMethod = getJApiMethod(classUnderTest.getMethods(), "methodBecomesStatic");
	assertThat(changedMethod.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.METHOD_NOW_STATIC));
	assertThat(changedMethod.isBinaryCompatible(), is(false));
}
@Test
public void testMethodNoLongerStatic() throws Exception {
	JarArchiveComparatorOptions comparatorOptions = new JarArchiveComparatorOptions();
	comparatorOptions.setIncludeSynthetic(true);
	List<JApiClass> comparedClasses = ClassesHelper.compareClasses(comparatorOptions, new ClassesHelper.ClassesGenerator() {
		@Override
		public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
			// Old API: a static method.
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
			CtMethodBuilder.create().publicAccess().staticAccess().returnType(CtClass.booleanType).name("methodNoLongerStatic").body("return true;").addToClass(clazz);
			return Collections.singletonList(clazz);
		}
		@Override
		public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
			// New API: the method is now an instance method.
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
			CtMethodBuilder.create().publicAccess().returnType(CtClass.booleanType).name("methodNoLongerStatic").body("return true;").addToClass(clazz);
			return Collections.singletonList(clazz);
		}
	});
	JApiClass classUnderTest = getJApiClass(comparedClasses, "japicmp.Test");
	assertThat(classUnderTest.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
	assertThat(classUnderTest.isBinaryCompatible(), is(false));
	JApiMethod changedMethod = getJApiMethod(classUnderTest.getMethods(), "methodNoLongerStatic");
	assertThat(changedMethod.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.METHOD_NO_LONGER_STATIC));
	assertThat(changedMethod.isBinaryCompatible(), is(false));
}
@Test
public void testFieldStaticOverridesStatic() throws Exception {
	JarArchiveComparatorOptions comparatorOptions = new JarArchiveComparatorOptions();
	comparatorOptions.setIncludeSynthetic(true);
	List<JApiClass> comparedClasses = ClassesHelper.compareClasses(comparatorOptions, new ClassesHelper.ClassesGenerator() {
		@Override
		public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
			// Old API: only the superclass declares the static field.
			// NOTE(review): only the subclass is returned here; the superclass is
			// resolved via the class pool — confirm this is intentional.
			CtClass base = CtClassBuilder.create().name("japicmp.Superclass").addToClassPool(classPool);
			CtFieldBuilder.create().staticAccess().type(CtClass.intType).name("field").addToClass(base);
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").withSuperclass(base).addToClassPool(classPool);
			return Collections.singletonList(clazz);
		}
		@Override
		public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
			// New API: the subclass now declares its own static field that hides the inherited one.
			CtClass base = CtClassBuilder.create().name("japicmp.Superclass").addToClassPool(classPool);
			CtFieldBuilder.create().staticAccess().type(CtClass.intType).name("field").addToClass(base);
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").withSuperclass(base).addToClassPool(classPool);
			CtFieldBuilder.create().staticAccess().type(CtClass.intType).name("field").addToClass(clazz);
			return Collections.singletonList(clazz);
		}
	});
	JApiClass classUnderTest = getJApiClass(comparedClasses, "japicmp.Test");
	assertThat(classUnderTest.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
	assertThat(classUnderTest.isBinaryCompatible(), is(false));
	JApiField changedField = getJApiField(classUnderTest.getFields(), "field");
	assertThat(changedField.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.FIELD_STATIC_AND_OVERRIDES_STATIC));
	assertThat(changedField.isBinaryCompatible(), is(false));
}
@Test
public void testFieldLessAccessibleThanInSuperclass() throws Exception {
	JarArchiveComparatorOptions comparatorOptions = new JarArchiveComparatorOptions();
	comparatorOptions.setIncludeSynthetic(true);
	// Include private members so the narrowed field is part of the comparison.
	comparatorOptions.setAccessModifier(AccessModifier.PRIVATE);
	List<JApiClass> comparedClasses = ClassesHelper.compareClasses(comparatorOptions, new ClassesHelper.ClassesGenerator() {
		@Override
		public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
			// Old API: the field exists only in the superclass.
			CtClass base = CtClassBuilder.create().name("japicmp.Superclass").addToClassPool(classPool);
			CtFieldBuilder.create().type(CtClass.intType).name("field").addToClass(base);
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").withSuperclass(base).addToClassPool(classPool);
			return Collections.singletonList(clazz);
		}
		@Override
		public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
			// New API: the subclass shadows the field with package-private access.
			CtClass base = CtClassBuilder.create().name("japicmp.Superclass").addToClassPool(classPool);
			CtFieldBuilder.create().type(CtClass.intType).name("field").addToClass(base);
			CtClass clazz = CtClassBuilder.create().name("japicmp.Test").withSuperclass(base).addToClassPool(classPool);
			CtFieldBuilder.create().packageProtectedAccess().type(CtClass.intType).name("field").addToClass(clazz);
			return Collections.singletonList(clazz);
		}
	});
	JApiClass classUnderTest = getJApiClass(comparedClasses, "japicmp.Test");
	assertThat(classUnderTest.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
	assertThat(classUnderTest.isBinaryCompatible(), is(false));
	JApiField changedField = getJApiField(classUnderTest.getFields(), "field");
	assertThat(changedField.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.FIELD_LESS_ACCESSIBLE_THAN_IN_SUPERCLASS));
	assertThat(changedField.isBinaryCompatible(), is(false));
}
@Test
public void testFieldNowFinal() throws Exception {
JarArchiveComparatorOptions options = new JarArchiveComparatorOptions();
options.setIncludeSynthetic(true);
options.setAccessModifier(AccessModifier.PRIVATE);
List<JApiClass> jApiClasses = ClassesHelper.compareClasses(options, new ClassesHelper.ClassesGenerator() {
@Override
public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
CtFieldBuilder.create().type(CtClass.intType).name("field").addToClass(ctClass);
return Collections.singletonList(ctClass);
}
@Override
public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
CtFieldBuilder.create().finalAccess().type(CtClass.intType).name("field").addToClass(ctClass);
return Collections.singletonList(ctClass);
}
});
JApiClass jApiClass = getJApiClass(jApiClasses, "japicmp.Test");
assertThat(jApiClass.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
assertThat(jApiClass.isBinaryCompatible(), is(false));
JApiField jApiField = getJApiField(jApiClass.getFields(), "field");
assertThat(jApiField.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.FIELD_NOW_FINAL));
assertThat(jApiField.isBinaryCompatible(), is(false));
}
@Test
public void testFieldNowStatic() throws Exception {
JarArchiveComparatorOptions options = new JarArchiveComparatorOptions();
options.setIncludeSynthetic(true);
options.setAccessModifier(AccessModifier.PRIVATE);
List<JApiClass> jApiClasses = ClassesHelper.compareClasses(options, new ClassesHelper.ClassesGenerator() {
@Override
public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
CtFieldBuilder.create().type(CtClass.intType).name("field").addToClass(ctClass);
return Collections.singletonList(ctClass);
}
@Override
public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
CtFieldBuilder.create().staticAccess().type(CtClass.intType).name("field").addToClass(ctClass);
return Collections.singletonList(ctClass);
}
});
JApiClass jApiClass = getJApiClass(jApiClasses, "japicmp.Test");
assertThat(jApiClass.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
assertThat(jApiClass.isBinaryCompatible(), is(false));
JApiField jApiField = getJApiField(jApiClass.getFields(), "field");
assertThat(jApiField.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.FIELD_NOW_STATIC));
assertThat(jApiField.isBinaryCompatible(), is(false));
}
@Test
public void testFieldNoLongerStatic() throws Exception {
JarArchiveComparatorOptions options = new JarArchiveComparatorOptions();
options.setIncludeSynthetic(true);
options.setAccessModifier(AccessModifier.PRIVATE);
List<JApiClass> jApiClasses = ClassesHelper.compareClasses(options, new ClassesHelper.ClassesGenerator() {
@Override
public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
CtFieldBuilder.create().staticAccess().type(CtClass.intType).name("field").addToClass(ctClass);
return Collections.singletonList(ctClass);
}
@Override
public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
CtFieldBuilder.create().type(CtClass.intType).name("field").addToClass(ctClass);
return Collections.singletonList(ctClass);
}
});
JApiClass jApiClass = getJApiClass(jApiClasses, "japicmp.Test");
assertThat(jApiClass.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
assertThat(jApiClass.isBinaryCompatible(), is(false));
JApiField jApiField = getJApiField(jApiClass.getFields(), "field");
assertThat(jApiField.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.FIELD_NO_LONGER_STATIC));
assertThat(jApiField.isBinaryCompatible(), is(false));
}
@Test
public void testFieldTypeChanged() throws Exception {
JarArchiveComparatorOptions options = new JarArchiveComparatorOptions();
options.setIncludeSynthetic(true);
options.setAccessModifier(AccessModifier.PRIVATE);
List<JApiClass> jApiClasses = ClassesHelper.compareClasses(options, new ClassesHelper.ClassesGenerator() {
@Override
public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
CtFieldBuilder.create().type(CtClass.intType).name("field").addToClass(ctClass);
return Collections.singletonList(ctClass);
}
@Override
public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
CtFieldBuilder.create().type(CtClass.floatType).name("field").addToClass(ctClass);
return Collections.singletonList(ctClass);
}
});
JApiClass jApiClass = getJApiClass(jApiClasses, "japicmp.Test");
assertThat(jApiClass.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
assertThat(jApiClass.isBinaryCompatible(), is(false));
JApiField jApiField = getJApiField(jApiClass.getFields(), "field");
assertThat(jApiField.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.FIELD_TYPE_CHANGED));
assertThat(jApiField.isBinaryCompatible(), is(false));
}
@Test
public void testFieldRemoved() throws Exception {
JarArchiveComparatorOptions options = new JarArchiveComparatorOptions();
options.setIncludeSynthetic(true);
options.setAccessModifier(AccessModifier.PRIVATE);
List<JApiClass> jApiClasses = ClassesHelper.compareClasses(options, new ClassesHelper.ClassesGenerator() {
@Override
public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
CtFieldBuilder.create().type(CtClass.intType).name("field").addToClass(ctClass);
return Collections.singletonList(ctClass);
}
@Override
public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
return Collections.singletonList(ctClass);
}
});
JApiClass jApiClass = getJApiClass(jApiClasses, "japicmp.Test");
assertThat(jApiClass.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
assertThat(jApiClass.isBinaryCompatible(), is(false));
JApiField jApiField = getJApiField(jApiClass.getFields(), "field");
assertThat(jApiField.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.FIELD_REMOVED));
assertThat(jApiField.isBinaryCompatible(), is(false));
}
@Test
public void testFieldLessAccessible() throws Exception {
JarArchiveComparatorOptions options = new JarArchiveComparatorOptions();
options.setIncludeSynthetic(true);
options.setAccessModifier(AccessModifier.PRIVATE);
List<JApiClass> jApiClasses = ClassesHelper.compareClasses(options, new ClassesHelper.ClassesGenerator() {
@Override
public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
CtFieldBuilder.create().type(CtClass.intType).name("field").addToClass(ctClass);
return Collections.singletonList(ctClass);
}
@Override
public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
CtFieldBuilder.create().packageProtectedAccess().type(CtClass.intType).name("field").addToClass(ctClass);
return Collections.singletonList(ctClass);
}
});
JApiClass jApiClass = getJApiClass(jApiClasses, "japicmp.Test");
assertThat(jApiClass.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
assertThat(jApiClass.isBinaryCompatible(), is(false));
JApiField jApiField = getJApiField(jApiClass.getFields(), "field");
assertThat(jApiField.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.FIELD_LESS_ACCESSIBLE));
assertThat(jApiField.isBinaryCompatible(), is(false));
}
@Test
public void testConstructorRemoved() throws Exception {
JarArchiveComparatorOptions options = new JarArchiveComparatorOptions();
options.setIncludeSynthetic(true);
options.setAccessModifier(AccessModifier.PRIVATE);
List<JApiClass> jApiClasses = ClassesHelper.compareClasses(options, new ClassesHelper.ClassesGenerator() {
@Override
public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
CtConstructorBuilder.create().publicAccess().parameter(CtClass.intType).addToClass(ctClass);
return Collections.singletonList(ctClass);
}
@Override
public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
return Collections.singletonList(ctClass);
}
});
JApiClass jApiClass = getJApiClass(jApiClasses, "japicmp.Test");
assertThat(jApiClass.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
assertThat(jApiClass.isBinaryCompatible(), is(false));
JApiConstructor jApiConstructor = getJApiConstructor(jApiClass.getConstructors(), Collections.singletonList("int"));
assertThat(jApiConstructor.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.CONSTRUCTOR_REMOVED));
assertThat(jApiConstructor.isBinaryCompatible(), is(false));
}
@Test
public void testConstructorLessAccessible() throws Exception {
JarArchiveComparatorOptions options = new JarArchiveComparatorOptions();
options.setIncludeSynthetic(true);
options.setAccessModifier(AccessModifier.PRIVATE);
List<JApiClass> jApiClasses = ClassesHelper.compareClasses(options, new ClassesHelper.ClassesGenerator() {
@Override
public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
CtConstructorBuilder.create().publicAccess().parameter(CtClass.intType).addToClass(ctClass);
return Collections.singletonList(ctClass);
}
@Override
public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
CtConstructorBuilder.create().protectedAccess().parameter(CtClass.intType).addToClass(ctClass);
return Collections.singletonList(ctClass);
}
});
JApiClass jApiClass = getJApiClass(jApiClasses, "japicmp.Test");
assertThat(jApiClass.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
assertThat(jApiClass.isBinaryCompatible(), is(false));
JApiConstructor jApiConstructor = getJApiConstructor(jApiClass.getConstructors(), Collections.singletonList("int"));
assertThat(jApiConstructor.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.CONSTRUCTOR_LESS_ACCESSIBLE));
assertThat(jApiConstructor.isBinaryCompatible(), is(false));
}
@Test
public void testMethodAddedToInterface() throws Exception {
JarArchiveComparatorOptions options = new JarArchiveComparatorOptions();
options.setIncludeSynthetic(true);
options.setAccessModifier(AccessModifier.PRIVATE);
List<JApiClass> jApiClasses = ClassesHelper.compareClasses(options, new ClassesHelper.ClassesGenerator() {
@Override
public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
CtClass ctClass = CtInterfaceBuilder.create().name("japicmp.Test").addToClassPool(classPool);
return Collections.singletonList(ctClass);
}
@Override
public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
CtClass ctClass = CtInterfaceBuilder.create().name("japicmp.Test").addToClassPool(classPool);
CtMethodBuilder.create().publicAccess().abstractMethod().name("method").addToClass(ctClass);
return Collections.singletonList(ctClass);
}
});
JApiClass jApiClass = getJApiClass(jApiClasses, "japicmp.Test");
assertThat(jApiClass.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
assertThat(jApiClass.isBinaryCompatible(), is(true));
assertThat(jApiClass.isSourceCompatible(), is(false));
JApiMethod jApiMethod = getJApiMethod(jApiClass.getMethods(), "method");
assertThat(jApiMethod.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.METHOD_ADDED_TO_INTERFACE));
assertThat(jApiMethod.isBinaryCompatible(), is(true));
assertThat(jApiMethod.isSourceCompatible(), is(false));
}
	@Test
	public void testAbstractClassNowExtendsAnotherAbstractClass() throws Exception {
		// An abstract class gains a superclass carrying a new abstract method:
		// binary compatible, but source incompatible for concrete subclasses.
		JarArchiveComparatorOptions options = new JarArchiveComparatorOptions();
		options.setIncludeSynthetic(true);
		options.setAccessModifier(AccessModifier.PRIVATE);
		List<JApiClass> jApiClasses = ClassesHelper.compareClasses(options, new ClassesHelper.ClassesGenerator() {
			@Override
			public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
				// Old version: Test and Superclass are unrelated abstract classes.
				CtClass superClass = CtClassBuilder.create().abstractModifier().name("japicmp.Superclass").addToClassPool(classPool);
				CtClass ctClass = CtClassBuilder.create().abstractModifier().name("japicmp.Test").addToClassPool(classPool);
				return Arrays.asList(ctClass, superClass);
			}
			@Override
			public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
				// New version: Superclass declares a new abstract method and Test extends it.
				CtClass superClass = CtClassBuilder.create().abstractModifier().name("japicmp.Superclass").addToClassPool(classPool);
				CtMethodBuilder.create().returnType(CtClass.intType).abstractMethod().name("newAbstractMethod").addToClass(superClass);
				CtClass ctClass = CtClassBuilder.create().abstractModifier().name("japicmp.Test").withSuperclass(superClass).addToClassPool(classPool);
				return Arrays.asList(ctClass, superClass);
			}
		});
		JApiClass jApiClass = getJApiClass(jApiClasses, "japicmp.Test");
		assertThat(jApiClass.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
		assertThat(jApiClass.isBinaryCompatible(), is(true));
		assertThat(jApiClass.isSourceCompatible(), is(false));
		// The incompatibility is reported on the superclass's new abstract method.
		jApiClass = getJApiClass(jApiClasses, "japicmp.Superclass");
		JApiMethod jApiMethod = getJApiMethod(jApiClass.getMethods(), "newAbstractMethod");
		assertThat(jApiMethod.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.METHOD_ABSTRACT_ADDED_TO_CLASS));
		assertThat(jApiMethod.isBinaryCompatible(), is(true));
		assertThat(jApiMethod.isSourceCompatible(), is(false));
	}
@Test
public void testMethodAddedToNewInterface() throws Exception {
JarArchiveComparatorOptions options = new JarArchiveComparatorOptions();
options.setIncludeSynthetic(true);
options.setAccessModifier(AccessModifier.PRIVATE);
List<JApiClass> jApiClasses = ClassesHelper.compareClasses(options, new ClassesHelper.ClassesGenerator() {
@Override
public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
return Collections.emptyList();
}
@Override
public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
CtClass ctClass = CtInterfaceBuilder.create().name("japicmp.Test").addToClassPool(classPool);
CtMethodBuilder.create().publicAccess().abstractMethod().name("method").addToClass(ctClass);
return Collections.singletonList(ctClass);
}
});
JApiClass jApiClass = getJApiClass(jApiClasses, "japicmp.Test");
assertThat(jApiClass.getChangeStatus(), is(JApiChangeStatus.NEW));
assertThat(jApiClass.isBinaryCompatible(), is(true));
assertThat(jApiClass.isSourceCompatible(), is(true));
JApiMethod jApiMethod = getJApiMethod(jApiClass.getMethods(), "method");
assertThat(jApiMethod.getCompatibilityChanges().size(), is(0));
assertThat(jApiMethod.isBinaryCompatible(), is(true));
assertThat(jApiMethod.isSourceCompatible(), is(true));
}
	@Test
	public void testInterfaceMovedToAbstractClass() throws Exception {
		// Test now reaches Interface indirectly through a new intermediate class
		// (AbstractTest) instead of implementing it directly; the effective contract
		// is unchanged, so the comparison must report full compatibility.
		JarArchiveComparatorOptions options = new JarArchiveComparatorOptions();
		options.setIncludeSynthetic(true);
		options.setAccessModifier(AccessModifier.PRIVATE);
		List<JApiClass> jApiClasses = ClassesHelper.compareClasses(options, new ClassesHelper.ClassesGenerator() {
			@Override
			public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
				// Old version: Test implements Interface directly and provides method().
				CtClass ctInterface = CtInterfaceBuilder.create().name("Interface").addToClassPool(classPool);
				CtMethodBuilder.create().abstractMethod().returnType(CtClass.voidType).name("method").addToClass(ctInterface);
				CtClass ctClass = CtClassBuilder.create().name("Test").implementsInterface(ctInterface).addToClassPool(classPool);
				CtMethodBuilder.create().returnType(CtClass.voidType).name("method").body("int a = 42;").addToClass(ctClass);
				return Arrays.asList(ctInterface, ctClass);
			}
			@Override
			public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
				// New version: AbstractTest implements Interface, Test extends AbstractTest.
				CtClass ctInterface = CtInterfaceBuilder.create().name("Interface").addToClassPool(classPool);
				CtMethodBuilder.create().abstractMethod().returnType(CtClass.voidType).name("method").addToClass(ctInterface);
				CtClass ctAbstractClass = CtClassBuilder.create().name("AbstractTest").implementsInterface(ctInterface).addToClassPool(classPool);
				CtClass ctClass = CtClassBuilder.create().name("Test").withSuperclass(ctAbstractClass).addToClassPool(classPool);
				CtMethodBuilder.create().returnType(CtClass.voidType).name("method").body("int a = 42;").addToClass(ctClass);
				return Arrays.asList(ctInterface, ctClass, ctAbstractClass);
			}
		});
		JApiClass jApiClass = getJApiClass(jApiClasses, "Test");
		assertThat(jApiClass.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
		assertThat(jApiClass.isBinaryCompatible(), is(true));
		assertThat(jApiClass.isSourceCompatible(), is(true));
		assertThat(jApiClass.getInterfaces().size(), is(1));
		JApiMethod jApiMethod = getJApiMethod(jApiClass.getMethods(), "method");
		assertThat(jApiMethod.getCompatibilityChanges().size(), is(0));
		assertThat(jApiMethod.isBinaryCompatible(), is(true));
		assertThat(jApiMethod.isSourceCompatible(), is(true));
	}
	@Test
	public void testAbstractMethodMovedToInterface() throws Exception {
		// Pulling an abstract method up from an implementing class into its interface
		// leaves the effective contract of Test unchanged: the method on Test is
		// REMOVED yet the change stays binary and source compatible.
		JarArchiveComparatorOptions options = new JarArchiveComparatorOptions();
		options.setIncludeSynthetic(true);
		options.setAccessModifier(AccessModifier.PRIVATE);
		List<JApiClass> jApiClasses = ClassesHelper.compareClasses(options, new ClassesHelper.ClassesGenerator() {
			@Override
			public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
				// Old version: the abstract method is declared on the class itself.
				CtClass ctInterface = CtInterfaceBuilder.create().name("Interface").addToClassPool(classPool);
				CtClass ctClass = CtClassBuilder.create().name("Test").implementsInterface(ctInterface).addToClassPool(classPool);
				CtMethodBuilder.create().abstractMethod().returnType(CtClass.voidType).name("method").addToClass(ctClass);
				return Arrays.asList(ctInterface, ctClass);
			}
			@Override
			public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
				// New version: the abstract method now lives on the interface only.
				CtClass ctInterface = CtInterfaceBuilder.create().name("Interface").addToClassPool(classPool);
				CtMethodBuilder.create().abstractMethod().returnType(CtClass.voidType).name("method").addToClass(ctInterface);
				CtClass ctClass = CtClassBuilder.create().name("Test").implementsInterface(ctInterface).addToClassPool(classPool);
				return Arrays.asList(ctInterface, ctClass);
			}
		});
		JApiClass jApiClass = getJApiClass(jApiClasses, "Test");
		assertThat(jApiClass.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
		assertThat(jApiClass.isBinaryCompatible(), is(true));
		assertThat(jApiClass.isSourceCompatible(), is(true));
		assertThat(jApiClass.getInterfaces().size(), is(1));
		JApiMethod jApiMethod = getJApiMethod(jApiClass.getMethods(), "method");
		assertThat(jApiMethod.getChangeStatus(), is(JApiChangeStatus.REMOVED));
		assertThat(jApiMethod.isBinaryCompatible(), is(true));
		assertThat(jApiMethod.isSourceCompatible(), is(true));
	}
	@Test
	public void testMethodMovedFromOneInterfaceToAnother() throws Exception {
		// The abstract declaration of method() moves from Interface1 to Interface2,
		// both of which Test implements; Test itself is unaffected, so it must be
		// reported as UNCHANGED and fully compatible.
		JarArchiveComparatorOptions options = new JarArchiveComparatorOptions();
		options.setIncludeSynthetic(true);
		options.setAccessModifier(AccessModifier.PRIVATE);
		List<JApiClass> jApiClasses = ClassesHelper.compareClasses(options, new ClassesHelper.ClassesGenerator() {
			@Override
			public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
				// Old version: method() is declared on Interface1.
				CtClass ctInterface1 = CtInterfaceBuilder.create().name("Interface1").addToClassPool(classPool);
				CtMethodBuilder.create().returnType(CtClass.intType).abstractMethod().name("method").addToClass(ctInterface1);
				CtClass ctInterface2 = CtInterfaceBuilder.create().name("Interface2").addToClassPool(classPool);
				CtClass ctClass = CtClassBuilder.create().abstractModifier().name("japicmp.Test").implementsInterface(ctInterface1).implementsInterface(ctInterface2).addToClassPool(classPool);
				CtMethodBuilder.create().returnType(CtClass.intType).name("method").body("return 42;").addToClass(ctClass);
				return Arrays.asList(ctClass, ctInterface1, ctInterface2);
			}
			@Override
			public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
				// New version: method() is declared on Interface2 instead.
				CtClass ctInterface1 = CtInterfaceBuilder.create().name("Interface1").addToClassPool(classPool);
				CtClass ctInterface2 = CtInterfaceBuilder.create().name("Interface2").addToClassPool(classPool);
				CtMethodBuilder.create().returnType(CtClass.intType).abstractMethod().name("method").addToClass(ctInterface2);
				CtClass ctClass = CtClassBuilder.create().abstractModifier().name("japicmp.Test").implementsInterface(ctInterface1).implementsInterface(ctInterface2).addToClassPool(classPool);
				CtMethodBuilder.create().returnType(CtClass.intType).name("method").body("return 42;").addToClass(ctClass);
				return Arrays.asList(ctClass, ctInterface1, ctInterface2);
			}
		});
		JApiClass jApiClass = getJApiClass(jApiClasses, "japicmp.Test");
		assertThat(jApiClass.getChangeStatus(), is(JApiChangeStatus.UNCHANGED));
		assertThat(jApiClass.isBinaryCompatible(), is(true));
		assertThat(jApiClass.isSourceCompatible(), is(true));
	}
	@Test
	public void testMethodMovedFromOneAbstractClassToAnother() throws Exception {
		// The abstract declaration of method() moves between two abstract ancestors
		// (AbstractClass1 -> AbstractClass2) in the same superclass chain; the
		// concrete Test class is unaffected and must stay UNCHANGED.
		JarArchiveComparatorOptions options = new JarArchiveComparatorOptions();
		options.setIncludeSynthetic(true);
		options.setAccessModifier(AccessModifier.PRIVATE);
		List<JApiClass> jApiClasses = ClassesHelper.compareClasses(options, new ClassesHelper.ClassesGenerator() {
			@Override
			public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
				// Old version: method() is abstract on AbstractClass1 (top of the chain).
				CtClass abstractClass1 = CtClassBuilder.create().abstractModifier().name("AbstractClass1").addToClassPool(classPool);
				CtMethodBuilder.create().returnType(CtClass.intType).abstractMethod().name("method").addToClass(abstractClass1);
				CtClass abstractClass2 = CtClassBuilder.create().abstractModifier().name("AbstractClass2").withSuperclass(abstractClass1).addToClassPool(classPool);
				CtClass ctClass = CtClassBuilder.create().abstractModifier().name("japicmp.Test").withSuperclass(abstractClass2).addToClassPool(classPool);
				CtMethodBuilder.create().returnType(CtClass.intType).name("method").body("return 42;").addToClass(ctClass);
				return Arrays.asList(ctClass, abstractClass1, abstractClass2);
			}
			@Override
			public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
				// New version: method() is abstract on AbstractClass2 instead.
				CtClass abstractClass1 = CtClassBuilder.create().abstractModifier().name("AbstractClass1").addToClassPool(classPool);
				CtClass abstractClass2 = CtClassBuilder.create().abstractModifier().name("AbstractClass2").withSuperclass(abstractClass1).addToClassPool(classPool);
				CtMethodBuilder.create().returnType(CtClass.intType).abstractMethod().name("method").addToClass(abstractClass2);
				CtClass ctClass = CtClassBuilder.create().abstractModifier().name("japicmp.Test").withSuperclass(abstractClass2).addToClassPool(classPool);
				CtMethodBuilder.create().returnType(CtClass.intType).name("method").body("return 42;").addToClass(ctClass);
				return Arrays.asList(ctClass, abstractClass1, abstractClass2);
			}
		});
		JApiClass jApiClass = getJApiClass(jApiClasses, "japicmp.Test");
		assertThat(jApiClass.getChangeStatus(), is(JApiChangeStatus.UNCHANGED));
		assertThat(jApiClass.isBinaryCompatible(), is(true));
		assertThat(jApiClass.isSourceCompatible(), is(true));
	}
@Test
public void testClassNowCheckedException() throws Exception {
JarArchiveComparatorOptions options = new JarArchiveComparatorOptions();
options.setAccessModifier(AccessModifier.PRIVATE);
List<JApiClass> jApiClasses = ClassesHelper.compareClasses(options, new ClassesHelper.ClassesGenerator() {
@Override
public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
return Collections.singletonList(ctClass);
}
@Override
public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").withSuperclass(classPool.get("java.lang.Exception")).addToClassPool(classPool);
return Collections.singletonList(ctClass);
}
});
JApiClass jApiClass = getJApiClass(jApiClasses, "japicmp.Test");
assertThat(jApiClass.getChangeStatus(), is(JApiChangeStatus.MODIFIED));
assertThat(jApiClass.isBinaryCompatible(), is(true));
assertThat(jApiClass.isSourceCompatible(), is(false));
assertThat(jApiClass.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.CLASS_NOW_CHECKED_EXCEPTION));
}
@Test
public void testMethodThrowsNewCheckedException() throws Exception {
JarArchiveComparatorOptions options = new JarArchiveComparatorOptions();
List<JApiClass> jApiClasses = ClassesHelper.compareClasses(options, new ClassesHelper.ClassesGenerator() {
@Override
public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
CtMethodBuilder.create().publicAccess().name("method").addToClass(ctClass);
return Collections.singletonList(ctClass);
}
@Override
public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
CtMethodBuilder.create().publicAccess().name("method").exceptions(new CtClass[] {classPool.get("java.lang.Exception")}).addToClass(ctClass);
return Collections.singletonList(ctClass);
}
});
JApiClass jApiClass = getJApiClass(jApiClasses, "japicmp.Test");
JApiMethod method = getJApiMethod(jApiClass.getMethods(), "method");
assertThat(method.getExceptions().size(), Is.is(1));
assertThat(method.getExceptions().get(0).getChangeStatus(), Is.is(JApiChangeStatus.NEW));
assertThat(method.getExceptions().get(0).isCheckedException(), Is.is(true));
assertThat(method.isBinaryCompatible(), is(true));
assertThat(method.isSourceCompatible(), is(false));
assertThat(method.getCompatibilityChanges(), hasItem(JApiCompatibilityChange.METHOD_NOW_THROWS_CHECKED_EXCEPTION));
}
@Test
public void testMethodThrowsNewRuntimeException() throws Exception {
JarArchiveComparatorOptions options = new JarArchiveComparatorOptions();
List<JApiClass> jApiClasses = ClassesHelper.compareClasses(options, new ClassesHelper.ClassesGenerator() {
@Override
public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
CtMethodBuilder.create().publicAccess().name("method").addToClass(ctClass);
return Collections.singletonList(ctClass);
}
@Override
public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
CtMethodBuilder.create().publicAccess().name("method").exceptions(new CtClass[] {classPool.get("java.lang.RuntimeException")}).addToClass(ctClass);
return Collections.singletonList(ctClass);
}
});
JApiClass jApiClass = getJApiClass(jApiClasses, "japicmp.Test");
JApiMethod method = getJApiMethod(jApiClass.getMethods(), "method");
assertThat(method.getExceptions().size(), Is.is(1));
assertThat(method.getExceptions().get(0).getChangeStatus(), Is.is(JApiChangeStatus.NEW));
assertThat(method.getExceptions().get(0).isCheckedException(), Is.is(false));
assertThat(method.isBinaryCompatible(), is(true));
assertThat(method.isSourceCompatible(), is(true));
assertThat(method.getCompatibilityChanges().size(), is(0));
}
@Test
public void testNewMethodThrowsCheckedException() throws Exception {
JarArchiveComparatorOptions options = new JarArchiveComparatorOptions();
List<JApiClass> jApiClasses = ClassesHelper.compareClasses(options, new ClassesHelper.ClassesGenerator() {
@Override
public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
return Collections.singletonList(ctClass);
}
@Override
public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").addToClassPool(classPool);
CtMethodBuilder.create().publicAccess().name("method").exceptions(new CtClass[] {classPool.get("java.lang.Exception")}).addToClass(ctClass);
return Collections.singletonList(ctClass);
}
});
JApiClass jApiClass = getJApiClass(jApiClasses, "japicmp.Test");
JApiMethod method = getJApiMethod(jApiClass.getMethods(), "method");
assertThat(method.getExceptions().size(), Is.is(1));
assertThat(method.getExceptions().get(0).getChangeStatus(), Is.is(JApiChangeStatus.NEW));
assertThat(method.getExceptions().get(0).isCheckedException(), Is.is(true));
assertThat(method.isBinaryCompatible(), is(true));
assertThat(method.isSourceCompatible(), is(true));
}
	@Test
	public void testMemberVariableMovedToSuperclass() throws Exception {
		// A protected field pulled up from the subclass into its superclass stays
		// reachable through inheritance, so the change is fully compatible.
		JarArchiveComparatorOptions options = new JarArchiveComparatorOptions();
		List<JApiClass> jApiClasses = ClassesHelper.compareClasses(options, new ClassesHelper.ClassesGenerator() {
			@Override
			public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
				// Old version: "test" is declared on the subclass.
				CtClass superClass = CtClassBuilder.create().name("japicmp.Superclass").addToClassPool(classPool);
				CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").withSuperclass(superClass).addToClassPool(classPool);
				CtFieldBuilder.create().protectedAccess().type(CtClass.intType).name("test").addToClass(ctClass);
				return Arrays.asList(superClass, ctClass);
			}
			@Override
			public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
				// New version: "test" is declared on the superclass instead.
				CtClass superClass = CtClassBuilder.create().name("japicmp.Superclass").addToClassPool(classPool);
				CtFieldBuilder.create().protectedAccess().type(CtClass.intType).name("test").addToClass(superClass);
				CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").withSuperclass(superClass).addToClassPool(classPool);
				return Arrays.asList(superClass, ctClass);
			}
		});
		JApiClass jApiClass = getJApiClass(jApiClasses, "japicmp.Test");
		assertThat(jApiClass.isBinaryCompatible(), is(true));
		assertThat(jApiClass.isSourceCompatible(), is(true));
	}
	@Test
	public void testInterfaceImplementedBySuperclass() throws Exception {
		// Dropping a redundant "implements Interface" from Test is harmless when the
		// superclass still implements it: Test remains an instance of Interface.
		JarArchiveComparatorOptions options = new JarArchiveComparatorOptions();
		List<JApiClass> jApiClasses = ClassesHelper.compareClasses(options, new ClassesHelper.ClassesGenerator() {
			@Override
			public List<CtClass> createOldClasses(ClassPool classPool) throws Exception {
				// Old version: both Superclass and Test declare the interface.
				CtClass ctInterface = CtInterfaceBuilder.create().name("japicmp.Interface").addToClassPool(classPool);
				CtClass superClass = CtClassBuilder.create().name("japicmp.Superclass").implementsInterface(ctInterface).addToClassPool(classPool);
				CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").withSuperclass(superClass).implementsInterface(ctInterface).addToClassPool(classPool);
				return Arrays.asList(superClass, ctClass, ctInterface);
			}
			@Override
			public List<CtClass> createNewClasses(ClassPool classPool) throws Exception {
				// New version: only Superclass declares the interface.
				CtClass ctInterface = CtInterfaceBuilder.create().name("japicmp.Interface").addToClassPool(classPool);
				CtClass superClass = CtClassBuilder.create().name("japicmp.Superclass").implementsInterface(ctInterface).addToClassPool(classPool);
				CtClass ctClass = CtClassBuilder.create().name("japicmp.Test").withSuperclass(superClass).addToClassPool(classPool);
				return Arrays.asList(superClass, ctClass, ctInterface);
			}
		});
		JApiClass jApiClass = getJApiClass(jApiClasses, "japicmp.Test");
		assertThat(jApiClass.isBinaryCompatible(), is(true));
		assertThat(jApiClass.isSourceCompatible(), is(true));
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.core;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
import org.apache.beam.sdk.transforms.windowing.IntervalWindow;
import org.apache.beam.sdk.transforms.windowing.Sessions;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableSet;
import org.joda.time.Duration;
import org.joda.time.Instant;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests for {@link MergingActiveWindowSet}. */
@RunWith(JUnit4.class)
public class MergingActiveWindowSetTest {
  // Session windows with a 10ms gap: windows closer than the gap are merged.
  private Sessions windowFn;
  // Backing state into which the active window set persists itself.
  private StateInternals state;
  // The set under test.
  private MergingActiveWindowSet<IntervalWindow> set;
  // Mock merge callback used to verify onMerge notifications.
  private ActiveWindowSet.MergeCallback<IntervalWindow> callback;

  @Before
  public void setup() {
    windowFn = Sessions.withGapDuration(Duration.millis(10));
    state = InMemoryStateInternals.forKey("dummyKey");
    set = new MergingActiveWindowSet<>(windowFn, state);
    @SuppressWarnings("unchecked")
    ActiveWindowSet.MergeCallback<IntervalWindow> callback =
        mock(ActiveWindowSet.MergeCallback.class);
    this.callback = callback;
  }

  @After
  public void after() {
    set = null;
    state = null;
    windowFn = null;
  }

  /**
   * Assigns each instant to its session window(s) via the window fn and
   * registers the resulting windows as NEW in the set.
   */
  private void add(long... instants) {
    for (final long instant : instants) {
      System.out.println("ADD " + instant);
      // Minimal AssignContext: only timestamp() matters for Sessions assignment.
      Sessions.AssignContext context =
          windowFn.new AssignContext() {
            @Override
            public Object element() {
              return (Object) instant;
            }

            @Override
            public Instant timestamp() {
              return new Instant(instant);
            }

            @Override
            public BoundedWindow window() {
              return GlobalWindow.INSTANCE;
            }
          };
      for (IntervalWindow window : windowFn.assignWindows(context)) {
        set.ensureWindowExists(window);
      }
    }
  }

  /**
   * Merges {@code toBeMerged} into {@code mergeResult}, verifies the callback
   * was notified, and checks the predicted post-merge write state address.
   *
   * @return a map from each merged window to its merge result, for use with
   *     {@link #activate}
   */
  private Map<IntervalWindow, IntervalWindow> merge(
      List<IntervalWindow> toBeMerged, IntervalWindow mergeResult) throws Exception {
    // Capture the prediction BEFORE merging so it can be compared afterwards.
    IntervalWindow predictedPostMergeWriteStateAddress =
        set.mergedWriteStateAddress(toBeMerged, mergeResult);
    System.out.println("BEFORE MERGE");
    System.out.println(set);
    Map<IntervalWindow, IntervalWindow> map = new HashMap<>();
    for (IntervalWindow window : toBeMerged) {
      System.out.println("WILL MERGE " + window + " INTO " + mergeResult);
      map.put(window, mergeResult);
    }
    System.out.println("AFTER MERGE");
    set.merge(callback);
    verify(callback).onMerge(toBeMerged, mergeResult);
    System.out.println(set);
    assertEquals(predictedPostMergeWriteStateAddress, set.writeStateAddress(mergeResult));
    return map;
  }

  /**
   * Marks the (possibly merged) window for each instant ACTIVE, translating
   * through {@code map} produced by {@link #merge}, then checks invariants.
   */
  private void activate(Map<IntervalWindow, IntervalWindow> map, long... instants) {
    for (long instant : instants) {
      IntervalWindow window = window(instant, 10);
      IntervalWindow active = map.get(window);
      if (active == null) {
        // Window was not merged away; it is its own representative.
        active = window;
      }
      System.out.println("ACTIVATE " + active);
      set.ensureWindowIsActive(active);
    }
    set.checkInvariants();
  }

  /**
   * Discards temporary windows, persists the set, and verifies a freshly
   * reloaded set round-trips equal to the in-memory one.
   */
  private void cleanup() {
    System.out.println("CLEANUP");
    set.cleanupTemporaryWindows();
    set.checkInvariants();
    System.out.println(set);
    set.persist();
    MergingActiveWindowSet<IntervalWindow> reloaded = new MergingActiveWindowSet<>(windowFn, state);
    reloaded.checkInvariants();
    assertEquals(set, reloaded);
  }

  /** Shorthand for an {@link IntervalWindow} starting at {@code start} with the given size. */
  private IntervalWindow window(long start, long size) {
    return new IntervalWindow(new Instant(start), new Duration(size));
  }

  @Test
  public void testLifecycle() throws Exception {
    // Step 1: New elements show up, introducing NEW windows which are partially merged.
    // NEW 1+10
    // NEW 2+10
    // NEW 15+10
    // =>
    // ACTIVE 1+11 (target 1+11)
    // ACTIVE 15+10 (target 15+10)
    add(1, 2, 15);
    assertEquals(
        ImmutableSet.of(window(1, 10), window(2, 10), window(15, 10)),
        set.getActiveAndNewWindows());
    Map<IntervalWindow, IntervalWindow> map =
        merge(ImmutableList.of(window(1, 10), window(2, 10)), window(1, 11));
    activate(map, 1, 2, 15);
    assertEquals(ImmutableSet.of(window(1, 11), window(15, 10)), set.getActiveAndNewWindows());
    assertEquals(ImmutableSet.of(window(1, 11)), set.readStateAddresses(window(1, 11)));
    assertEquals(ImmutableSet.of(window(15, 10)), set.readStateAddresses(window(15, 10)));
    cleanup();
    // Step 2: Another element, merged into an existing ACTIVE window.
    // NEW 3+10
    // =>
    // ACTIVE 1+12 (target 1+11)
    // ACTIVE 15+10 (target 15+10)
    add(3);
    assertEquals(
        ImmutableSet.of(window(3, 10), window(1, 11), window(15, 10)),
        set.getActiveAndNewWindows());
    map = merge(ImmutableList.of(window(1, 11), window(3, 10)), window(1, 12));
    activate(map, 3);
    assertEquals(ImmutableSet.of(window(1, 12), window(15, 10)), set.getActiveAndNewWindows());
    assertEquals(ImmutableSet.of(window(1, 11)), set.readStateAddresses(window(1, 12)));
    assertEquals(ImmutableSet.of(window(15, 10)), set.readStateAddresses(window(15, 10)));
    cleanup();
    // Step 3: Another element, causing two ACTIVE windows to be merged.
    // NEW 8+10
    // =>
    // ACTIVE 1+24 (target 1+11)
    add(8);
    assertEquals(
        ImmutableSet.of(window(8, 10), window(1, 12), window(15, 10)),
        set.getActiveAndNewWindows());
    map = merge(ImmutableList.of(window(1, 12), window(8, 10), window(15, 10)), window(1, 24));
    activate(map, 8);
    assertEquals(ImmutableSet.of(window(1, 24)), set.getActiveAndNewWindows());
    assertEquals(ImmutableSet.of(window(1, 11)), set.readStateAddresses(window(1, 24)));
    cleanup();
    // Step 4: Another element, merged into an existing ACTIVE window.
    // NEW 9+10
    // =>
    // ACTIVE 1+24 (target 1+11)
    add(9);
    assertEquals(ImmutableSet.of(window(9, 10), window(1, 24)), set.getActiveAndNewWindows());
    map = merge(ImmutableList.of(window(1, 24), window(9, 10)), window(1, 24));
    activate(map, 9);
    assertEquals(ImmutableSet.of(window(1, 24)), set.getActiveAndNewWindows());
    assertEquals(ImmutableSet.of(window(1, 11)), set.readStateAddresses(window(1, 24)));
    cleanup();
    // Step 5: Another element reusing earlier window, merged into an existing ACTIVE window.
    // NEW 1+10
    // =>
    // ACTIVE 1+24 (target 1+11)
    add(1);
    assertEquals(ImmutableSet.of(window(1, 10), window(1, 24)), set.getActiveAndNewWindows());
    map = merge(ImmutableList.of(window(1, 10), window(1, 24)), window(1, 24));
    activate(map, 1);
    assertEquals(ImmutableSet.of(window(1, 24)), set.getActiveAndNewWindows());
    assertEquals(ImmutableSet.of(window(1, 11)), set.readStateAddresses(window(1, 24)));
    cleanup();
    // Step 6: Window is closed.
    set.remove(window(1, 24));
    cleanup();
    assertTrue(set.getActiveAndNewWindows().isEmpty());
  }

  @Test
  public void testLegacyState() {
    // Pre 1.4 we merged window state lazily.
    // Simulate loading an active window set with multiple state address windows.
    set.addActiveForTesting(
        window(1, 12), ImmutableList.of(window(1, 10), window(2, 10), window(3, 10)));
    // Make sure we can detect and repair the state.
    assertTrue(set.isActive(window(1, 12)));
    assertEquals(
        ImmutableSet.of(window(1, 10), window(2, 10), window(3, 10)),
        set.readStateAddresses(window(1, 12)));
    assertEquals(
        window(1, 10),
        set.mergedWriteStateAddress(
            ImmutableList.of(window(1, 10), window(2, 10), window(3, 10)), window(1, 12)));
    set.merged(window(1, 12));
    cleanup();
    // From then on we are back to the eager case.
    assertEquals(ImmutableSet.of(window(1, 10)), set.readStateAddresses(window(1, 12)));
  }
}
| |
/*
* Copyright 2014 Esri, Inc..
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.esri.gpt.catalog.search;
import com.esri.gpt.catalog.context.CatalogConfiguration;
import static com.esri.gpt.catalog.search.ResourceLinkBuilder.RESOURCE_TYPE;
import com.esri.gpt.control.georss.CswContext;
import com.esri.gpt.framework.collection.StringAttribute;
import com.esri.gpt.framework.context.RequestContext;
import com.esri.gpt.framework.jsf.MessageBroker;
import com.esri.gpt.framework.search.SearchXslRecord;
import com.esri.gpt.framework.util.Val;
import java.lang.reflect.Constructor;
import java.util.Arrays;
import javax.servlet.http.HttpServletRequest;
/**
 * Resource link builder for external CSW endpoints.
 *
 * <p>Only a subset of the links produced by {@link ResourceLinkBuilder} is
 * generated here (thumbnail, metadata and resource links); external CSW
 * records cannot support the remaining link types.
 */
public class CswResourceLinkBuilder extends ResourceLinkBuilder {

  /** Context of the external CSW endpoint the generated links point at. */
  private final CswContext cswContext;

  /** Default viewer pattern; {@code ${url}} is replaced with the encoded resource URL. */
  private static final String MAP_VIEWER_URL_PATTERN = "http://www.arcgis.com/home/webmap/viewer.html?url=${url}";

  /**
   * Service types for which preview/resource links may be generated.
   * Must remain lexicographically sorted: it is searched with
   * {@link Arrays#binarySearch(Object[], Object)}.
   */
  private static final String[] ALLOWED_SERVICES = {"ags", "kml", "wms"};

  /** Viewer URL pattern in effect (configurable via "cswMapViewerUrlPattern"). */
  private String mapViewerUrlPattern = MAP_VIEWER_URL_PATTERN;

  /**
   * Creates and initializes a link builder for an external CSW endpoint.
   *
   * <p>If the catalog parameter {@code cswResourceLinkBuilder} names a custom
   * class, that class is instantiated reflectively through its
   * {@code (CswContext, String)} constructor; any failure falls back to this
   * default implementation.
   *
   * @param context the request context (extracted from the servlet request if {@code null})
   * @param cswContext the CSW endpoint context
   * @param servletRequest the current servlet request
   * @param messageBroker the message broker (a default one is created if {@code null})
   * @return an initialized link builder, never {@code null}
   */
  public static ResourceLinkBuilder newBuilder(RequestContext context, CswContext cswContext,
      HttpServletRequest servletRequest, MessageBroker messageBroker) {
    // Fall back to defaults for any collaborators the caller did not supply.
    if (context == null) {
      context = RequestContext.extract(servletRequest);
    }
    if (messageBroker == null) {
      messageBroker = new MessageBroker();
      messageBroker.setBundleBaseName("gpt.resources.gpt");
    }
    CatalogConfiguration catCfg = context.getCatalogConfiguration();
    String cswResourceLinkBuilderClassName = Val.chkStr(catCfg.getParameters().getValue("cswResourceLinkBuilder"));
    String cswMapViewerUrlPattern = Val.chkStr(catCfg.getParameters().getValue("cswMapViewerUrlPattern"), MAP_VIEWER_URL_PATTERN);
    ResourceLinkBuilder linkBuilder = null;
    if (cswResourceLinkBuilderClassName.length() == 0) {
      linkBuilder = new CswResourceLinkBuilder(cswContext, cswMapViewerUrlPattern);
    } else {
      try {
        // A custom builder class must expose a (CswContext, String) constructor.
        Class<?> cls = Class.forName(cswResourceLinkBuilderClassName);
        Constructor<?> constructor = cls.getConstructor(CswContext.class, String.class);
        linkBuilder = (ResourceLinkBuilder) constructor.newInstance(cswContext, cswMapViewerUrlPattern);
      } catch (Exception ex) {
        // Misconfigured custom class: fall back to the default implementation.
        linkBuilder = new CswResourceLinkBuilder(cswContext, cswMapViewerUrlPattern);
      }
    }
    linkBuilder.initialize(servletRequest, context, messageBroker);
    return linkBuilder;
  }

  /**
   * Instances are created through {@link #newBuilder}.
   *
   * @param cswContext the CSW endpoint context
   * @param mapViewerUrlPattern the viewer URL pattern ({@code ${url}} placeholder)
   */
  private CswResourceLinkBuilder(CswContext cswContext, String mapViewerUrlPattern) {
    this.cswContext = cswContext;
    this.mapViewerUrlPattern = mapViewerUrlPattern;
  }

  /**
   * Builds the bind-able resource links associated with a resultant search
   * record.
   *
   * <p>Deliberately skips the content-type, open, preview, AGS, add-to-map,
   * website, details and custom links produced by the base class.
   *
   * @param xRecord the underlying CSW record
   * @param record the search result record
   */
  public void build(SearchXslRecord xRecord, SearchResultRecord record) {
    // determine the primary resource URL
    this.determineResourceUrl(xRecord, record);
    this.buildThumbnailLink(xRecord, record);
    this.buildMetadataLink(xRecord, record);
    this.buildResourceLink(xRecord, record);
  }

  /**
   * Checks the common preconditions for generating a service-based link.
   *
   * @param xRecord the underlying CSW record
   * @param linkTag the link tag to test for visibility
   * @param resourceUrl the (already trimmed) resource URL
   * @param serviceType the lower-cased service type
   * @return {@code true} if a link may be generated
   */
  private boolean isLinkableService(SearchXslRecord xRecord, String linkTag,
      String resourceUrl, String serviceType) {
    if (!xRecord.getLinks().readShowLink(linkTag)) {
      return false;
    }
    if (Arrays.binarySearch(ALLOWED_SERVICES, serviceType) < 0) {
      return false;
    }
    if (resourceUrl.indexOf("q=") >= 0 && resourceUrl.indexOf("user=") >= 0
        && resourceUrl.indexOf("max=") >= 0 && resourceUrl.indexOf("dest=") >= 0
        && resourceUrl.indexOf("destuser=") >= 0) {
      // looks like this is AGP-2-AGP registration; don't generate a link
      return false;
    }
    if (resourceUrl.length() == 0) {
      return false;
    }
    // raw metadata URLs cannot be previewed
    if (resourceUrl.toLowerCase().indexOf("?getxml=") != -1) {
      return false;
    }
    return true;
  }

  /**
   * Decorates a link with the service type and external-record flag, then
   * attaches it to the record.
   *
   * @param record the search result record receiving the link
   * @param link the link to attach
   * @param serviceType the lower-cased service type (may be empty)
   */
  private void attachServiceLink(SearchResultRecord record, ResourceLink link, String serviceType) {
    if (serviceType.length() > 0) {
      link.getParameters().add(new StringAttribute(RESOURCE_TYPE, serviceType));
    }
    if (record.isExternal()) {
      link.setForExtenalRecord(true);
    }
    record.getResourceLinks().add(link);
  }

  /**
   * Builds the add-to-map (preview) link pointing at the configured map viewer.
   *
   * @param xRecord the underlying CSW record
   * @param record the search result record
   */
  @Override
  protected void buildAddToMapLink(SearchXslRecord xRecord, SearchResultRecord record) {
    String resourceUrl = Val.chkStr(record.getResourceUrl());
    String serviceType = Val.chkStr(record.getServiceType()).toLowerCase();
    if (!isLinkableService(xRecord, ResourceLink.TAG_ADDTOMAP, resourceUrl, serviceType)) {
      return;
    }
    // build the link, substituting the encoded URL into the viewer pattern
    String url = mapViewerUrlPattern.replaceAll("\\$\\{url\\}", encodeUrlParam(resourceUrl));
    String resourceKey = "catalog.rest.preview";
    ResourceLink link = this.makeLink(url, ResourceLink.TAG_PREVIEW, resourceKey);
    attachServiceLink(record, link, serviceType);
  }

  /**
   * Builds the direct resource link for the record's service URL.
   *
   * @param xRecord the underlying CSW record
   * @param record the search result record
   */
  protected void buildResourceLink(SearchXslRecord xRecord, SearchResultRecord record) {
    String resourceUrl = Val.chkStr(record.getResourceUrl());
    String serviceType = Val.chkStr(record.getServiceType()).toLowerCase();
    if (!isLinkableService(xRecord, ResourceLink.TAG_RESOURCE, resourceUrl, serviceType)) {
      return;
    }
    // build the link
    String resourceKey = "catalog.rest.resource";
    ResourceLink link = this.makeLink(encodeUrlParam(resourceUrl), ResourceLink.TAG_RESOURCE, resourceKey);
    attachServiceLink(record, link, serviceType);
  }

  /**
   * Builds the full-metadata link, routing through the external metadata path
   * with the CSW URL and profile id as parameters.
   *
   * @param xRecord the underlying CSW record
   * @param record the search result record
   */
  @Override
  protected void buildMetadataLink(SearchXslRecord xRecord, SearchResultRecord record) {
    if (!xRecord.getLinks().readShowLink(ResourceLink.TAG_METADATA)) {
      return;
    }
    String uuid = Val.chkStr(record.getUuid());
    String url = "";
    if (uuid.length() > 0) {
      // if external we need to use a different route. Because
      // of authentication issues. Internal is automatically authenticated
      // while if we used rest getrecord for external, the workflow to
      // get the user to input username and password may be more complicated since
      // we'd have to broker the authentication
      url = this.getBaseContextPath() + this.externalMetadataPath + "?uuid="
          + encodeUrlParam(uuid) + "&cswUrl="
          + encodeUrlParam(this.cswContext.getCswUrl())
          + "&cswProfileId="
          + encodeUrlParam(this.cswContext.getCswProfileId());
    }
    if (url.length() > 0) {
      String resourceKey = "catalog.rest.viewFullMetadata";
      ResourceLink link = this.makeLink(url, ResourceLink.TAG_METADATA,
          resourceKey);
      record.getResourceLinks().add(link);
    }
  }
}
| |
package graph.algorithms.planarity;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.log4j.Logger;
import graph.algorithms.numbering.STNumbering;
import graph.elements.Edge;
import graph.elements.EdgeDirection;
import graph.elements.Graph;
import graph.elements.Vertex;
import graph.exception.CannotBeAppliedException;
/**
 * Finds planar faces of a graph. Crucial step of many graph drawing algorithms.
 * For each edge, there should be a right and a left face.
 * If (v,w) is an edge, the left face is the one that contains (v,w) and
 * the right face is the one that contains (w,v).
 * @author Renata
 * @param <V> The vertex type
 * @param <E> The edge type
 */
public class PlanarFaces<V extends Vertex, E extends Edge<V>> {

	/**
	 * Map contains an edge and a list of faces.
	 * The list will only contain two faces:
	 * the first one being the left one
	 * and the second being the right one.
	 */
	private Map<E, List<List<E>>> edgeFacesMap;

	/**
	 * List of all faces. Nothing new compared
	 * to the map above, but this makes it more practical
	 * when, for example, a face containing certain vertex needs to be found.
	 */
	private List<List<E>> allFaces;

	/**
	 * Complete embedding: for each vertex, the clockwise-ordered list of its edges.
	 */
	private Map<V,List<E>> planarEmbedding;

	/** The graph whose faces are being formed. */
	private Graph<V,E> graph;

	/** Enables verbose logging of every traversal step. */
	private boolean debug = false;

	private Logger log = Logger.getLogger(PlanarFaces.class);

	/**
	 * @param graph The graph whose planar faces should be found
	 */
	public PlanarFaces(Graph<V,E> graph){
		this.graph = graph;
	}

	/**
	 * @param graph The graph whose planar faces should be found
	 * @param stNumbering An st-numbering of the graph
	 *        NOTE(review): currently ignored — the embedding computes its own
	 *        st-numbering in {@link #formFaces(Vertex, Vertex)}; confirm intent.
	 */
	public PlanarFaces(Graph<V,E> graph, STNumbering<V, E> stNumbering){
		this.graph = graph;
	}

	/**
	 * Runs the algorithm for forming planar faces.
	 * Populates {@link #edgeFacesMap}, {@link #allFaces} and
	 * {@link #planarEmbedding}.
	 * @param s The first vertex of the st-numbering
	 * @param t The last vertex of the st-numbering
	 * @throws CannotBeAppliedException If the graph is not planar
	 */
	public void formFaces(V s, V t) throws CannotBeAppliedException{
		//Select some edge (v,w)
		//go from v to w
		//find the closest edge in A(w) to (v,w) in the clockwise direction
		//and keep going until we return to v
		//Do this until all edges are traversed twice, once in every direction
		if (debug)
			log.info("Form faces");

		edgeFacesMap = new HashMap<E, List<List<E>>>();
		allFaces = new ArrayList<List<E>>();
		Embedding<V,E> embedding = PlanarEmbedding.emedGraph(graph, s, t);

		for (E e : graph.getEdges())
			edgeFacesMap.put(e, new ArrayList<List<E>>());

		List<E> toDestinationTraversed = new ArrayList<E>();
		List<E> toOriginTraversed = new ArrayList<E>();
		List<E> allEdges = new ArrayList<E>();
		//initially add all edges, and as an edge is traversed
		//from origin to destination, remove it
		allEdges.addAll(graph.getEdges());

		//to destination is from a vertex with lower st-number to
		//a vertex with higher st-number
		//to origin is the other way around
		Map<V,Integer> stNumbering = embedding.getStNumbering();
		planarEmbedding = embedding.getEmbedding();

		if (debug){
			log.info("st numbering " + stNumbering);
			log.info("Embedding: " + embedding);
		}

		int numOfEdges = allEdges.size();
		List<EdgeDirection> edgeDirections = new ArrayList<EdgeDirection>();
		List<V> orderedVertices = new ArrayList<V>(2);

		//every edge must be traversed once in each direction
		while (toDestinationTraversed.size() < numOfEdges && toOriginTraversed.size() < numOfEdges){
			//creating a new list every time because
			//different references are needed
			List<E> faceEdges = new ArrayList<E>();
			edgeDirections.clear();
			E edge = allEdges.get(0);
			toDestinationTraversed.add(edge);
			allEdges.remove(0);
			getOrderedEdgeVertices(stNumbering, edge, orderedVertices);
			V v = orderedVertices.get(0);
			V w = orderedVertices.get(1);
			V first = v;
			if (debug){
				log.info("starting edge " + edge);
				log.info("v " + v);
				log.info("w " + w);
			}
			faceEdges.add(edge);
			edgeDirections.add(EdgeDirection.TO_DESTINATION);

			//walk the face boundary until the traversal returns to the start vertex
			while (w != first){
				List<E> wEdges = planarEmbedding.get(w);
				//find the next edge to take in vertex w
				int edgeIndex = wEdges.indexOf(edge);
				//wrap around the circular adjacency list
				edgeIndex = wEdges.size() - 1 == edgeIndex ? 0 : edgeIndex + 1;
				edge = wEdges.get(edgeIndex);
				faceEdges.add(edge);
				if (debug)
					log.info("Edge " + edge);
				getOrderedEdgeVertices(stNumbering, edge, orderedVertices);
				V v1 = orderedVertices.get(0);
				V v2 = orderedVertices.get(1);
				if (w == v1){
					toDestinationTraversed.add(edge);
					edgeDirections.add(EdgeDirection.TO_DESTINATION);
					w = v2;
					allEdges.remove(edge);
				}
				else{
					toOriginTraversed.add(edge);
					edgeDirections.add(EdgeDirection.TO_ORIGIN);
					w = v1;
				}
				if (debug)
					log.info("next w " + w);
			}

			if (debug){
				log.info("face edges " + faceEdges);
				log.info("directions " + edgeDirections);
			}
			allFaces.add(faceEdges);

			//a face traversed TO_ORIGIN is the edge's right face (appended),
			//a face traversed TO_DESTINATION is its left face (prepended)
			for (int i = 0; i < faceEdges.size(); i++){
				E currentEdge = faceEdges.get(i);
				EdgeDirection direction = edgeDirections.get(i);
				if (direction == EdgeDirection.TO_ORIGIN)
					edgeFacesMap.get(currentEdge).add(faceEdges);
				else
					edgeFacesMap.get(currentEdge).add(0, faceEdges);
			}
		}

		//guard restored: dumping the full map unconditionally was a debug leftover
		if (debug)
			log.info(edgeFacesMap);
	}

	/**
	 * Orders an edge's endpoints by ascending st-number into the supplied list.
	 * @param stNumbering st-numbering of the graph's vertices
	 * @param edge the edge whose endpoints are ordered
	 * @param orderedVertices output list; cleared and filled with the two endpoints
	 */
	private void getOrderedEdgeVertices(Map<V,Integer> stNumbering, E edge, List<V> orderedVertices){
		V v1 = edge.getOrigin();
		V v2 = edge.getDestination();
		orderedVertices.clear();
		if (stNumbering.get(v1) < stNumbering.get(v2)){
			orderedVertices.add(0, v1);
			orderedVertices.add(1, v2);
		}
		else{
			orderedVertices.add(0, v2);
			orderedVertices.add(1, v1);
		}
	}

	/**
	 * Left face of the given edge
	 * @param edge Edge
	 * @return Edge's left face
	 */
	public List<E> leftFaceOf(E edge){
		return edgeFacesMap.get(edge).get(0);
	}

	/**
	 * Right face of the given edge
	 * @param edge Edge
	 * @return Edge's right face
	 */
	public List<E> rightFaceOf(E edge){
		return edgeFacesMap.get(edge).get(1);
	}

	/**
	 * @return All planar faces
	 */
	public List<List<E>> getAllFaces() {
		return allFaces;
	}

	/**
	 * @param allFaces the allFaces to set
	 */
	public void setAllFaces(List<List<E>> allFaces) {
		this.allFaces = allFaces;
	}

	/**
	 * @return the edgeFacesMap
	 */
	public Map<E, List<List<E>>> getEdgeFacesMap() {
		return edgeFacesMap;
	}

	/**
	 * @return the planar embedding computed by {@link #formFaces(Vertex, Vertex)}
	 */
	public Map<V, List<E>> getPlanarEmbedding() {
		return planarEmbedding;
	}
}
| |
package com.magicmicky.habitrpglibrary.habits;
/**
 * Holds the visual appearance of a user: skin, hair, sizing, shirt color,
 * the currently equipped items and an optional costume.
 *
 * @author MagicMicky
 */
public class UserLook {

	private String skin;
	private String shirtColor;
	private UserHair hair;
	private String size;
	private boolean costume;
	private UserItems items, costumeItems;

	/**
	 * Builds a look from its individual components.
	 * NOTE(review): {@code armorSet} is not used by this constructor —
	 * confirm whether it should feed into the equipped items.
	 */
	public UserLook(UserHair hair, String skin, String armorSet, String size, String shirtColor
			, String armor, String head, String shield, String weapon) {
		this.hair = hair;
		this.skin = skin;
		this.size = size;
		this.shirtColor = shirtColor;
		this.setItems(new UserItems(armor, head, shield, weapon));
	}

	/** Builds an empty look with blank equipped items. */
	public UserLook () {
		this.setItems(new UserItems());
	}

	/** @return the hair of the user. */
	public UserHair getHair() {
		return this.hair;
	}

	/** @param hair the hair to set */
	public void setHair(UserHair hair) {
		this.hair = hair;
	}

	/** @return the skin of the user */
	public String getSkin() {
		return this.skin;
	}

	/** @param skin the skin to set */
	public void setSkin(String skin) {
		this.skin = skin;
	}

	/**
	 * Intentionally returns an empty string; the descriptive rendering is
	 * currently disabled.
	 */
	@Override
	public String toString() {
		return "";
	}

	/** @return the items */
	public UserItems getItems() {
		return this.items;
	}

	/** @param items the items to set */
	public void setItems(UserItems items) {
		this.items = items;
	}

	/** @return the shirtColor */
	public String getShirtColor() {
		return this.shirtColor;
	}

	/** @param shirtColor the shirtColor to set */
	public void setShirtColor(String shirtColor) {
		this.shirtColor = shirtColor;
	}

	/** @return the size */
	public String getSize() {
		return this.size;
	}

	/** @param size the size to set */
	public void setSize(String size) {
		this.size = size;
	}

	/**
	 * Set whether or not the user is using a costume
	 * @param costume
	 */
	public void setCostume(boolean costume) {
		this.costume = costume;
	}

	/** @return whether or not the user is using a costume */
	public boolean isCostume() {
		return this.costume;
	}

	/**
	 * Set the costume items of the user
	 * @param costumeItems
	 */
	public void setCostumeItems(UserItems costumeItems) {
		this.costumeItems = costumeItems;
	}

	/** @return the costume items of the user */
	public UserItems getCostumeItems() {
		return this.costumeItems;
	}

	/**
	 * A set of equippable items: armor, head gear, shield and weapon,
	 * each identified by a string code.
	 */
	public static class UserItems {

		private String armor;
		private String head;
		private String shield;
		private String weapon;

		/** Builds an empty item set. */
		public UserItems() {
		}

		/** Builds an item set from the four item codes. */
		public UserItems(String armor, String head, String shield, String weapon) {
			this.setArmor(armor);
			this.setHead(head);
			this.setShield(shield);
			this.setWeapon(weapon);
		}

		/** @return the armor */
		public String getArmor() {
			return this.armor;
		}

		/**
		 * the armor of the user ( a 0-5 value)
		 * @param armor the armor to set
		 */
		public void setArmor(String armor) {
			this.armor = armor;
		}

		/** @return the head */
		public String getHead() {
			return this.head;
		}

		/**
		 * The helmet of the user (0-5)
		 * @param head the head to set
		 */
		public void setHead(String head) {
			this.head = head;
		}

		/** @return the shield */
		public String getShield() {
			return this.shield;
		}

		/** @param shield the shield to set */
		public void setShield(String shield) {
			this.shield = shield;
		}

		/** @return the weapon */
		public String getWeapon() {
			return this.weapon;
		}

		/** @param weapon the weapon to set */
		public void setWeapon(String weapon) {
			this.weapon = weapon;
		}
	}
}
| |
/*******************************************************************************
* Copyright 2015 MobileMan GmbH
* www.mobileman.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
/**
* PatientMedication.java
*
* Project: projecth
*
* @author mobileman
* @date 25.11.2010
* @version 1.0
*
* (c) 2010 MobileMan GmbH
*/
package com.mobileman.projecth.domain.patient.medication;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import org.hibernate.annotations.ForeignKey;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.annotations.Store;
import com.mobileman.projecth.domain.disease.Disease;
import com.mobileman.projecth.domain.medicine.Medication;
import com.mobileman.projecth.domain.patient.Patient;
/**
 * Represents medication consumed by a patient.
 *
 * <p>Implements {@link Serializable}: the class already declared a
 * {@code serialVersionUID} but never implemented the interface, making the
 * field dead; JPA entities are conventionally serializable.
 *
 * @author mobileman
 *
 */
@Entity
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE)
@Table(name = "patient_medication", schema = "public" )
@Indexed
public class PatientMedication implements Serializable {

	/** Serialization version. */
	private static final long serialVersionUID = 1L;

	private Long id;
	private Patient patient;
	private Disease disease;
	private Medication medication;
	// creation timestamp of the record
	private Date timestamp;
	// date/time the medication was actually consumed
	private Date consumptionDate;
	private BigDecimal amount;
	private String comment;

	/**
	 * @return id of an entity
	 */
	@Id
	@GeneratedValue(strategy=GenerationType.SEQUENCE)
	@Column(name = "id", unique = true, nullable = false)
	public Long getId() {
		return this.id;
	}

	/**
	 * @param id id of an entity
	 */
	public void setId(Long id) {
		this.id = id;
	}

	/**
	 * @return patient to whom the medication belongs
	 */
	@ManyToOne(fetch = FetchType.LAZY, optional=false)
	@JoinColumn(name = "patient_id", nullable = false)
	@ForeignKey(name = "fk_patient_medication_patient_id")
	public Patient getPatient() {
		return this.patient;
	}

	/**
	 * @param patient patient to whom the medication belongs
	 */
	public void setPatient(Patient patient) {
		this.patient = patient;
	}

	/**
	 * Gets a disease
	 *
	 * @return disease
	 */
	@ManyToOne(fetch = FetchType.LAZY, optional=false)
	@JoinColumn(name = "disease_id", nullable = false)
	@ForeignKey(name = "fk_patient_medication_disease_id")
	public Disease getDisease() {
		return this.disease;
	}

	/**
	 * Setter for disease
	 *
	 * @param disease new value of disease
	 */
	public void setDisease(Disease disease) {
		this.disease = disease;
	}

	/**
	 * @return consumed medicine
	 */
	@ManyToOne(fetch = FetchType.LAZY)
	@JoinColumn(name = "medicine_id", nullable = false)
	@ForeignKey(name = "fk_patient_medication_medicine_id")
	public Medication getMedication() {
		return this.medication;
	}

	/**
	 * @param medicine consumed medicine
	 */
	public void setMedication(Medication medicine) {
		this.medication = medicine;
	}

	/**
	 * @return creation date/time
	 */
	@Temporal(TemporalType.TIMESTAMP)
	@Column(name = "\"timestamp\"", nullable = false)
	public Date getTimestamp() {
		return this.timestamp;
	}

	/**
	 * @param timestamp creation date/time
	 */
	public void setTimestamp(Date timestamp) {
		this.timestamp = timestamp;
	}

	/**
	 * @return amount of consumed medicine
	 */
	@Column(name = "amount", nullable = false)
	public BigDecimal getAmount() {
		return this.amount;
	}

	/**
	 * @param amount amount of consumed medicine
	 */
	public void setAmount(BigDecimal amount) {
		this.amount = amount;
	}

	/**
	 * @return free-text comment, may be {@code null}
	 */
	@Column(name = "comment", nullable = true)
	@Field(index=org.hibernate.search.annotations.Index.TOKENIZED, store=Store.NO)
	public String getComment() {
		return this.comment;
	}

	/**
	 * @param comment new value of comment
	 */
	public void setComment(String comment) {
		this.comment = comment;
	}

	/**
	 * @return date/time the medication was consumed
	 */
	// "cumsuption_date" is misspelled in the database schema; the name here
	// must match the schema — do not "fix" it without a migration.
	@Temporal(TemporalType.TIMESTAMP)
	@Column(name = "cumsuption_date", nullable = false)
	public Date getConsumptionDate() {
		return this.consumptionDate;
	}

	/**
	 * @param consumptionDate date/time the medication was consumed
	 */
	public void setConsumptionDate(Date consumptionDate) {
		this.consumptionDate = consumptionDate;
	}
}
| |
/*
* This file is part of the Jikes RVM project (http://jikesrvm.org).
*
* This file is licensed to You under the Common Public License (CPL);
* You may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.opensource.org/licenses/cpl1.0.php
*
* See the COPYRIGHT.txt file distributed with this work for information
* regarding copyright ownership.
*/
package org.jikesrvm.compilers.opt.lir2mir;
import static org.jikesrvm.SizeConstants.LOG_BYTES_IN_ADDRESS;
import static org.jikesrvm.compilers.opt.ir.Operators.ARRAYLENGTH_opcode;
import static org.jikesrvm.compilers.opt.ir.Operators.DOUBLE_2LONG_opcode;
import static org.jikesrvm.compilers.opt.ir.Operators.DOUBLE_REM_opcode;
import static org.jikesrvm.compilers.opt.ir.Operators.FLOAT_2LONG_opcode;
import static org.jikesrvm.compilers.opt.ir.Operators.FLOAT_REM_opcode;
import static org.jikesrvm.compilers.opt.ir.Operators.GET_ARRAY_ELEMENT_TIB_FROM_TIB_opcode;
import static org.jikesrvm.compilers.opt.ir.Operators.GET_CLASS_TIB_opcode;
import static org.jikesrvm.compilers.opt.ir.Operators.GET_DOES_IMPLEMENT_FROM_TIB_opcode;
import static org.jikesrvm.compilers.opt.ir.Operators.GET_OBJ_TIB_opcode;
import static org.jikesrvm.compilers.opt.ir.Operators.GET_SUPERCLASS_IDS_FROM_TIB_opcode;
import static org.jikesrvm.compilers.opt.ir.Operators.GET_TYPE_FROM_TIB_opcode;
import static org.jikesrvm.compilers.opt.ir.Operators.INT_LOAD;
import static org.jikesrvm.compilers.opt.ir.Operators.LONG_2DOUBLE_opcode;
import static org.jikesrvm.compilers.opt.ir.Operators.LONG_2FLOAT_opcode;
import static org.jikesrvm.compilers.opt.ir.Operators.LONG_DIV_opcode;
import static org.jikesrvm.compilers.opt.ir.Operators.LONG_REM_opcode;
import static org.jikesrvm.compilers.opt.ir.Operators.REF_LOAD;
import static org.jikesrvm.compilers.opt.ir.Operators.SYSCALL;
import static org.jikesrvm.compilers.opt.ir.Operators.SYSCALL_opcode;
import static org.jikesrvm.objectmodel.TIBLayoutConstants.TIB_ARRAY_ELEMENT_TIB_INDEX;
import static org.jikesrvm.objectmodel.TIBLayoutConstants.TIB_DOES_IMPLEMENT_INDEX;
import static org.jikesrvm.objectmodel.TIBLayoutConstants.TIB_SUPERCLASS_IDS_INDEX;
import static org.jikesrvm.objectmodel.TIBLayoutConstants.TIB_TYPE_INDEX;
import org.jikesrvm.VM;
import org.jikesrvm.ArchitectureSpecificOpt.CallingConvention;
import org.jikesrvm.ArchitectureSpecificOpt.ComplexLIR2MIRExpansion;
import org.jikesrvm.ArchitectureSpecificOpt.ConvertALUOperators;
import org.jikesrvm.ArchitectureSpecificOpt.NormalizeConstants;
import org.jikesrvm.classloader.RVMType;
import org.jikesrvm.compilers.opt.DefUse;
import org.jikesrvm.compilers.opt.NullCheckCombining;
import org.jikesrvm.compilers.opt.OptOptions;
import org.jikesrvm.compilers.opt.OptimizingCompilerException;
import org.jikesrvm.compilers.opt.depgraph.DepGraph;
import org.jikesrvm.compilers.opt.driver.CompilerPhase;
import org.jikesrvm.compilers.opt.driver.OptimizationPlanAtomicElement;
import org.jikesrvm.compilers.opt.driver.OptimizationPlanCompositeElement;
import org.jikesrvm.compilers.opt.driver.OptimizationPlanElement;
import org.jikesrvm.compilers.opt.driver.OptimizingCompiler;
import org.jikesrvm.compilers.opt.hir2lir.ConvertToLowLevelIR;
import org.jikesrvm.compilers.opt.ir.BasicBlock;
import org.jikesrvm.compilers.opt.ir.Binary;
import org.jikesrvm.compilers.opt.ir.Call;
import org.jikesrvm.compilers.opt.ir.GuardedBinary;
import org.jikesrvm.compilers.opt.ir.GuardedUnary;
import org.jikesrvm.compilers.opt.ir.IR;
import org.jikesrvm.compilers.opt.ir.IRTools;
import org.jikesrvm.compilers.opt.ir.Instruction;
import org.jikesrvm.compilers.opt.ir.Load;
import org.jikesrvm.compilers.opt.ir.MIRInfo;
import org.jikesrvm.compilers.opt.ir.Operators;
import org.jikesrvm.compilers.opt.ir.Unary;
import org.jikesrvm.compilers.opt.ir.operand.AddressConstantOperand;
import org.jikesrvm.compilers.opt.ir.operand.LocationOperand;
import org.jikesrvm.compilers.opt.ir.operand.MethodOperand;
import org.jikesrvm.compilers.opt.ir.operand.Operand;
import org.jikesrvm.compilers.opt.ir.operand.TypeOperand;
import org.jikesrvm.compilers.opt.liveness.LiveAnalysis;
import org.jikesrvm.objectmodel.JavaHeader;
import org.jikesrvm.objectmodel.ObjectModel;
import org.jikesrvm.runtime.Entrypoints;
import org.vmmagic.unboxed.Offset;
/**
* Convert an IR object from LIR to MIR via BURS
*/
public final class ConvertLIRtoMIR extends OptimizationPlanCompositeElement {

  /**
   * Create this phase element as a composite of other elements.
   * The stages run in order; by the end of the pipeline the IR has been
   * fully rewritten from LIR into well-formed MIR.
   */
  public ConvertLIRtoMIR() {
    super("Instruction Selection", new OptimizationPlanElement[]{
        // Stage 1: Reduce the LIR operator set to a core set of operators.
        new OptimizationPlanAtomicElement(new ReduceOperators()),
        // Stage 2: Convert ALU operators
        new OptimizationPlanAtomicElement(new ConvertALUOperators()),
        // Stage 3: Normalize usage of constants to simplify Stage 3.
        new OptimizationPlanAtomicElement(new NormalizeConstantsPhase()),
        // Stage 4a: Compute liveness information for DepGraph
        new OptimizationPlanAtomicElement(new DoLiveness()),
        // Stage 4b: Block by block build DepGraph and do
        //           BURS based instruction selection.
        new OptimizationPlanAtomicElement(new DoBURS()),
        // Stage 5: Handle complex operators
        //          (those that expand to multiple basic blocks of MIR).
        new OptimizationPlanAtomicElement(new ComplexOperators()),
        // Stage 6: Use validation operands to do null check combining,
        //          and then finish the removal of all validation
        //          operands (they are not present in the MIR).
        new OptimizationPlanAtomicElement(new NullCheckCombining() {
          public void perform(IR ir) {
            super.perform(ir);
            // ir now contains well formed MIR; flip the stage marker and
            // attach the MIR-specific bookkeeping object.
            ir.IRStage = IR.MIR;
            ir.MIRInfo = new MIRInfo(ir);
          }
        })});
  }

  /**
   * Stage 1: Reduce the LIR operator set to a core set of operators.
   * Each reducible LIR instruction is mutated in place, either into an
   * explicit load (TIB/array-length accesses) or into a SYSCALL to a
   * runtime entrypoint (long division, FP remainder, long&lt;-&gt;FP
   * conversions) on platforms whose BURS rules do not handle the
   * operator directly.
   */
  private static final class ReduceOperators extends CompilerPhase {
    public String getName() {
      return "Reduce Operators";
    }

    // Phase is stateless, so the same instance can be reused.
    public CompilerPhase newExecution(IR ir) {
      return this;
    }

    public void perform(IR ir) {
      for (Instruction s = ir.firstInstructionInCodeOrder(); s != null; s = s.nextInstructionInCodeOrder()) {
        switch (s.getOpcode()) {
          case ARRAYLENGTH_opcode: {
            // array_ref[ObjectModel.getArrayLengthOffset()] contains the length
            Load.mutate(s,
                        INT_LOAD,
                        GuardedUnary.getClearResult(s),
                        GuardedUnary.getClearVal(s),
                        IRTools.AC(ObjectModel.getArrayLengthOffset()),
                        new LocationOperand(),
                        GuardedUnary.getClearGuard(s));
          }
          break;

          case GET_OBJ_TIB_opcode:
            // Object's TIB is a load from the object header.
            // TODO: valid location operand.
            Operand address = GuardedUnary.getClearVal(s);
            Load.mutate(s,
                        Operators.REF_LOAD,
                        GuardedUnary.getClearResult(s),
                        address,
                        new AddressConstantOperand(JavaHeader.getTibOffset()),
                        null,
                        GuardedUnary.getClearGuard(s));
            break;

          case GET_CLASS_TIB_opcode: {
            // A class's TIB is loaded from a statically-known JTOC slot.
            RVMType type = ((TypeOperand) Unary.getVal(s)).getVMType();
            Offset offset = type.getTibOffset();
            Load.mutate(s,
                        REF_LOAD,
                        Unary.getClearResult(s),
                        ir.regpool.makeJTOCOp(ir, s),
                        IRTools.AC(offset),
                        new LocationOperand(offset));
          }
          break;

          case GET_TYPE_FROM_TIB_opcode: {
            // TIB slot TIB_TYPE_INDEX holds the RVMType object.
            // TODO: Valid location operand?
            Load.mutate(s,
                        REF_LOAD,
                        Unary.getClearResult(s),
                        Unary.getClearVal(s),
                        IRTools.AC(Offset.fromIntZeroExtend(TIB_TYPE_INDEX << LOG_BYTES_IN_ADDRESS)),
                        null);
          }
          break;

          case GET_SUPERCLASS_IDS_FROM_TIB_opcode: {
            // TIB slot holding the superclass-ids array (for subtype tests).
            // TODO: Valid location operand?
            Load.mutate(s,
                        REF_LOAD,
                        Unary.getClearResult(s),
                        Unary.getClearVal(s),
                        IRTools.AC(Offset.fromIntZeroExtend(TIB_SUPERCLASS_IDS_INDEX << LOG_BYTES_IN_ADDRESS)),
                        null);
          }
          break;

          case GET_DOES_IMPLEMENT_FROM_TIB_opcode: {
            // TIB slot holding the does-implement bit vector (interface tests).
            // TODO: Valid location operand?
            Load.mutate(s,
                        REF_LOAD,
                        Unary.getClearResult(s),
                        Unary.getClearVal(s),
                        IRTools.AC(Offset.fromIntZeroExtend(TIB_DOES_IMPLEMENT_INDEX << LOG_BYTES_IN_ADDRESS)),
                        null);
          }
          break;

          case GET_ARRAY_ELEMENT_TIB_FROM_TIB_opcode: {
            // TIB slot holding the element type's TIB (for array stores).
            // TODO: Valid location operand?
            Load.mutate(s,
                        REF_LOAD,
                        Unary.getClearResult(s),
                        Unary.getClearVal(s),
                        IRTools.AC(Offset.fromIntZeroExtend(TIB_ARRAY_ELEMENT_TIB_INDEX << LOG_BYTES_IN_ADDRESS)),
                        null);
          }
          break;

          case LONG_DIV_opcode: {
            // 64-bit PowerPC has a native long divide.
            if (VM.BuildForPowerPC && VM.BuildFor64Addr) break; // don't reduce operator -- leave for BURS
            // Otherwise reduce to a syscall into the runtime.
            Call.mutate2(s,
                         SYSCALL,
                         GuardedBinary.getClearResult(s),
                         null,
                         MethodOperand.STATIC(Entrypoints.sysLongDivideIPField),
                         GuardedBinary.getClearVal1(s),
                         GuardedBinary.getClearVal2(s));
            ConvertToLowLevelIR.expandSysCallTarget(s, ir);
            CallingConvention.expandSysCall(s, ir);
          }
          break;

          case LONG_REM_opcode: {
            if (VM.BuildForPowerPC && VM.BuildFor64Addr) break; // don't reduce operator -- leave for BURS
            Call.mutate2(s,
                         SYSCALL,
                         GuardedBinary.getClearResult(s),
                         null,
                         MethodOperand.STATIC(Entrypoints.sysLongRemainderIPField),
                         GuardedBinary.getClearVal1(s),
                         GuardedBinary.getClearVal2(s));
            ConvertToLowLevelIR.expandSysCallTarget(s, ir);
            CallingConvention.expandSysCall(s, ir);
          }
          break;

          case FLOAT_REM_opcode:
          case DOUBLE_REM_opcode: {
            // PowerPC has no FP remainder instruction; call the runtime.
            if (VM.BuildForPowerPC) {
              Call.mutate2(s,
                           SYSCALL,
                           Binary.getClearResult(s),
                           null,
                           MethodOperand.STATIC(Entrypoints.sysDoubleRemainderIPField),
                           Binary.getClearVal1(s),
                           Binary.getClearVal2(s));
              ConvertToLowLevelIR.expandSysCallTarget(s, ir);
              CallingConvention.expandSysCall(s, ir);
            }
          }
          break;

          case LONG_2FLOAT_opcode: {
            if (VM.BuildForPowerPC) {
              Call.mutate1(s,
                           SYSCALL,
                           Unary.getClearResult(s),
                           null,
                           MethodOperand.STATIC(Entrypoints.sysLongToFloatIPField),
                           Unary.getClearVal(s));
              ConvertToLowLevelIR.expandSysCallTarget(s, ir);
              CallingConvention.expandSysCall(s, ir);
            }
          }
          break;

          case LONG_2DOUBLE_opcode: {
            if (VM.BuildForPowerPC) {
              Call.mutate1(s,
                           SYSCALL,
                           Unary.getClearResult(s),
                           null,
                           MethodOperand.STATIC(Entrypoints.sysLongToDoubleIPField),
                           Unary.getClearVal(s));
              ConvertToLowLevelIR.expandSysCallTarget(s, ir);
              CallingConvention.expandSysCall(s, ir);
            }
          }
          break;

          case FLOAT_2LONG_opcode: {
            // PPC64 and full-SSE2 IA32 handle this natively in BURS.
            if (VM.BuildForPowerPC && VM.BuildFor64Addr || VM.BuildForSSE2Full) break; // don't reduce operator -- leave for BURS
            Call.mutate1(s,
                         SYSCALL,
                         Unary.getClearResult(s),
                         null,
                         MethodOperand.STATIC(Entrypoints.sysFloatToLongIPField),
                         Unary.getClearVal(s));
            ConvertToLowLevelIR.expandSysCallTarget(s, ir);
            CallingConvention.expandSysCall(s, ir);
          }
          break;

          case DOUBLE_2LONG_opcode: {
            if (VM.BuildForPowerPC && VM.BuildFor64Addr || VM.BuildForSSE2Full) break; // don't reduce operator -- leave for BURS
            Call.mutate1(s,
                         SYSCALL,
                         Unary.getClearResult(s),
                         null,
                         MethodOperand.STATIC(Entrypoints.sysDoubleToLongIPField),
                         Unary.getClearVal(s));
            ConvertToLowLevelIR.expandSysCallTarget(s, ir);
            CallingConvention.expandSysCall(s, ir);
          }
          break;

          case SYSCALL_opcode:
            // Already a syscall; just expand the calling convention.
            CallingConvention.expandSysCall(s, ir);
            break;

          default:
            break;
        }
      }
    }
  }

  /**
   * Stage 2: Normalize usage of int constants to make less work in Stage 3.
   */
  private static final class NormalizeConstantsPhase extends CompilerPhase {
    public String getName() {
      return "Normalize Constants";
    }

    public CompilerPhase newExecution(IR ir) {
      return this;
    }

    public void perform(IR ir) {
      NormalizeConstants.perform(ir);
    }
  }

  /**
   * Compute liveness for exception handlers (when enabled) so the
   * dependence graph built by DoBURS is accurate across PEIs.
   */
  private static final class DoLiveness extends CompilerPhase {
    public String getName() {
      return "Live Handlers";
    }

    public CompilerPhase newExecution(IR ir) {
      return this;
    }

    public void perform(IR ir) {
      if (ir.options.HANDLER_LIVENESS) {
        new LiveAnalysis(false, false, true).perform(ir);
      } else {
        ir.setHandlerLivenessComputed(false);
      }
    }
  }

  /**
   * Stage 3: Block by block build DepGraph and do BURS based
   * instruction selection.
   */
  private static final class DoBURS extends CompilerPhase {
    public String getName() {
      return "DepGraph & BURS";
    }

    public CompilerPhase newExecution(IR ir) {
      return this;
    }

    public void reportAdditionalStats() {
      VM.sysWrite("  ");
      // counter1 = infrequent blocks seen, counter2 = total non-empty blocks.
      VM.sysWrite(container.counter1 / container.counter2 * 100, 2);
      VM.sysWrite("% Infrequent BBs");
    }

    // IR is inconsistent state between DoBURS and ComplexOperators.
    // It isn't verifiable again until after ComplexOperators completes.
    public void verify(IR ir) { }

    public void perform(IR ir) {
      OptOptions options = ir.options;
      DefUse.recomputeSpansBasicBlock(ir);
      MinimalBURS mburs = new MinimalBURS(ir);
      NormalBURS burs = new NormalBURS(ir);
      for (BasicBlock bb = ir.firstBasicBlockInCodeOrder(); bb != null; bb = bb.nextBasicBlockInCodeOrder()) {
        if (bb.isEmpty()) continue;
        container.counter2++;
        if (bb.getInfrequent()) {
          container.counter1++;
          if (options.FREQ_FOCUS_EFFORT) {
            // Basic block is infrequent -- use quick and dirty instruction selection
            mburs.prepareForBlock(bb);
            mburs.invoke(bb);
            mburs.finalizeBlock(bb);
            continue;
          }
        }
        // Use Normal instruction selection.
        burs.prepareForBlock(bb);
        // I. Build Dependence graph for the basic block
        DepGraph dgraph = new DepGraph(ir, bb.firstRealInstruction(), bb.lastRealInstruction(), bb);
        if (options.PRINT_DG_BURS) {
          // print dependence graph.
          OptimizingCompiler.header("DepGraph", ir.method);
          dgraph.printDepGraph();
          OptimizingCompiler.bottom("DepGraph", ir.method);
        }
        // II. Invoke BURS and rewrite block from LIR to MIR
        try {
          burs.invoke(dgraph);
        } catch (OptimizingCompilerException e) {
          // Dump the IR so the failure is debuggable, then propagate.
          System.err.println("Exception occurred in ConvertLIRtoMIR");
          e.printStackTrace();
          ir.printInstructions();
          throw e;
        }
        burs.finalizeBlock(bb);
      }
    }
  }

  /**
   * Stage 4: Handle complex operators
   * (those that expand to multiple basic blocks).
   */
  private static final class ComplexOperators extends CompilerPhase {
    public String getName() {
      return "Complex Operators";
    }

    public CompilerPhase newExecution(IR ir) {
      return this;
    }

    public void perform(IR ir) {
      ComplexLIR2MIRExpansion.convert(ir);
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.guice.maven;
import java.io.File;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.maven.plugin.MojoExecutionException;
import org.codehaus.mojo.exec.AbstractExecMojo;
/**
 * Runs a Camel application, using the
 * <code>jndi.properties</code> file on the classpath as the
 * way to <a href="http://camel.apache.org/guice.html">bootstrap via Guice</a>
*
* @goal embedded
* @requiresDependencyResolution runtime
* @execute phase="test-compile"
*/
public class EmbeddedMojo extends AbstractExecMojo {

    /**
     * The duration to run the application for which by default is in milliseconds.
     * A value <= 0 will run forever.
     * Adding a s indicates seconds - eg "5s" means 5 seconds.
     *
     * @parameter property="camel.duration"
     * default-value="-1"
     */
    protected String duration;

    /**
     * The DOT File name used to generate the DOT diagram of the route definitions
     *
     * @parameter default-value="${project.build.directory}/site/cameldoc/routes.dot"
     * @readonly
     */
    protected String outputDirectory;

    /**
     * Allows the DOT file generation to be disabled
     *
     * @parameter property="true"
     * @readonly
     */
    protected boolean dotEnabled;

    /**
     * Allows the routes from multiple contexts to be aggregated into one DOT file (in addition to the individual files)
     *
     * @parameter property="false"
     * @readonly
     */
    protected boolean dotAggregationEnabled;

    /**
     * Allows to provide a custom properties file on the classpath to initialize
     * a {@link javax.naming.InitialContext} object with. This corresponds to
     * the {@link org.apache.camel.guice.Main#setJndiProperties(String)} API
     * method
     *
     * @parameter property="jndiProperties"
     */
    protected String jndiProperties;

    /**
     * Project classpath.
     *
     * @parameter property="project.testClasspathElements"
     * @required
     * @readonly
     */
    private List<?> classpathElements;

    /**
     * The main class to execute.
     *
     * @parameter property="camel.mainClass"
     * default-value="org.apache.camel.guice.Main"
     * @required
     */
    private String mainClass;

    /**
     * Mojo entry point: delegates to {@link #executeWithoutWrapping()} and
     * wraps any failure in a {@link MojoExecutionException}.
     */
    public void execute() throws MojoExecutionException {
        try {
            executeWithoutWrapping();
        } catch (Exception e) {
            throw new MojoExecutionException("Failed: " + e, e);
        }
    }

    /**
     * Runs Camel on a class loader built from the project classpath,
     * installing that loader as the thread context class loader for the
     * duration of the run and restoring the previous one afterwards.
     */
    public void executeWithoutWrapping() throws MalformedURLException, ClassNotFoundException,
        NoSuchMethodException, IllegalAccessException, MojoExecutionException {
        Thread current = Thread.currentThread();
        ClassLoader original = current.getContextClassLoader();
        try {
            ClassLoader camelLoader = createClassLoader(null);
            current.setContextClassLoader(camelLoader);
            runCamel(camelLoader);
        } finally {
            current.setContextClassLoader(original);
        }
    }

    // Properties
    //-------------------------------------------------------------------------

    /**
     * Getter for property output directory.
     *
     * @return The value of output directory.
     */
    public String getOutputDirectory() {
        return outputDirectory;
    }

    /**
     * Setter for the output directory.
     *
     * @param inOutputDirectory The value of output directory.
     */
    public void setOutputDirectory(String inOutputDirectory) {
        this.outputDirectory = inOutputDirectory;
    }

    public List<?> getClasspathElements() {
        return classpathElements;
    }

    public void setClasspathElements(List<?> classpathElements) {
        this.classpathElements = classpathElements;
    }

    public boolean isDotEnabled() {
        return dotEnabled;
    }

    public void setDotEnabled(boolean dotEnabled) {
        this.dotEnabled = dotEnabled;
    }

    public String getDuration() {
        return duration;
    }

    public void setDuration(String duration) {
        this.duration = duration;
    }

    public boolean isDotAggregationEnabled() {
        return dotAggregationEnabled;
    }

    public void setDotAggregationEnabled(boolean dotAggregationEnabled) {
        this.dotAggregationEnabled = dotAggregationEnabled;
    }

    public String getMainClass() {
        return mainClass;
    }

    public void setMainClass(String mainClass) {
        this.mainClass = mainClass;
    }

    public String getJndiProperties() {
        return jndiProperties;
    }

    public void setJndiProperties(String jndiProperties) {
        this.jndiProperties = jndiProperties;
    }

    // Implementation methods
    //-------------------------------------------------------------------------

    /**
     * Loads the configured main class from the given loader and invokes its
     * static {@code main(String[])} with the generated CLI arguments.
     */
    protected void runCamel(ClassLoader newLoader) throws ClassNotFoundException, NoSuchMethodException,
        IllegalAccessException, MojoExecutionException {
        getLog().debug("Running Camel in: " + newLoader);
        Class<?> mainType = newLoader.loadClass(mainClass);
        Method mainMethod = mainType.getMethod("main", String[].class);
        String[] cliArgs = createArguments();
        getLog().debug("Starting the Camel Main with arguments: " + Arrays.asList(cliArgs));
        try {
            mainMethod.invoke(null, new Object[] {cliArgs});
        } catch (InvocationTargetException e) {
            // Unwrap the reflective wrapper so the real failure is reported.
            Throwable cause = e.getTargetException();
            throw new MojoExecutionException("Failed: " + cause, cause);
        }
    }

    /**
     * Builds the command-line argument list handed to the Camel Main class,
     * reflecting the DOT, duration and JNDI settings of this mojo.
     */
    protected String[] createArguments() {
        List<String> arguments = new ArrayList<String>(5);
        if (isDotEnabled()) {
            arguments.add("-outdir");
            arguments.add(getOutputDirectory());
        }
        if (isDotAggregationEnabled()) {
            arguments.add("-aggregate-dot");
            arguments.add("true");
        }
        arguments.add("-duration");
        arguments.add(getDuration());
        String jndi = getJndiProperties();
        if (jndi != null) {
            arguments.add("-j");
            arguments.add(jndi);
        }
        return arguments.toArray(new String[0]);
    }

    /**
     * Creates a {@link URLClassLoader} over the project's classpath elements.
     *
     * @param parent parent class loader, may be null
     */
    public ClassLoader createClassLoader(ClassLoader parent) throws MalformedURLException {
        getLog().debug("Using classpath: " + classpathElements);
        URL[] urls = new URL[classpathElements.size()];
        int index = 0;
        for (Object element : classpathElements) {
            urls[index] = new File((String) element).toURI().toURL();
            getLog().debug("URL: " + urls[index]);
            index++;
        }
        return new URLClassLoader(urls, parent);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.rel.externalize;
import org.apache.calcite.plan.hep.HepRelVertex;
import org.apache.calcite.plan.volcano.RelSubset;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.RelWriter;
import org.apache.calcite.rel.metadata.RelMetadataQuery;
import org.apache.calcite.sql.SqlExplainLevel;
import org.apache.calcite.util.ImmutableBeans;
import org.apache.calcite.util.Pair;
import org.apache.calcite.util.Util;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import org.checkerframework.checker.nullness.qual.Nullable;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
/**
* Utility to dump a rel node plan in dot format.
*/
public class RelDotWriter extends RelWriterImpl {
  //~ Instance fields --------------------------------------------------------

  /**
   * Adjacent list of the plan graph.
   */
  private final Map<RelNode, List<RelNode>> outArcTable = new LinkedHashMap<>();

  // Per-node dot label text; also serves as the "already visited" set.
  private Map<RelNode, String> nodeLabels = new HashMap<>();

  // Dot style attributes (e.g. "bold") accumulated per node.
  private Multimap<RelNode, String> nodeStyles = HashMultimap.create();

  private final WriteOption option;

  //~ Constructors -----------------------------------------------------------

  /** Creates a writer with the default {@link WriteOption}. */
  public RelDotWriter(
      PrintWriter pw, SqlExplainLevel detailLevel,
      boolean withIdPrefix) {
    this(pw, detailLevel, withIdPrefix, WriteOption.DEFAULT);
  }

  /** Creates a writer with an explicit {@link WriteOption}. */
  public RelDotWriter(
      PrintWriter pw, SqlExplainLevel detailLevel,
      boolean withIdPrefix, WriteOption option) {
    super(pw, detailLevel, withIdPrefix);
    this.option = option;
  }

  //~ Methods ----------------------------------------------------------------

  // Visits one node: records its arcs and label, then recurses into inputs.
  // Nothing is printed here; output happens in done() after the full plan
  // has been visited.
  @Override protected void explain_(RelNode rel,
      List<Pair<String, @Nullable Object>> values) {
    // get inputs
    List<RelNode> inputs = getInputs(rel);
    outArcTable.put(rel, inputs);

    // generate node label
    String label = getRelNodeLabel(rel, values);
    nodeLabels.put(rel, label);

    if (highlightNode(rel)) {
      nodeStyles.put(rel, "bold");
    }

    explainInputs(inputs);
  }

  /**
   * Builds the (quoted) dot label for a node: type name, attribute values,
   * optionally cost information and the node id, subject to the configured
   * length limits.
   */
  protected String getRelNodeLabel(
      RelNode rel,
      List<Pair<String, @Nullable Object>> values) {
    List<String> labels = new ArrayList<>();
    StringBuilder sb = new StringBuilder();
    final RelMetadataQuery mq = rel.getCluster().getMetadataQuery();
    if (withIdPrefix) {
      sb.append(rel.getId()).append(":");
    }
    sb.append(rel.getRelTypeName());
    labels.add(sb.toString());
    sb.setLength(0);
    if (detailLevel != SqlExplainLevel.NO_ATTRIBUTES) {
      for (Pair<String, @Nullable Object> value : values) {
        // RelNode-valued attributes are rendered as arcs, not label text.
        if (value.right instanceof RelNode) {
          continue;
        }
        sb.append(value.left)
            .append(" = ")
            .append(value.right);
        labels.add(sb.toString());
        sb.setLength(0);
      }
    }
    switch (detailLevel) {
    case ALL_ATTRIBUTES:
      sb.append("rowcount = ")
          .append(mq.getRowCount(rel))
          .append(" cumulative cost = ")
          .append(mq.getCumulativeCost(rel))
          .append(" ");
      break;
    default:
      break;
    }
    switch (detailLevel) {
    case NON_COST_ATTRIBUTES:
    case ALL_ATTRIBUTES:
      if (!withIdPrefix) {
        // If we didn't print the rel id at the start of the line, print
        // it at the end.
        sb.append("id = ").append(rel.getId());
      }
      break;
    default:
      break;
    }
    labels.add(sb.toString().trim());
    sb.setLength(0);

    // format labels separately and then concat them
    int leftSpace = option.maxNodeLabelLength();
    List<String> newlabels = new ArrayList<>();
    for (int i = 0; i < labels.size(); i++) {
      if (option.maxNodeLabelLength() != -1 && leftSpace <= 0) {
        if (i < labels.size() - 1) {
          // this is not the last label, but we have to stop here
          newlabels.add("...");
        }
        break;
      }
      String formatted = formatNodeLabel(labels.get(i), option.maxNodeLabelLength());
      newlabels.add(formatted);
      leftSpace -= formatted.length();
    }
    return "\"" + String.join("\\n", newlabels) + "\"";
  }

  // Resolves the "real" input rels, looking through planner wrapper nodes
  // (HepRelVertex / RelSubset).
  private static List<RelNode> getInputs(RelNode parent) {
    return Util.transform(parent.getInputs(), child -> {
      if (child instanceof HepRelVertex) {
        return ((HepRelVertex) child).getCurrentRel();
      } else if (child instanceof RelSubset) {
        RelSubset subset = (RelSubset) child;
        return subset.getBestOrOriginal();
      } else {
        return child;
      }
    });
  }

  // Recursively visits inputs that have not been labeled yet.
  private void explainInputs(List<? extends @Nullable RelNode> inputs) {
    for (RelNode input : inputs) {
      if (input == null || nodeLabels.containsKey(input)) {
        continue;
      }
      input.explain(this);
    }
  }

  @Override public RelWriter done(RelNode node) {
    int numOfVisitedNodes = nodeLabels.size();
    super.done(node);
    if (numOfVisitedNodes == 0) {
      // When we enter this method call, no node
      // has been visited. So the current node must be the root of the plan.
      // Now we are exiting the method, all nodes in the plan
      // have been visited, so it is time to dump the plan.
      pw.println("digraph {");

      // print nodes with styles
      for (RelNode rel : nodeStyles.keySet()) {
        String style = String.join(",", nodeStyles.get(rel));
        pw.println(nodeLabels.get(rel) + " [style=\"" + style + "\"]");
      }

      // ordinary arcs
      for (Map.Entry<RelNode, List<RelNode>> entry : outArcTable.entrySet()) {
        RelNode src = entry.getKey();
        String srcDesc = nodeLabels.get(src);
        for (int i = 0; i < entry.getValue().size(); i++) {
          RelNode dst = entry.getValue().get(i);

          // label is the ordinal of the arc
          // arc direction from child to parent, to reflect the direction of data flow
          pw.println(nodeLabels.get(dst) + " -> " + srcDesc + " [label=\"" + i + "\"]");
        }
      }

      pw.println("}");
      pw.flush();
    }
    return this;
  }

  /**
   * Format the label into multiple lines according to the options.
   * @param label the original label.
   * @param limit the maximal length of the formatted label.
   *              -1 means no limit.
   * @return the formatted label.
   */
  private String formatNodeLabel(String label, int limit) {
    label = label.trim();

    // escape quotes in the label.
    label = label.replace("\"", "\\\"");

    boolean trimmed = false;
    if (limit != -1 && label.length() > limit) {
      label = label.substring(0, limit);
      trimmed = true;
    }

    if (option.maxNodeLabelPerLine() == -1) {
      // no need to split into multiple lines.
      return label + (trimmed ? "..." : "");
    }

    List<String> descParts = new ArrayList<>();
    for (int idx = 0; idx < label.length(); idx += option.maxNodeLabelPerLine()) {
      int endIdx = idx + option.maxNodeLabelPerLine() > label.length() ? label.length()
          : idx + option.maxNodeLabelPerLine();
      descParts.add(label.substring(idx, endIdx));
    }

    return String.join("\\n", descParts) + (trimmed ? "..." : "");
  }

  // True when the configured node predicate (if any) matches the node.
  boolean highlightNode(RelNode node) {
    Predicate<RelNode> predicate = option.nodePredicate();
    return predicate != null && predicate.test(node);
  }

  /**
   * Options for displaying the rel node plan in dot format.
   */
  public interface WriteOption {

    /** Default configuration. */
    WriteOption DEFAULT = ImmutableBeans.create(WriteOption.class);

    /**
     * The max length of node labels.
     * If the label is too long, the visual display would be messy.
     * -1 means no limit to the label length.
     */
    @ImmutableBeans.Property
    @ImmutableBeans.IntDefault(100)
    int maxNodeLabelLength();

    /**
     * The max length of node label in a line.
     * -1 means no limitation.
     */
    @ImmutableBeans.Property
    @ImmutableBeans.IntDefault(20)
    int maxNodeLabelPerLine();

    /**
     * Predicate for nodes that need to be highlighted.
     */
    @ImmutableBeans.Property
    @Nullable Predicate<RelNode> nodePredicate();
  }
}
| |
/**
Copyright 2008 University of Rochester
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package edu.ur.ir.web.action.researcher;
import java.util.Collection;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import com.opensymphony.xwork2.Preparable;
import edu.ur.ir.researcher.Field;
import edu.ur.ir.researcher.FieldService;
import edu.ur.ir.user.IrRole;
import edu.ur.ir.user.IrUser;
import edu.ur.ir.user.UserService;
import edu.ur.ir.web.action.UserIdAware;
import edu.ur.ir.web.table.Pager;
/**
* Action to deal with fields.
*
* @author Sharmila Ranganathan
*
*/
public class ManageFields extends Pager implements Preparable, UserIdAware {
/** generated version id. */
private static final long serialVersionUID = -3229962214403823020L;
/** field service */
private FieldService fieldService;
/** Logger for managing fields*/
private static final Logger log = LogManager.getLogger(ManageFields.class);
/** Set of fields for viewing the fields */
private Collection<Field> fields;
/** service for users */
private UserService userService;
/** ResearcherField for loading */
private Field field;
/** Message that can be displayed to the user. */
private String message;
/** Indicates the field has been added*/
private boolean added = false;
/** Indicates the fields have been deleted */
private boolean deleted = false;
/** id of the field */
private Long id;
/** id of the user making the change */
private Long userId;
/** Set of field ids */
private long[] fieldIds;
/** type of sort [ ascending | descending ]
* this is for incoming requests */
private String sortType = "asc";
/** Total number of fields */
private int totalHits;
/** Row End */
private int rowEnd;
/** Default constructor */
public ManageFields()
{
numberOfResultsToShow = 25;
numberOfPagesToShow = 10;
}
/**
* Method to create a new field.
*
* Create a new field
*/
public String create()
{
log.debug("creating a field = " + field.getName());
IrUser user = userService.getUser(userId, false);
if(!user.hasRole(IrRole.RESEARCHER_ROLE) && !user.hasRole(IrRole.ADMIN_ROLE) )
{
return "accessDenied";
}
added = false;
Field other = fieldService.getField(field.getName());
if( other == null)
{
fieldService.makeFieldPersistent(field);
added = true;
}
else
{
message = getText("fieldAlreadyExists",
new String[]{field.getName()});
addFieldError("fieldAlreadyExists", message);
}
return "added";
}
/**
* Method to update an existing field.
*
* @return
*/
public String update()
{
log.debug("updateing field id = " + field.getId());
IrUser user = userService.getUser(userId, false);
if(!user.hasRole(IrRole.ADMIN_ROLE) )
{
return "accessDenied";
}
added = false;
Field other = fieldService.getField(field.getName());
if( other == null || other.getId().equals(field.getId()))
{
fieldService.makeFieldPersistent(field);
added = true;
}
else
{
message = getText("fieldAlreadyExists",
new String[]{field.getName()});
addFieldError("fieldAlreadyExists", message);
}
return "added";
}
/**
* Removes the selected items and collections.
*
* @return
*/
public String delete()
{
log.debug("Delete fields called");
IrUser user = userService.getUser(userId, false);
if(!user.hasRole(IrRole.ADMIN_ROLE) )
{
return "accessDenied";
}
if( fieldIds != null )
{
for(int index = 0; index < fieldIds.length; index++)
{
log.debug("Deleting field with id " + fieldIds[index]);
fieldService.deleteField(fieldService.getField(fieldIds[index], false));
}
}
deleted = true;
return "deleted";
}
/**
* Get the fields
*/
public String get()
{
field = fieldService.getField(id, false);
return "get";
}
/**
* Get the fields table data.
*
* @return
*/
public String viewFields()
{
rowEnd = rowStart + numberOfResultsToShow;
fields = fieldService.getFieldsOrderByName(rowStart,
numberOfResultsToShow, sortType);
totalHits = fieldService.getFieldCount().intValue();
if(rowEnd > totalHits)
{
rowEnd = totalHits;
}
return SUCCESS;
}
/**
* Get the field service.
*
* @return
*/
public FieldService getFieldService() {
return fieldService;
}
/**
* Set the field service.
*
* @param fieldService
*/
public void setFieldService(FieldService fieldService) {
this.fieldService = fieldService;
}
/**
* List of fields for display.
*
* @return
*/
public Collection<Field> getFields() {
return fields;
}
/**
* Set the list of fields.
*
* @param fields
*/
public void setFields(Collection<Field> fields) {
this.fields = fields;
}
public String getMessage() {
return message;
}
public void setMessage(String message) {
this.message = message;
}
public boolean isAdded() {
return added;
}
public void setAdded(boolean added) {
this.added = added;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public long[] getFieldIds() {
return fieldIds;
}
public void setFieldIds(long[] fieldIds) {
this.fieldIds = fieldIds;
}
public boolean getDeleted() {
return deleted;
}
public void setDeleted(boolean deleted) {
this.deleted = deleted;
}
public Field getField() {
return field;
}
public void setField(Field field) {
this.field = field;
}
public void prepare() throws Exception {
if( id != null)
{
field = fieldService.getField(id, false);
}
}
public String getSortType() {
return sortType;
}
public void setSortType(String sortType) {
this.sortType = sortType;
}
public int getTotalHits() {
return totalHits;
}
public void setTotalHits(int totalHits) {
this.totalHits = totalHits;
}
public int getRowEnd() {
return rowEnd;
}
public void setRowEnd(int rowEnd) {
this.rowEnd = rowEnd;
}
public void setUserId(Long userId) {
this.userId = userId;
}
public UserService getUserService() {
return userService;
}
/** @param userService the user service to use (injected). */
public void setUserService(UserService userService) {
this.userService = userService;
}
}
| |
/**
* Copyright (C) 2014 Twitter Inc and other contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.fabric.samples.cannonball.activity;
import android.app.Activity;
import android.content.BroadcastReceiver;
import android.content.ClipData;
import android.content.ClipDescription;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Bundle;
import android.os.CountDownTimer;
import android.support.v4.content.LocalBroadcastManager;
import android.support.v4.view.ViewPager;
import android.util.Log;
import android.util.SparseIntArray;
import android.view.DragEvent;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import com.crashlytics.android.Crashlytics;
import com.crashlytics.android.answers.Answers;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.List;
import java.util.Locale;
import io.fabric.samples.cannonball.App;
import io.fabric.samples.cannonball.AppService;
import io.fabric.samples.cannonball.R;
import io.fabric.samples.cannonball.model.Theme;
import io.fabric.samples.cannonball.model.WordBank;
import io.fabric.samples.cannonball.view.CountdownView;
import io.fabric.samples.cannonball.view.FlowLayout;
import io.fabric.samples.cannonball.view.ImageAdapter;
/**
 * Screen where the user builds a poem by dragging words from a word bank into
 * a poem area, racing a countdown timer. Saving is delegated to
 * {@link AppService#createPoem}; the result comes back via a local broadcast
 * handled by {@link PoemCreatedReceiver}.
 *
 * Fixes in this revision:
 * - removed a leftover debug log ({@code Log.i(TAG, "entrei")}) from onCreate,
 *   and the TAG constant it alone used;
 * - getPoemText now normalizes each word to a String before the suffix lookup,
 *   so the List&lt;String&gt;.contains check cannot silently fail for non-String
 *   CharSequence instances returned by TextView.getText().
 */
public class PoemBuilderActivity extends Activity {
    public static final String KEY_THEME = "Theme";
    private static final int PLAY_TIME = 60;            // countdown length, seconds
    private static final int PLACEHOLDER_POSITION = -1; // sentinel: no placeholder slot

    private Theme poemTheme;
    private FlowLayout poemContainer;   // words currently placed in the poem
    private FlowLayout wordsContainer;  // the word bank
    private ViewPager poemImagePager;
    private WordBank wordBank;
    private TextView placeholder;       // drop-position indicator shown during drags
    private PoemCreatedReceiver poemCreatedReceiver;
    private CountDownTimer countDown;
    private CountdownView countdownView;
    private TextView countdownText;
    private TextView shuffleText;
    private ImageView tick;
    private List<String> suffixes;      // word endings that join the previous word without a space
    private DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.US);
    private boolean areCrashesEnabled;  // demo flag: enables an intentional Crashlytics crash path

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_poem_builder);
        areCrashesEnabled = App.getInstance().areCrashesEnabled();
        poemTheme = (Theme) getIntent().getSerializableExtra(KEY_THEME);
        setUpViews();
    }

    /** Wires up every view on the screen; called once from onCreate. */
    private void setUpViews() {
        setUpCountDown();
        setUpWordContainers();
        setUpBack();
        setUpShuffle();
        setUpSavePoem();
    }

    /** Tick button: stops the timer and saves the poem. */
    private void setUpSavePoem() {
        tick = (ImageView) findViewById(R.id.tick);
        tick.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Crashlytics.log("PoemBuilder: clicked to save poem");
                countDown.cancel();
                createPoem();
            }
        });
    }

    /** Shuffle button: reloads the word bank with fresh random words. */
    private void setUpShuffle() {
        shuffleText = (TextView) findViewById(R.id.shuffle);
        shuffleText.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                shuffleWords();
            }
        });
    }

    /** Back arrow: same as the hardware back button. */
    private void setUpBack() {
        final ImageView backButton = (ImageView) findViewById(R.id.back);
        backButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                onBackPressed();
            }
        });
    }

    /** Sets up the word-bank and poem containers, image pager and drag placeholder. */
    private void setUpWordContainers() {
        wordsContainer = (FlowLayout) findViewById(R.id.words_container);
        wordsContainer.setOnDragListener(new PanelDragListener());
        poemContainer = (FlowLayout) findViewById(R.id.poem_container);
        poemContainer.setOnDragListener(new PanelDragListener());
        wordBank = new WordBank(getApplicationContext(), poemTheme);
        shuffleWords();
        poemImagePager = (ViewPager) findViewById(R.id.poem_image_pager);
        poemImagePager.setAdapter(new ImageAdapter(getApplicationContext(), poemTheme));
        // touches on the poem container are forwarded to the pager so the
        // background image can still be swiped
        poemContainer.setOnTouchListener(new ImageTouchListener());
        // Placeholder used by drag and drop to preview the insertion point
        placeholder = (TextView) findViewById(R.id.placeholder);
        placeholder.setText("");
        final int PLACEHOLDER = 42; // non-String tag so fillWordsList skips it
        placeholder.setTag(PLACEHOLDER);
        suffixes = Arrays.asList(getResources().getStringArray(R.array.wordbank_suffixes));
    }

    /** Creates (but does not start) the visual countdown for PLAY_TIME seconds. */
    private void setUpCountDown() {
        countdownView = (CountdownView) findViewById(R.id.countdown_spinner_view);
        countdownText = (TextView) findViewById(R.id.countdown_count);
        countDown = createCountdown(PLAY_TIME);
    }

    @Override
    public void onBackPressed() {
        Crashlytics.log("PoemBuilder: getting back, user cancelled the poem creation");
        super.onBackPressed();
        countDown.cancel();
    }

    /**
     * Saves the current poem via AppService if it has at least one word,
     * otherwise shows a toast asking the user to add words.
     */
    private void createPoem() {
        if (poemContainer.getChildCount() > 0) {
            final String poemText = getPoemText();
            final SparseIntArray imgList = poemTheme.getImageList();
            // the line below seems weird, but relies on the fact that the index of SparseIntArray could be any integer
            final int poemImage = imgList.keyAt(imgList.indexOfValue(imgList.get(poemImagePager.getCurrentItem() + 1)));
            Crashlytics.setString(App.CRASHLYTICS_KEY_POEM_TEXT, poemText);
            Crashlytics.setInt(App.CRASHLYTICS_KEY_POEM_IMAGE, poemImage);
            AppService.createPoem(getApplicationContext(),
                    poemText,
                    poemImage,
                    poemTheme.getDisplayName(),
                    dateFormat.format(Calendar.getInstance().getTime()));
        } else {
            Toast.makeText(getApplicationContext(),
                    getResources().getString(R.string.toast_wordless_poem), Toast.LENGTH_SHORT)
                    .show();
            Crashlytics.log("PoemBuilder: User tried to create poem without words on it");
        }
    }

    @Override
    protected void onResume() {
        super.onResume();
        // listen for the save-result broadcast while in the foreground
        final IntentFilter intentFilter = new IntentFilter(App.BROADCAST_POEM_CREATION);
        poemCreatedReceiver = new PoemCreatedReceiver();
        LocalBroadcastManager.getInstance(this).registerReceiver(poemCreatedReceiver, intentFilter);
    }

    @Override
    protected void onPause() {
        super.onPause();
        LocalBroadcastManager.getInstance(this).unregisterReceiver(poemCreatedReceiver);
    }

    /** Handles the poem-saved broadcast: success opens history, failure finishes. */
    class PoemCreatedReceiver extends BroadcastReceiver {
        @Override
        public void onReceive(Context context, Intent intent) {
            if (intent.getBooleanExtra(App.BROADCAST_POEM_CREATION_RESULT, false)) {
                Crashlytics.log("PoemBuilder: poem saved, receiver called");
                Toast.makeText(getApplicationContext(),
                        getResources().getString(R.string.toast_poem_created), Toast.LENGTH_SHORT)
                        .show();
                final Intent i = new Intent(getApplicationContext(), PoemHistoryActivity.class);
                i.putExtra(ThemeChooserActivity.IS_NEW_POEM, true);
                startActivity(i);
            } else {
                Crashlytics.log("PoemBuilder: error when saving poem");
                Toast.makeText(getApplicationContext(),
                        getResources().getString(R.string.toast_poem_error), Toast.LENGTH_SHORT)
                        .show();
                finish();
            }
        }
    }

    /** Clears the word bank and refills it with a fresh batch from the WordBank. */
    private void shuffleWords() {
        wordsContainer.removeAllViews();
        final List<String> wordList = wordBank.loadWords();
        for (String w : wordList) {
            final TextView wordView
                    = (TextView) getLayoutInflater().inflate(R.layout.word, null, true);
            wordView.setText(w);
            wordView.setOnTouchListener(new WordTouchListener());
            wordView.setTag(w);
            wordsContainer.addView(wordView);
        }
        Crashlytics.setInt(App.CRASHLYTICS_KEY_WORDBANK_COUNT, wordList.size());
    }

    /**
     * Joins the words currently in the poem container into the poem text.
     * Suffix words (e.g. word endings) attach to the previous word without a
     * separating space; all other words are space-separated.
     */
    private String getPoemText() {
        final StringBuilder sb = new StringBuilder();
        for (int i = 0; i < poemContainer.getChildCount(); i++) {
            // toString() so the List<String> suffix lookup compares equal types;
            // TextView.getText() may return a non-String CharSequence whose
            // equals() is not symmetric with String
            final String word = ((TextView) poemContainer.getChildAt(i)).getText().toString();
            if (!suffixes.contains(word) && i != 0) sb.append(" ");
            sb.append(word);
        }
        return sb.toString();
    }

    /** Forwards poem-container touches to the image pager so it can be swiped. */
    final class ImageTouchListener implements View.OnTouchListener {
        @Override
        public boolean onTouch(View v, MotionEvent event) {
            poemImagePager.onTouchEvent(event);
            return true;
        }
    }

    /**
     * Builds and starts a one-second-tick countdown; at 10 seconds remaining the
     * UI turns red, and on expiry the poem is saved (or the screen is exited if
     * no words were placed).
     */
    private CountDownTimer createCountdown(long remainingSeconds) {
        countdownView.setCurrentTime(remainingSeconds);
        return new CountDownTimer(remainingSeconds * 1000, 1000) {
            public void onTick(long millisUntilFinished) {
                final long secsToFinish = millisUntilFinished / 1000;
                if (secsToFinish == 10) {
                    countdownText.setTextColor(getResources().getColor(R.color.red_font));
                    countdownView.setColor(getResources().getColor(R.color.red_font));
                    shuffleText.setTextColor(getResources().getColor(R.color.red_font));
                }
                countdownText.setText("" + secsToFinish);
                countdownView.setCurrentTime(secsToFinish);
                Crashlytics.setLong(App.CRASHLYTICS_KEY_COUNTDOWN, secsToFinish);
            }
            public void onFinish() {
                Crashlytics.log("PoemBuilder: countdown timer ended, saving poem...");
                if (poemContainer.getChildCount() > 0) {
                    createPoem();
                } else {
                    Crashlytics.log("PoemBuilder: Countdown finishes counting, no words added");
                    onBackPressed();
                }
            }
        }.start();
    }

    /** Starts a drag on press, or moves the word to the other panel on a tap. */
    private final class WordTouchListener implements View.OnTouchListener {
        public boolean onTouch(View view, MotionEvent motionEvent) {
            if (motionEvent.getAction() == MotionEvent.ACTION_DOWN &&
                    motionEvent.getPointerId(0) == 0) {
                // the getPointerId is to avoid executing something if someone decides to drag with
                // 2 fingers
                final ClipData data = ClipData.newPlainText("word", (String) view.getTag());
                final View.DragShadowBuilder shadowBuilder = new View.DragShadowBuilder(view);
                view.startDrag(data, shadowBuilder, view, 0);
                view.setVisibility(View.GONE);
                return true;
            } else if (motionEvent.getAction() == MotionEvent.ACTION_UP) {
                // it is a tap, if the touch was quick enough to cancel the execution
                // of ACTION_DOWN, we should consider as a tap too.
                final FlowLayout parent = (FlowLayout) view.getParent();
                if (parent.getId() == R.id.words_container) {
                    moveView(wordsContainer, poemContainer, view);
                } else {
                    moveView(poemContainer, wordsContainer, view);
                }
                return true;
            } else {
                return false;
            }
        }
    }

    /**
     * Drag listener shared by both panels: previews the insertion point with a
     * placeholder while dragging, performs the move on drop, and distinguishes
     * a real drag from a quick tap (under TAP_RANGE ms).
     */
    class PanelDragListener implements View.OnDragListener {
        private static final int TAP_RANGE = 200;   // ms; shorter drags count as taps
        private long dragStartedAt;
        private int finalPosition;                  // placeholder slot under the pointer
        private ArrayList<TextView> words = new ArrayList<TextView>();

        @Override
        public boolean onDrag(View v, DragEvent event) {
            final int action = event.getAction();
            switch (action) {
                case DragEvent.ACTION_DRAG_STARTED:
                    // Determines if this View can accept the dragged data
                    // Called on all views that has this listener set
                    final ClipDescription clipDescription = event.getClipDescription();
                    if (clipDescription.hasMimeType(ClipDescription.MIMETYPE_TEXT_PLAIN)) {
                        dragStartedAt = System.currentTimeMillis();
                        words.clear();
                        return true;
                    }
                    return false;
                case DragEvent.ACTION_DRAG_ENTERED:
                    return true;
                case DragEvent.ACTION_DRAG_LOCATION:
                    // lazily snapshot the panel's word views, then show the
                    // placeholder at the slot under the pointer
                    if (words.size() == 0)
                        fillWordsList((FlowLayout) v);
                    finalPosition = pointToWordIndex(event.getX(), event.getY());
                    if (PLACEHOLDER_POSITION != finalPosition) {
                        final ViewGroup parent = (ViewGroup) placeholder.getParent();
                        if (parent != null)
                            parent.removeView(placeholder);
                        ((FlowLayout) v).addView(placeholder, finalPosition);
                        placeholder.setVisibility(View.VISIBLE);
                    }
                    return true;
                case DragEvent.ACTION_DRAG_EXITED:
                    return true;
                case DragEvent.ACTION_DROP:
                    // Called on the view that received the drop
                    final View word = (View) event.getLocalState();
                    final FlowLayout from = (FlowLayout) word.getParent();
                    FlowLayout to = (FlowLayout) v;
                    if (System.currentTimeMillis() - dragStartedAt > TAP_RANGE) {
                        // it was a drag
                        if (finalPosition >= words.size()) {
                            moveView(from, to, word);
                        } else {
                            moveView(from, to, word, finalPosition);
                        }
                    } else { // it is a tap
                        final int viewId;
                        if (areCrashesEnabled) {
                            // Crashlytics:
                            // To generate the crash, open the app and drag a word really quick
                            // from the Word Bank to the Poem (in less than TAP_RANGE ms)
                            viewId = to.getId();
                            Crashlytics.log("PoemBuilder: An enabled crash will execute");
                        } else {
                            viewId = from.getId();
                        }
                        if (viewId == R.id.words_container) {
                            to = poemContainer;
                            moveView(wordsContainer, poemContainer, word);
                        } else {
                            to = wordsContainer;
                            moveView(poemContainer, wordsContainer, word);
                        }
                    }
                    // change word color if it is on poem container or wordscontainer
                    if (to.equals(poemContainer)) {
                        word.setBackgroundResource(R.drawable.word_at_poem);
                        ((TextView) word).setTextColor(getResources().getColor(R.color.white));
                        word.invalidate();
                    } else if (to.equals(wordsContainer)) {
                        word.setBackgroundResource(R.drawable.word);
                        ((TextView) word).setTextColor(getResources().getColor(R.color.green));
                        word.invalidate();
                    }
                    final ViewGroup parent = (ViewGroup) placeholder.getParent();
                    if (parent != null)
                        parent.removeView(placeholder);
                    placeholder.setVisibility(View.GONE);
                    return true;
                case DragEvent.ACTION_DRAG_ENDED:
                    // Called on all views that has this listener set
                    if (!event.getResult()) {
                        // what to do if the view is dropped in the wrong place
                        final View localState = (View) event.getLocalState();
                        localState.setVisibility(View.VISIBLE);
                        final ViewGroup placeholderParent = (ViewGroup) placeholder.getParent();
                        if (placeholderParent != null)
                            placeholderParent.removeView(placeholder);
                        placeholder.setVisibility(View.GONE);
                    }
                    return true;
                default:
                    break;
            }
            return false;
        }

        /** Collects the panel's word views (skips the placeholder via its int tag). */
        private void fillWordsList(FlowLayout view) {
            for (int i = 0; i < view.getChildCount(); i++) {
                final TextView word = (TextView) view.getChildAt(i);
                if (word.getTag() instanceof String)
                    words.add(word);
            }
        }

        /**
         * Maps a pointer position to the index of the word slot under it,
         * or words.size() when the pointer is past the last word.
         */
        private Integer pointToWordIndex(float x, float y) {
            float startX = 0;
            boolean reachedY = false;
            final int count = words.size();
            // Margin top and bottom between words, see flowlayout
            final int space = getResources().getDimensionPixelSize(R.dimen.word_padding_vertical);
            for (int i = 0; i < count; i++) {
                final TextView word = words.get(i);
                final float wordX = word.getLeft();
                final float wordY = word.getTop();
                if (y > wordY - space && y < (wordY + word.getHeight() + space)) {
                    if (x > startX && x < (wordX + Math.round(word.getWidth() / 2))) {
                        return i;
                    }
                    startX = (wordX + (word.getWidth() / 2));
                    reachedY = true;
                } else if (reachedY) {
                    return i;
                }
            }
            return count;
        }
    }

    /** Moves a word view to the end of the target panel. */
    private void moveView(ViewGroup from, ViewGroup to, View object) {
        from.removeView(object);
        to.addView(object);
        object.setVisibility(View.VISIBLE);
        onWordMoved();
    }

    /** Moves a word view into the target panel at a specific position. */
    private void moveView(ViewGroup from, ViewGroup to, View object, int position) {
        from.removeView(object);
        to.addView(object, position);
        object.setVisibility(View.VISIBLE);
        onWordMoved();
    }

    /** Tints the save tick green when the poem has words, grey otherwise. */
    private void onWordMoved() {
        if (poemContainer.getChildCount() > 0) {
            tick.setColorFilter(getResources().getColor(R.color.green));
        } else {
            tick.setColorFilter(getResources().getColor(R.color.grayish_blue));
        }
    }
}
| |
package com.wimbli.WorldBorder;
import com.wimbli.WorldBorder.forge.Location;
import com.wimbli.WorldBorder.forge.Log;
import com.wimbli.WorldBorder.forge.Worlds;
import net.minecraft.world.WorldServer;
import net.minecraft.world.chunk.Chunk;
import java.util.Arrays;
import java.util.LinkedHashSet;
/**
* Geometry of a single world border (square or round/elliptical), plus values
* precomputed from the radii so that the very frequent insideBorder() checks
* and the knockback teleport math stay cheap.
*/
public class BorderData
{
// the main data interacted with
private double x = 0;
private double z = 0;
private int radiusX = 0;
private int radiusZ = 0;
// null = fall back to the globally configured shape; non-null overrides it
private Boolean shapeRound = null;
private boolean wrapping = false;
// some extra data kept handy for faster border checks
private double maxX;
private double minX;
private double maxZ;
private double minZ;
private double radiusXSquared;
private double radiusZSquared;
// half-diagonal of the largest axis-aligned rectangle fully inside the ellipse;
// points inside it are trivially inside the round border
private double DefiniteRectangleX;
private double DefiniteRectangleZ;
// radiusXSquared / radiusZSquared — turns the ellipse test into one comparison
private double radiusSquaredQuotient;
// <editor-fold desc="Constructors">
public BorderData(double x, double z, int radiusX, int radiusZ, Boolean shapeRound, boolean wrap)
{
setData(x, z, radiusX, radiusZ, shapeRound, wrap);
}
public BorderData(double x, double z, int radiusX, int radiusZ)
{
setData(x, z, radiusX, radiusZ, null);
}
public BorderData(double x, double z, int radiusX, int radiusZ, Boolean shapeRound)
{
setData(x, z, radiusX, radiusZ, shapeRound);
}
public BorderData(double x, double z, int radius)
{
setData(x, z, radius, null);
}
public BorderData(double x, double z, int radius, Boolean shapeRound)
{
setData(x, z, radius, shapeRound);
}
// </editor-fold>
// <editor-fold desc="setData overloads">
// Note: setRadiusX runs before setRadiusZ, so radiusSquaredQuotient is
// transiently wrong inside this call; setRadiusZ recomputes it correctly.
public final void setData(double x, double z, int radiusX, int radiusZ, Boolean shapeRound, boolean wrap)
{
this.x = x;
this.z = z;
this.shapeRound = shapeRound;
this.wrapping = wrap;
this.setRadiusX(radiusX);
this.setRadiusZ(radiusZ);
}
public final void setData(double x, double z, int radiusX, int radiusZ, Boolean shapeRound)
{
setData(x, z, radiusX, radiusZ, shapeRound, false);
}
public final void setData(double x, double z, int radius, Boolean shapeRound)
{
setData(x, z, radius, radius, shapeRound, false);
}
// </editor-fold>
//<editor-fold desc="Getters and setters">
public double getX()
{
return x;
}
// keeps the cached min/max X bounds in sync with the new center
public void setX(double x)
{
this.x = x;
this.maxX = x + radiusX;
this.minX = x - radiusX;
}
public double getZ()
{
return z;
}
// keeps the cached min/max Z bounds in sync with the new center
public void setZ(double z)
{
this.z = z;
this.maxZ = z + radiusZ;
this.minZ = z - radiusZ;
}
public int getRadiusX()
{
return radiusX;
}
public int getRadiusZ()
{
return radiusZ;
}
// recomputes every X-derived cached value; divides by radiusZSquared, which is
// 0.0 until setRadiusZ has run at least once (constructors always call both)
public void setRadiusX(int radiusX)
{
this.radiusX = radiusX;
this.maxX = x + radiusX;
this.minX = x - radiusX;
this.radiusXSquared = (double) radiusX * (double) radiusX;
this.radiusSquaredQuotient = this.radiusXSquared / this.radiusZSquared;
this.DefiniteRectangleX = Math.sqrt(.5 * this.radiusXSquared);
}
// recomputes every Z-derived cached value and fixes up the quotient
public void setRadiusZ(int radiusZ)
{
this.radiusZ = radiusZ;
this.maxZ = z + radiusZ;
this.minZ = z - radiusZ;
this.radiusZSquared = (double) radiusZ * (double) radiusZ;
this.radiusSquaredQuotient = this.radiusXSquared / this.radiusZSquared;
this.DefiniteRectangleZ = Math.sqrt(.5 * this.radiusZSquared);
}
// convenience for circular/square borders: same radius on both axes
public void setRadius(int radius)
{
setRadiusX(radius);
setRadiusZ(radius);
}
public Boolean getShape()
{
return shapeRound;
}
public void setShape(Boolean shapeRound)
{
this.shapeRound = shapeRound;
}
public boolean getWrapping()
{
return wrapping;
}
public void setWrapping(boolean wrap)
{
this.wrapping = wrap;
}
//</editor-fold>
/** @return an independent copy with identical geometry and flags. */
public BorderData copy()
{
return new BorderData(x, z, radiusX, radiusZ, shapeRound, wrapping);
}
@Override
public String toString()
{
return String.format("radius %s at X: %s Z: %s%s%s",
(radiusX == radiusZ) ? radiusX : radiusX + "*" + radiusZ,
Config.COORD_FORMAT.format(x),
Config.COORD_FORMAT.format(z),
shapeRound != null
? String.format( " (shape override: %s)", Config.getShapeName(shapeRound) )
: "",
wrapping ? " (wrapping)" : ""
);
}
/** This algorithm of course needs to be fast, since it will be run very frequently */
public boolean insideBorder(double xLoc, double zLoc, boolean round)
{
// if this border has a shape override set, use it
if (shapeRound != null)
round = shapeRound;
// square border
if (!round)
return !(xLoc < minX || xLoc > maxX || zLoc < minZ || zLoc > maxZ);
// round border
else
{
// elegant round border checking algorithm is from rBorder by Reil with almost no changes, all credit to him for it
double X = Math.abs(x - xLoc);
double Z = Math.abs(z - zLoc);
// fast accept: inside the inscribed rectangle
if (X < DefiniteRectangleX && Z < DefiniteRectangleZ)
return true; // Definitely inside
// fast reject: outside the bounding box
else if (X >= radiusX || Z >= radiusZ)
return false; // Definitely outside
// full ellipse test, rescaled by the precomputed quotient to avoid two divisions
else if (X * X + Z * Z * radiusSquaredQuotient < radiusXSquared)
return true; // After further calculation, inside
else
return false; // Apparently outside, then
}
}
public boolean insideBorder(double xLoc, double zLoc)
{
return insideBorder(xLoc, zLoc, Config.getShapeRound());
}
public boolean insideBorder(Location loc)
{
return insideBorder(loc.posX, loc.posZ, Config.getShapeRound());
}
public boolean insideBorder(CoordXZ coord, boolean round)
{
return insideBorder(coord.x, coord.z, round);
}
public boolean insideBorder(CoordXZ coord)
{
return insideBorder(coord.x, coord.z, Config.getShapeRound());
}
/**
* Computes where a player outside the border should be placed: either knocked
* back inside by the configured knockback distance, or (wrapping borders)
* teleported to the opposite edge. Returns null if no safe Y could be found.
* NOTE(review): the round-border math divides by the distance from the center
* (dU/dT) — presumably callers never pass a location exactly at the center;
* confirm against the calling code.
*/
public Location correctedPosition(Location loc, boolean round, boolean flying)
{
// if this border has a shape override set, use it
if (shapeRound != null)
round = shapeRound;
double xLoc = loc.posX;
double yLoc = loc.posY;
double zLoc = loc.posZ;
double knock = Config.getKnockBack();
// Make sure knockback is not too big for this border
if (knock >= radiusX * 2 || knock >= radiusZ * 2)
{
Log.warn("Knockback %.2f is too big for border. Defaulting to 3.0.", knock);
knock = 3.0;
}
// square border
if (!round)
{
if (wrapping)
{
// crossing one edge re-enters from the opposite edge
if (xLoc <= minX)
xLoc = maxX - knock;
else if (xLoc >= maxX)
xLoc = minX + knock;
if (zLoc <= minZ)
zLoc = maxZ - knock;
else if (zLoc >= maxZ)
zLoc = minZ + knock;
}
else
{
// push back inside from whichever edge was crossed
if (xLoc <= minX)
xLoc = minX + knock;
else if (xLoc >= maxX)
xLoc = maxX - knock;
if (zLoc <= minZ)
zLoc = minZ + knock;
else if (zLoc >= maxZ)
zLoc = maxZ - knock;
}
}
// round border
else
{
// algorithm originally from: http://stackoverflow.com/q/300871/3354920
// modified by Lang Lukas to support elliptical border shape
// Transform the ellipse to a circle with radius 1 (we need to transform the point the same way)
double dX = xLoc - x;
double dZ = zLoc - z;
// Distance of the untransformed point from the center
double dU = Math.sqrt(dX *dX + dZ * dZ);
// Distance of the transformed point from the center
double dT = Math.sqrt(dX *dX / radiusXSquared + dZ * dZ / radiusZSquared);
// "Correction" factor for the distances
double f = (1 / dT - knock / dU);
if (wrapping)
{
// mirror through the center instead of pushing back
xLoc = x - dX * f;
zLoc = z - dZ * f;
}
else
{
xLoc = x + dX * f;
zLoc = z + dZ * f;
}
}
int ixLoc = Location.locToBlock(xLoc);
int izLoc = Location.locToBlock(zLoc);
int icxLoc = CoordXZ.blockToChunk(ixLoc);
int icZLoc = CoordXZ.blockToChunk(izLoc);
// Make sure the chunk we're checking in is actually loaded
// TODO: should this be here?
Chunk tChunk = loc.world.getChunkFromBlockCoords(ixLoc, izLoc);
if (!tChunk.isChunkLoaded)
loc.world.theChunkProviderServer.loadChunk(icxLoc, icZLoc);
yLoc = getSafeY(loc.world, ixLoc, Location.locToBlock(yLoc), izLoc, flying);
if (yLoc == -1)
return null;
// center the player on the destination block
return new Location(loc.world, Math.floor(xLoc) + 0.5, yLoc, Math.floor(zLoc) + 0.5, loc.yaw, loc.pitch);
}
public Location correctedPosition(Location loc, boolean round)
{
return correctedPosition(loc, round, false);
}
public Location correctedPosition(Location loc)
{
return correctedPosition(loc, Config.getShapeRound(), false);
}
//these material IDs are acceptable for places to teleport player; breathable blocks and water
public static final LinkedHashSet<Integer> safeOpenBlocks = new LinkedHashSet<>(Arrays.asList(
new Integer[] {0, 6, 8, 9, 27, 28, 30, 31, 32, 37, 38, 39, 40, 50, 55, 59, 63, 64, 65, 66, 68, 69, 70, 71, 72, 75, 76, 77, 78, 83, 90, 93, 94, 96, 104, 105, 106, 115, 131, 132, 141, 142, 149, 150, 157, 171}
));
//these material IDs are ones we don't want to drop the player onto, like cactus or lava or fire or activated Ender portal
public static final LinkedHashSet<Integer> painfulBlocks = new LinkedHashSet<>(Arrays.asList(
new Integer[] {10, 11, 51, 81, 119}
));
// check if a particular spot consists of 2 breathable blocks over something relatively solid
private boolean isSafeSpot(WorldServer world, int X, int Y, int Z, boolean flying)
{
boolean safe = safeOpenBlocks.contains( Worlds.getBlockID(world, X, Y, Z) ) // target block open and safe
&& safeOpenBlocks.contains( Worlds.getBlockID(world, X, Y + 1, Z) ); // above target block open and safe
// a flying player doesn't need ground beneath them
if (!safe || flying)
return safe;
int below = Worlds.getBlockID(world, X, Y - 1, Z);
return
(!safeOpenBlocks.contains(below) || below == 8 || below == 9) // below target block not open/breathable (so presumably solid), or is water
&& !painfulBlocks.contains(below); // below target block not painful
}
// lowest Y level ever considered for a safe spot
private static final int limBot = 1;
// find closest safe Y position from the starting position
private double getSafeY(WorldServer world, int X, int Y, int Z, boolean flying)
{
final int limTop = world.getHeight() - 2;
// Expanding Y search method adapted from Acru's code in the Nether plugin
// (scan one step down and one step up per iteration, widening outward)
for(int y1 = Y, y2 = Y; (y1 > limBot) || (y2 < limTop); y1--, y2++)
{
// Look below.
if (y1 > limBot)
if ( isSafeSpot(world, X, y1, Z, flying) )
return (double) y1;
// Look above.
if (y2 < limTop && y2 != y1)
if ( isSafeSpot(world, X, y2, Z, flying) )
return (double) y2;
}
// no safe Y location?!?!? Must be a rare spot in a Nether world or something
return -1.0;
}
// NOTE(review): equality is defined by center and radii only — shape override
// and wrapping flag are deliberately (presumably) not compared; confirm before
// relying on equals() to distinguish such borders.
@Override
public boolean equals(Object obj)
{
if (this == obj)
return true;
else if ( obj == null || obj.getClass() != this.getClass() )
return false;
BorderData test = (BorderData) obj;
return test.x == this.x
&& test.z == this.z
&& test.radiusX == this.radiusX
&& test.radiusZ == this.radiusZ;
}
@Override
public int hashCode()
{
return ((int) (this.x * 10) << 4)
+ (int) this.z
+ (this.radiusX << 2)
+ (this.radiusZ << 3);
}
}
| |
package de.kuschku.steamaudio.lib.util;
import de.kuschku.steamaudio.lib.error.IPLerror;
import de.kuschku.steamaudio.lib.audiobuffer.IPLAmbisonicsOrdering;
import de.kuschku.steamaudio.lib.compute.IPLComputeDeviceType;
import de.kuschku.steamaudio.lib.environment.Environment;
import de.kuschku.steamaudio.lib.geometry.IPLSphere;
import de.kuschku.steamaudio.lib.geometry.IPLVector3;
import de.kuschku.steamaudio.lib.probes.ProbeBatch;
import de.kuschku.steamaudio.lib.probes.ProbeBox;
import de.kuschku.steamaudio.lib.probes.ProbeManager;
import de.kuschku.steamaudio.lib.scene.*;
import de.kuschku.steamaudio.lib.simulation.SimulationData;
import org.bridj.BridJ;
import org.bridj.CRuntime;
import org.bridj.IntValuedEnum;
import org.bridj.Pointer;
import org.bridj.ann.Library;
import org.bridj.ann.Ptr;
import org.bridj.ann.Runtime;
import java.nio.ByteBuffer;
import static de.kuschku.steamaudio.lib.util.NativeUtil.pointerToString;
import static de.kuschku.steamaudio.lib.util.PointerHandle.reference;
import static org.bridj.Pointer.*;
@SuppressWarnings("unused")
public class SteamAudio {
@Library("steamaudio")
@Runtime(CRuntime.class)
/** BridJ binding for the Steam Audio binaural renderer destroy call. */
public static class binauralrenderer {
static {
BridJ.register();
}
/** Destroys the renderer referenced by the given handle pointer. */
public static void iplDestroyBinauralRenderer(Pointer<Pointer<?>> renderer) {
iplDestroyBinauralRenderer(getPeer(renderer));
}
protected native static void iplDestroyBinauralRenderer(@Ptr long renderer);
}
@Library("steamaudio")
@Runtime(CRuntime.class)
/** BridJ binding for the Steam Audio panning effect destroy call. */
public static class panningeffect {
static {
BridJ.register();
}
/** Destroys the panning effect referenced by the given handle pointer. */
public static void iplDestroyPanningEffect(Pointer<Pointer<?>> effect) {
iplDestroyPanningEffect(getPeer(effect));
}
protected native static void iplDestroyPanningEffect(@Ptr long effect);
}
@Library("steamaudio")
@Runtime(CRuntime.class)
/** BridJ binding for the Steam Audio binaural effect destroy call. */
public static class binauraleffect {
static {
BridJ.register();
}
/** Destroys the binaural effect referenced by the given handle pointer. */
public static void iplDestroyBinauralEffect(Pointer<Pointer<?>> effect) {
iplDestroyBinauralEffect(getPeer(effect));
}
protected native static void iplDestroyBinauralEffect(@Ptr long effect);
}
@Library("steamaudio")
@Runtime(CRuntime.class)
/** BridJ binding for the Steam Audio virtual surround effect destroy call. */
public static class virtualsurround {
static {
BridJ.register();
}
/** Destroys the virtual surround effect referenced by the given handle pointer. */
public static void iplDestroyVirtualSurroundEffect(Pointer<Pointer<?>> effect) {
iplDestroyVirtualSurroundEffect(getPeer(effect));
}
protected native static void iplDestroyVirtualSurroundEffect(@Ptr long effect);
}
@Library("steamaudio")
@Runtime(CRuntime.class)
/** BridJ binding for the Steam Audio Ambisonics panning effect destroy call. */
public static class ambisonicspanning {
static {
BridJ.register();
}
/** Destroys the Ambisonics panning effect referenced by the given handle pointer. */
public static void iplDestroyAmbisonicsPanningEffect(Pointer<Pointer<?>> effect) {
iplDestroyAmbisonicsPanningEffect(getPeer(effect));
}
protected native static void iplDestroyAmbisonicsPanningEffect(@Ptr long effect);
}
@Library("steamaudio")
@Runtime(CRuntime.class)
/** BridJ binding for the Steam Audio Ambisonics binaural effect destroy call. */
public static class ambisonics {
static {
BridJ.register();
}
/** Destroys the Ambisonics binaural effect referenced by the given handle pointer. */
public static void iplDestroyAmbisonicsBinauralEffect(Pointer<Pointer<?>> effect) {
iplDestroyAmbisonicsBinauralEffect(getPeer(effect));
}
protected native static void iplDestroyAmbisonicsBinauralEffect(@Ptr long effect);
}
@Library("steamaudio")
@Runtime(CRuntime.class)
/** BridJ binding for the Steam Audio environmental renderer destroy call. */
public static class envrenderer {
static {
BridJ.register();
}
/** Destroys the environmental renderer referenced by the given handle pointer. */
public static void iplDestroyEnvironmentalRenderer(Pointer<Pointer<?>> renderer) {
iplDestroyEnvironmentalRenderer(getPeer(renderer));
}
protected native static void iplDestroyEnvironmentalRenderer(@Ptr long renderer);
}
@Library("steamaudio")
@Runtime(CRuntime.class)
/** BridJ bindings for Steam Audio simulation-data queries and teardown. */
public static class simulation {
static {
BridJ.register();
}
/** Destroys the simulation data referenced by the given handle pointer. */
public static void iplDestroySimulationData(Pointer<Pointer<?>> simulationData) {
iplDestroySimulationData(getPeer(simulationData));
}
protected native static void iplDestroySimulationData(@Ptr long simulationData);
/** @return number of impulse-response samples in the simulation data. */
public static int iplGetNumIrSamples(SimulationData simulationData) {
return iplGetNumIrSamples(getPeer(reference(simulationData)));
}
protected native static int iplGetNumIrSamples(@Ptr long simulationData);
/** @return number of impulse-response channels in the simulation data. */
public static int iplGetNumIrChannels(SimulationData simulationData) {
return iplGetNumIrChannels(getPeer(reference(simulationData)));
}
protected native static int iplGetNumIrChannels(@Ptr long simulationData);
/** Copies one channel of one source's simulation result into the native buffer. */
public static void iplGetSimulationResult(SimulationData simulationData, int sourceIndex, int channel,
Pointer<Float> buffer) {
iplGetSimulationResult(getPeer(reference(simulationData)), sourceIndex, channel, getPeer(buffer));
}
protected native static void iplGetSimulationResult(@Ptr long simulationData, int sourceIndex, int channel,
@Ptr long buffer);
}
@Library("steamaudio")
@Runtime(CRuntime.class)
/** Registers the steamaudio library for the direct-sound module; no functions bound here. */
public static class directsound {
static {
BridJ.register();
}
}
@Library("steamaudio")
@Runtime(CRuntime.class)
/** BridJ bindings for Steam Audio convolution effect teardown and naming. */
public static class conveffect {
static {
BridJ.register();
}
/** Destroys the convolution effect referenced by the given handle pointer. */
public static void iplDestroyConvolutionEffect(Pointer<Pointer<?>> effect) {
iplDestroyConvolutionEffect(getPeer(effect));
}
protected native static void iplDestroyConvolutionEffect(@Ptr long effect);
/** Assigns a name to the convolution effect (marshalled to a native string). */
public static void iplSetConvolutionEffectName(PointerHandle effect, String name) {
iplSetConvolutionEffectName(getPeer(reference(effect)), getPeer(pointerToString(name)));
}
protected native static void iplSetConvolutionEffectName(@Ptr long effect, @Ptr long name);
}
@Library("steamaudio")
@Runtime(CRuntime.class)
/**
* BridJ bindings for Steam Audio probe boxes, probe batches and the probe
* manager: create/destroy, save/load to byte buffers, and batch membership.
* Each public overload unwraps the Java-side handle to a raw peer pointer and
* forwards to the corresponding native ipl* entry point.
*/
public static class probes {
static {
BridJ.register();
}
// --- probe box lifecycle and (de)serialization ---
public static void iplDestroyProbeBox(Pointer<Pointer<?>> probeBox) {
iplDestroyProbeBox(getPeer(probeBox));
}
protected native static void iplDestroyProbeBox(@Ptr long probeBox);
/** Fills probeSpheres from the box; returns a count (see Steam Audio docs for exact contract). */
public static int iplGetProbeSpheres(ProbeBox probeBox, Pointer<IPLSphere> probeSpheres) {
return iplGetProbeSpheres(getPeer(reference(probeBox)), getPeer(probeSpheres));
}
protected native static int iplGetProbeSpheres(@Ptr long probeBox, @Ptr long probeSpheres);
public static int iplSaveProbeBox(ProbeBox probeBox, ByteBuffer data) {
return iplSaveProbeBox(getPeer(reference(probeBox)), getPeer(pointerToBytes(data)));
}
protected native static int iplSaveProbeBox(@Ptr long probeBox, @Ptr long data);
/** Loads a probe box from serialized bytes; the new handle is written into probeBox. */
public static IPLerror iplLoadProbeBox(ByteBuffer data, int size, Pointer<Pointer<?>> probeBox) {
return IPLerror.fromValue(iplLoadProbeBox(getPeer(pointerToBytes(data)), size, getPeer(probeBox)));
}
protected native static int iplLoadProbeBox(@Ptr long data, int size, @Ptr long probeBox);
// --- probe batch lifecycle, membership and (de)serialization ---
public static IPLerror iplCreateProbeBatch(Pointer<Pointer<?>> probeBatch) {
return IPLerror.fromValue(iplCreateProbeBatch(getPeer(probeBatch)));
}
protected native static int iplCreateProbeBatch(@Ptr long probeBatch);
public static void iplDestroyProbeBatch(Pointer<Pointer<?>> probeBatch) {
iplDestroyProbeBatch(getPeer(probeBatch));
}
protected native static void iplDestroyProbeBatch(@Ptr long probeBatch);
/** Adds one probe (by index within the box) to the batch. */
public static void iplAddProbeToBatch(ProbeBatch probeBatch, ProbeBox probeBox, int probeIndex) {
iplAddProbeToBatch(getPeer(reference(probeBatch)), getPeer(reference(probeBox)), probeIndex);
}
protected native static void iplAddProbeToBatch(@Ptr long probeBatch, @Ptr long probeBox, int probeIndex);
/** Finalizes the batch; presumably required before use — confirm with the Steam Audio API docs. */
public static void iplFinalizeProbeBatch(ProbeBatch probeBatch) {
iplFinalizeProbeBatch(getPeer(reference(probeBatch)));
}
protected native static void iplFinalizeProbeBatch(@Ptr long probeBatch);
public static int iplSaveProbeBatch(ProbeBatch probeBatch, ByteBuffer data) {
return iplSaveProbeBatch(getPeer(reference(probeBatch)), getPeer(pointerToBytes(data)));
}
protected native static int iplSaveProbeBatch(@Ptr long probeBatch, @Ptr long data);
public static IPLerror iplLoadProbeBatch(ByteBuffer data, int size, Pointer<Pointer<?>> probeBatch) {
return IPLerror.fromValue(iplLoadProbeBatch(getPeer(pointerToBytes(data)), size, getPeer(probeBatch)));
}
protected native static int iplLoadProbeBatch(@Ptr long data, int size, @Ptr long probeBatch);
// --- probe manager lifecycle and batch registration ---
public static IPLerror iplCreateProbeManager(Pointer<Pointer<?>> probeManager) {
return IPLerror.fromValue(iplCreateProbeManager(getPeer(probeManager)));
}
protected native static int iplCreateProbeManager(@Ptr long probeManager);
public static void iplDestroyProbeManager(Pointer<Pointer<?>> probeManager) {
iplDestroyProbeManager(getPeer(probeManager));
}
protected native static void iplDestroyProbeManager(@Ptr long probeManager);
public static void iplAddProbeBatch(ProbeManager probeManager, ProbeBatch probeBatch) {
iplAddProbeBatch(getPeer(reference(probeManager)), getPeer(reference(probeBatch)));
}
protected native static void iplAddProbeBatch(@Ptr long probeManager, @Ptr long probeBatch);
public static void iplRemoveProbeBatch(ProbeManager probeManager, ProbeBatch probeBatch) {
iplRemoveProbeBatch(getPeer(reference(probeManager)), getPeer(reference(probeBatch)));
}
protected native static void iplRemoveProbeBatch(@Ptr long probeManager, @Ptr long probeBatch);
}
    /**
     * BridJ bindings for Steam Audio's baking API. Wrappers convert BridJ
     * handles and Java strings to raw peer addresses for the native calls.
     * Generated binding code: method names must match exported C symbols.
     */
    @Library("steamaudio")
    @Runtime(CRuntime.class)
    public static class baking {
        static {
            BridJ.register();
        }
        /** Direct native call; takes no arguments. */
        public static native void iplCancelBake();
        public static void iplDeleteBakedDataByName(ProbeBox probeBox, String sourceName) {
            iplDeleteBakedDataByName(getPeer(reference(probeBox)), getPeer(pointerToString(sourceName)));
        }
        protected native static void iplDeleteBakedDataByName(@Ptr long probeBox, @Ptr long sourceName);
        // NOTE(review): the returned int is a size — units (bytes vs. entries)
        // are not visible here; confirm against the Steam Audio C API docs.
        public static int iplGetBakedDataSizeByName(ProbeBox probeBox, String sourceName) {
            return iplGetBakedDataSizeByName(getPeer(reference(probeBox)), getPeer(pointerToString(sourceName)));
        }
        protected native static int iplGetBakedDataSizeByName(@Ptr long probeBox, @Ptr long sourceName);
    }
    /**
     * BridJ bindings for Steam Audio's Ambisonics-rotator functions. The
     * create wrapper passes the ordering enum's integer value and the address
     * of an out-parameter handle slot; errors are mapped to {@link IPLerror}.
     */
    @Library("steamaudio")
    @Runtime(CRuntime.class)
    public static class audiobuffer {
        static {
            BridJ.register();
        }
        public static IPLerror iplCreateAmbisonicsRotator(IPLAmbisonicsOrdering order, Pointer<Pointer<?>> rotator) {
            return IPLerror.fromValue(iplCreateAmbisonicsRotator((int) order.value(), getPeer(rotator)));
        }
        protected native static int iplCreateAmbisonicsRotator(int order, @Ptr long rotator);
        public static void iplDestroyAmbisonicsRotator(Pointer<Pointer<?>> rotator) {
            iplDestroyAmbisonicsRotator(getPeer(rotator));
        }
        protected native static void iplDestroyAmbisonicsRotator(@Ptr long rotator);
    }
    /**
     * BridJ bindings for Steam Audio's environment handle: destruction and
     * the per-environment bounce-count setting.
     */
    @Library("steamaudio")
    @Runtime(CRuntime.class)
    public static class environment {
        static {
            BridJ.register();
        }
        public static void iplDestroyEnvironment(Pointer<Pointer<?>> environment) {
            iplDestroyEnvironment(getPeer(environment));
        }
        protected native static void iplDestroyEnvironment(@Ptr long environment);
        public static void iplSetNumBounces(Environment environment, int numBounces) {
            iplSetNumBounces(getPeer(reference(environment)), numBounces);
        }
        protected native static void iplSetNumBounces(@Ptr long environment, int numBounces);
    }
    /**
     * Placeholder for Steam Audio geometry bindings. No entry points are
     * bound yet; the class only performs BridJ registration.
     */
    @Library("steamaudio")
    @Runtime(CRuntime.class)
    public static class geometry {
        static {
            BridJ.register();
        }
    }
    /**
     * BridJ bindings for Steam Audio compute-device creation/destruction.
     * The device type enum is flattened to its integer value for the native
     * call; results are mapped to {@link IPLerror}.
     */
    @Library("steamaudio")
    @Runtime(CRuntime.class)
    public static class compute {
        static {
            BridJ.register();
        }
        public static IPLerror iplCreateComputeDevice(IntValuedEnum<IPLComputeDeviceType> deviceType,
                int numComputeUnits, Pointer<Pointer<?>> device) {
            return IPLerror
                    .fromValue(iplCreateComputeDevice((int) deviceType.value(), numComputeUnits, getPeer(device)));
        }
        protected native static int iplCreateComputeDevice(int deviceType, int numComputeUnits, @Ptr long device);
        public static void iplDestroyComputeDevice(Pointer<Pointer<?>> device) {
            iplDestroyComputeDevice(getPeer(device));
        }
        protected native static void iplDestroyComputeDevice(@Ptr long device);
    }
    /**
     * BridJ bindings for Steam Audio's scene / static-mesh API. Wrappers
     * convert BridJ handles, callbacks and strings to raw peer addresses;
     * callback objects are passed via {@code getPointer(...)}. Generated
     * binding code: method names must match exported C symbols.
     */
    @Library("steamaudio")
    @Runtime(CRuntime.class)
    public static class scene {
        static {
            BridJ.register();
        }
        public static void iplDestroyScene(Pointer<Pointer<?>> scene) {
            iplDestroyScene(getPeer(scene));
        }
        protected native static void iplDestroyScene(@Ptr long scene);
        // NOTE(review): callback lifetime is the caller's concern — BridJ
        // callback objects must stay referenced while the native side may
        // invoke them; nothing here retains them.
        public static void iplSetRayTracerCallbacks(Scene scene, IPLClosestHitCallback closestHitCallback,
                IPLAnyHitCallback anyHitCallback, Pointer<?> userData) {
            iplSetRayTracerCallbacks(getPeer(reference(scene)), getPeer(getPointer(closestHitCallback)),
                    getPeer(getPointer(anyHitCallback)), getPeer(userData));
        }
        protected native static void iplSetRayTracerCallbacks(@Ptr long scene, @Ptr long closestHitCallback,
                @Ptr long anyHitCallback, @Ptr long userData);
        public static IPLerror iplCreateStaticMesh(Scene scene, int numVertices, int numTriangles,
                Pointer<Pointer<?>> staticMesh) {
            return IPLerror.fromValue(
                    iplCreateStaticMesh(getPeer(reference(scene)), numVertices, numTriangles, getPeer(staticMesh)));
        }
        protected native static int iplCreateStaticMesh(@Ptr long scene, int numVertices, int numTriangles,
                @Ptr long staticMesh);
        public static void iplDestroyStaticMesh(Pointer<Pointer<?>> staticMesh) {
            iplDestroyStaticMesh(getPeer(staticMesh));
        }
        protected native static void iplDestroyStaticMesh(@Ptr long staticMesh);
        public static void iplSetStaticMeshVertices(Scene scene, StaticMesh staticMesh, Pointer<IPLVector3> vertices) {
            iplSetStaticMeshVertices(getPeer(reference(scene)), getPeer(reference(staticMesh)), getPeer(vertices));
        }
        protected native static void iplSetStaticMeshVertices(@Ptr long scene, @Ptr long staticMesh,
                @Ptr long vertices);
        public static void iplSetStaticMeshTriangles(Scene scene, StaticMesh staticMesh,
                Pointer<IPLTriangle> triangles) {
            iplSetStaticMeshTriangles(getPeer(reference(scene)), getPeer(reference(staticMesh)), getPeer(triangles));
        }
        protected native static void iplSetStaticMeshTriangles(@Ptr long scene, @Ptr long staticMesh,
                @Ptr long triangles);
        public static void iplSetStaticMeshMaterials(Scene scene, StaticMesh staticMesh,
                Pointer<Integer> materialIndices) {
            iplSetStaticMeshMaterials(getPeer(reference(scene)), getPeer(reference(staticMesh)),
                    getPeer(materialIndices));
        }
        protected native static void iplSetStaticMeshMaterials(@Ptr long scene, @Ptr long staticMesh,
                @Ptr long materialIndices);
        public static void iplFinalizeScene(Scene scene, IPLFinalizeSceneProgressCallback progressCallback) {
            iplFinalizeScene(getPeer(reference(scene)), getPeer(getPointer(progressCallback)));
        }
        protected native static void iplFinalizeScene(@Ptr long scene, @Ptr long progressCallback);
        public static IPLerror iplSaveFinalizedScene(Scene scene, String fileName) {
            return IPLerror
                    .fromValue(iplSaveFinalizedScene(getPeer(reference(scene)), getPeer(pointerToString(fileName))));
        }
        protected native static int iplSaveFinalizedScene(@Ptr long scene, @Ptr long fileName);
        public static void iplDumpSceneToObjFile(Scene scene, String fileBaseName) {
            iplDumpSceneToObjFile(getPeer(reference(scene)), getPeer(pointerToString(fileBaseName)));
        }
        protected native static void iplDumpSceneToObjFile(@Ptr long scene, @Ptr long fileBaseName);
    }
    /**
     * BridJ bindings for Steam Audio's crash-handler setup/teardown. Both
     * entry points are direct native calls with no argument marshalling.
     */
    @Library("steamaudio")
    @Runtime(CRuntime.class)
    public static class context {
        static {
            BridJ.register();
        }
        public static native void iplInitializeCrashHandler();
        public static native void iplTerminateCrashHandler();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.URI;
import java.net.URISyntaxException;
import javax.jms.ExceptionListener;
import javax.jms.JMSException;
import javax.jms.Session;
import org.apache.activemq.broker.BrokerRegistry;
import org.apache.activemq.broker.BrokerService;
import org.apache.activemq.broker.TransportConnector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Tests for {@link ActiveMQConnectionFactory}: configuring connections through
 * broker-URI query parameters (client ID prefix, async send, audit depth,
 * redelivery policy), factory serializability, exception listeners, and the
 * lifecycle of embedded {@code vm://} brokers.
 */
public class ActiveMQConnectionFactoryTest extends CombinationTestSupport {
    private static final Logger LOG = LoggerFactory.getLogger(ActiveMQConnectionFactoryTest.class);
    // Opened/started by individual tests; cleaned up best-effort in tearDown().
    private ActiveMQConnection connection;
    private BrokerService broker;
    /** {@code jms.clientIDPrefix} on the URI should seed generated client IDs. */
    public void testUseURIToSetUseClientIDPrefixOnConnectionFactory() throws URISyntaxException, JMSException {
        ActiveMQConnectionFactory cf = new ActiveMQConnectionFactory(
            "vm://localhost?jms.clientIDPrefix=Cheese");
        assertEquals("Cheese", cf.getClientIDPrefix());
        connection = (ActiveMQConnection)cf.createConnection();
        connection.start();
        String clientID = connection.getClientID();
        LOG.info("Got client ID: " + clientID);
        assertTrue("should start with Cheese! but was: " + clientID, clientID.startsWith("Cheese"));
    }
    @Override
    public void tearDown() throws Exception {
        // Try our best to close any previously opened connection.
        try {
            connection.close();
        } catch (Throwable ignore) {
        }
        // Try our best to stop any previously started broker.
        try {
            broker.stop();
        } catch (Throwable ignore) {
        }
    }
    /** {@code jms.*} URI options configure the factory and are stripped from the broker URL. */
    public void testUseURIToSetOptionsOnConnectionFactory() throws URISyntaxException, JMSException {
        ActiveMQConnectionFactory cf = new ActiveMQConnectionFactory("vm://localhost?jms.useAsyncSend=true");
        assertTrue(cf.isUseAsyncSend());
        // the broker url has been adjusted.
        assertEquals("vm://localhost", cf.getBrokerURL());
        cf = new ActiveMQConnectionFactory("vm://localhost?jms.useAsyncSend=false");
        assertFalse(cf.isUseAsyncSend());
        // the broker url has been adjusted.
        assertEquals("vm://localhost", cf.getBrokerURL());
        cf = new ActiveMQConnectionFactory("vm:(broker:()/localhost)?jms.useAsyncSend=true");
        assertTrue(cf.isUseAsyncSend());
        // the broker url has been adjusted.
        assertEquals("vm:(broker:()/localhost)", cf.getBrokerURL());
        cf = new ActiveMQConnectionFactory("vm://localhost?jms.auditDepth=5000");
        assertEquals(5000, cf.getAuditDepth());
    }
    /** Redelivery policy set via URI must propagate factory -> connection -> consumer. */
    public void testUseURIToConfigureRedeliveryPolicy() throws URISyntaxException, JMSException {
        ActiveMQConnectionFactory cf = new ActiveMQConnectionFactory(
            "vm://localhost?jms.redeliveryPolicy.maximumRedeliveries=2");
        assertEquals("connection redeliveries", 2, cf.getRedeliveryPolicy().getMaximumRedeliveries());
        ActiveMQConnection connection = (ActiveMQConnection)cf.createConnection();
        assertEquals("connection redeliveries", 2, connection.getRedeliveryPolicy().getMaximumRedeliveries());
        Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
        ActiveMQMessageConsumer consumer = (ActiveMQMessageConsumer)session.createConsumer(session
            .createQueue("FOO.BAR"));
        assertEquals("consumer redeliveries", 2, consumer.getRedeliveryPolicy().getMaximumRedeliveries());
        connection.close();
    }
    /** Embedded vm:// broker should be created lazily and destroyed on close. */
    public void testCreateVMConnectionWithEmbdeddBroker() throws URISyntaxException, JMSException {
        ActiveMQConnectionFactory cf = new ActiveMQConnectionFactory("vm://myBroker2?broker.persistent=false");
        // Make sure the broker is not created until the connection is
        // instantiated.
        assertNull(BrokerRegistry.getInstance().lookup("myBroker2"));
        connection = (ActiveMQConnection)cf.createConnection();
        // This should create the connection.
        assertNotNull(connection);
        // Verify the broker was created.
        assertNotNull(BrokerRegistry.getInstance().lookup("myBroker2"));
        connection.close();
        // Verify the broker was destroyed.
        assertNull(BrokerRegistry.getInstance().lookup("myBroker2"));
    }
    public void testGetBrokerName() throws URISyntaxException, JMSException {
        ActiveMQConnectionFactory cf = new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false");
        connection = (ActiveMQConnection)cf.createConnection();
        connection.start();
        String brokerName = connection.getBrokerName();
        LOG.info("Got broker name: " + brokerName);
        assertNotNull("No broker name available!", brokerName);
    }
    /** Port 0 lets the OS allocate; the publishable connect string carries the real port. */
    public void testCreateTcpConnectionUsingAllocatedPort() throws Exception {
        assertCreateConnection("tcp://localhost:0?wireFormat.tcpNoDelayEnabled=true");
    }
    public void testCreateTcpConnectionUsingKnownPort() throws Exception {
        assertCreateConnection("tcp://localhost:61610?wireFormat.tcpNoDelayEnabled=true");
    }
    /** URI path segment ("/localhost:51610") pins the client's local address/port. */
    public void testCreateTcpConnectionUsingKnownLocalPort() throws Exception {
        broker = new BrokerService();
        broker.setPersistent(false);
        broker.addConnector("tcp://localhost:61610?wireFormat.tcpNoDelayEnabled=true");
        broker.start();
        // This should create the connection.
        ActiveMQConnectionFactory cf = new ActiveMQConnectionFactory("tcp://localhost:61610/localhost:51610");
        connection = (ActiveMQConnection)cf.createConnection();
        assertNotNull(connection);
        connection.close();
        broker.stop();
    }
    /** With create=false the vm transport must not auto-start a broker, so connect fails. */
    public void testConnectionFailsToConnectToVMBrokerThatIsNotRunning() throws Exception {
        ActiveMQConnectionFactory factory = new ActiveMQConnectionFactory("vm://localhost?create=false");
        try {
            factory.createConnection();
            fail("Expected connection failure.");
        } catch (JMSException e) {
        }
    }
    /** Round-trips the factory through Java serialization and checks state survives. */
    public void testFactorySerializable() throws Exception {
        String clientID = "TestClientID";
        ActiveMQConnectionFactory cf = new ActiveMQConnectionFactory();
        cf.setClientID(clientID);
        ByteArrayOutputStream bytesOut = new ByteArrayOutputStream();
        ObjectOutputStream objectsOut = new ObjectOutputStream(bytesOut);
        objectsOut.writeObject(cf);
        objectsOut.flush();
        byte[] data = bytesOut.toByteArray();
        ByteArrayInputStream bytesIn = new ByteArrayInputStream(data);
        ObjectInputStream objectsIn = new ObjectInputStream(bytesIn);
        cf = (ActiveMQConnectionFactory)objectsIn.readObject();
        assertEquals(cf.getClientID(), clientID);
    }
    /** A listener set on the factory is applied to connections created afterwards. */
    public void testSetExceptionListener() throws Exception {
        ActiveMQConnectionFactory cf = new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false");
        connection = (ActiveMQConnection)cf.createConnection();
        assertNull(connection.getExceptionListener());
        ExceptionListener exListener = new ExceptionListener() {
            @Override
            public void onException(JMSException arg0) {
            }
        };
        cf.setExceptionListener(exListener);
        connection.close();
        connection = (ActiveMQConnection)cf.createConnection();
        assertNotNull(connection.getExceptionListener());
        assertEquals(exListener, connection.getExceptionListener());
        connection.close();
        connection = (ActiveMQConnection)cf.createConnection();
        assertEquals(exListener, connection.getExceptionListener());
        assertEquals(exListener, cf.getExceptionListener());
        connection.close();
    }
    /** Same contract as above, for the client-internal exception listener. */
    public void testSetClientInternalExceptionListener() throws Exception {
        ActiveMQConnectionFactory cf = new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false");
        connection = (ActiveMQConnection)cf.createConnection();
        assertNull(connection.getClientInternalExceptionListener());
        ClientInternalExceptionListener listener = new ClientInternalExceptionListener() {
            @Override
            public void onException(Throwable exception) {
            }
        };
        connection.setClientInternalExceptionListener(listener);
        cf.setClientInternalExceptionListener(listener);
        connection.close();
        connection = (ActiveMQConnection)cf.createConnection();
        assertNotNull(connection.getClientInternalExceptionListener());
        assertEquals(listener, connection.getClientInternalExceptionListener());
        connection.close();
        connection = (ActiveMQConnection)cf.createConnection();
        assertEquals(listener, connection.getClientInternalExceptionListener());
        assertEquals(listener, cf.getClientInternalExceptionListener());
        connection.close();
    }
    /**
     * Starts a broker on {@code uri}, then connects using the original URI's
     * scheme/host with the actually-bound port (works around host-name
     * resolution quirks, e.g. on OS X). Leaves {@link #broker} running for
     * tearDown to stop.
     */
    protected void assertCreateConnection(String uri) throws Exception {
        // Start up a broker with a tcp connector.
        broker = new BrokerService();
        broker.setPersistent(false);
        broker.setUseJmx(false);
        TransportConnector connector = broker.addConnector(uri);
        broker.start();
        URI temp = new URI(uri);
        // URI connectURI = connector.getServer().getConnectURI();
        // TODO this sometimes fails when using the actual local host name
        URI currentURI = new URI(connector.getPublishableConnectString());
        // sometimes the actual host name doesn't work in this test case
        // e.g. on OS X so lets use the original details but just use the actual
        // port
        URI connectURI = new URI(temp.getScheme(), temp.getUserInfo(), temp.getHost(), currentURI.getPort(),
            temp.getPath(), temp.getQuery(), temp.getFragment());
        LOG.info("connection URI is: " + connectURI);
        // This should create the connection.
        ActiveMQConnectionFactory cf = new ActiveMQConnectionFactory(connectURI);
        connection = (ActiveMQConnection)cf.createConnection();
        assertNotNull(connection);
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.util.newProjectWizard;
import com.intellij.facet.impl.ui.libraries.LibraryCompositionSettings;
import com.intellij.facet.impl.ui.libraries.LibraryOptionsPanel;
import com.intellij.facet.ui.FacetBasedFrameworkSupportProvider;
import com.intellij.framework.FrameworkGroup;
import com.intellij.framework.FrameworkVersion;
import com.intellij.framework.addSupport.FrameworkSupportInModuleConfigurable;
import com.intellij.framework.addSupport.FrameworkSupportInModuleProvider;
import com.intellij.framework.addSupport.FrameworkVersionListener;
import com.intellij.ide.util.frameworkSupport.FrameworkSupportConfigurable;
import com.intellij.ide.util.frameworkSupport.FrameworkSupportProvider;
import com.intellij.ide.util.frameworkSupport.FrameworkSupportUtil;
import com.intellij.ide.util.newProjectWizard.impl.FrameworkSupportCommunicator;
import com.intellij.ide.util.newProjectWizard.impl.FrameworkSupportModelBase;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.roots.IdeaModifiableModelsProvider;
import com.intellij.openapi.roots.ModifiableRootModel;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.roots.ui.configuration.projectRoot.LibrariesContainer;
import com.intellij.openapi.ui.Splitter;
import com.intellij.openapi.ui.VerticalFlowLayout;
import com.intellij.openapi.util.Comparing;
import com.intellij.ui.CheckedTreeNode;
import com.intellij.ui.ScrollPaneFactory;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.DefaultTreeModel;
import java.awt.*;
import java.util.*;
import java.util.List;
/**
* @author nik
*/
public class AddSupportForFrameworksPanel implements Disposable {
  private static final Logger LOG = Logger.getInstance("#com.intellij.ide.util.newProjectWizard.AddSupportForFrameworksStep");
  // Card shown in the options panel when nothing is selected.
  @NonNls private static final String EMPTY_CARD = "empty";
  // Bound from the .form file / UI designer — assigned reflectively, not in code here.
  private JPanel myMainPanel;
  private JPanel myFrameworksPanel;
  private JLabel myLabel;
  private List<FrameworkSupportInModuleProvider> myProviders;
  // Roots of the framework tree; rebuilt by createNodes() via setProviders().
  private List<FrameworkSupportNodeBase> myRoots;
  private final LibrariesContainer myLibrariesContainer;
  private final FrameworkSupportModelBase myModel;
  // CardLayout panel: one card per framework/group id plus EMPTY_CARD.
  private final JPanel myOptionsPanel;
  private final FrameworksTree myFrameworksTree;
  // Lazily-created options components, keyed by tree node (see initializeOptionsPanel).
  private final Map<FrameworkSupportNode, FrameworkSupportOptionsComponent> myInitializedOptionsComponents = new HashMap<FrameworkSupportNode, FrameworkSupportOptionsComponent>();
  private final Map<FrameworkGroup<?>, JPanel> myInitializedGroupPanels = new HashMap<FrameworkGroup<?>, JPanel>();
  // Previously selected node; used to flush its library options on selection change.
  private FrameworkSupportNodeBase myLastSelectedNode;
  // Nodes whose ids were passed as "associated": shown above the tree, not inside it.
  private Collection<FrameworkSupportNodeBase> myAssociatedFrameworks;
  private final JPanel myAssociatedFrameworksPanel = new JPanel(new VerticalFlowLayout(VerticalFlowLayout.TOP, 0, 0, true, false));
  /**
   * Builds the panel: a frameworks tree (plus the associated-frameworks strip)
   * on one side of a splitter and a CardLayout options panel on the other.
   * Note: setProviders(...) is invoked before the selection listener is
   * attached, so the initial population does not trigger onSelectionChanged().
   */
  public AddSupportForFrameworksPanel(final List<FrameworkSupportInModuleProvider> providers,
                                      final FrameworkSupportModelBase model, boolean vertical) {
    myModel = model;
    myLibrariesContainer = model.getLibrariesContainer();
    myLabel.setVisible(!vertical);
    Splitter splitter = vertical ? new Splitter(true, 0.6f) : new Splitter(false, 0.3f, 0.1f, 0.7f);
    myFrameworksTree = new FrameworksTree(model) {
      @Override
      protected void onNodeStateChanged(CheckedTreeNode node) {
        // Only framework nodes react to check-state changes (group nodes don't).
        if (!(node instanceof FrameworkSupportNode)) return;
        final FrameworkSupportNode frameworkSupportNode = (FrameworkSupportNode)node;
        // Refresh the options card only if the toggled node is the visible one.
        if (frameworkSupportNode == getSelectedNode()) {
          updateOptionsPanel();
        }
        final FrameworkSupportInModuleConfigurable configurable = frameworkSupportNode.getConfigurable();
        configurable.onFrameworkSelectionChanged(node.isChecked());
        myModel.onFrameworkSelectionChanged(frameworkSupportNode);
        onFrameworkStateChanged();
      }
    };
    model.addFrameworkVersionListener(new FrameworkVersionListener() {
      @Override
      public void versionChanged(FrameworkVersion version) {
        // Repaint the selected node so its presentation reflects the new version.
        ((DefaultTreeModel)myFrameworksTree.getModel()).nodeChanged(getSelectedNode());
      }
    }, this);
    setProviders(providers, Collections.<String>emptySet(), Collections.<String>emptySet());
    myFrameworksTree.addTreeSelectionListener(new TreeSelectionListener() {
      public void valueChanged(TreeSelectionEvent e) {
        onSelectionChanged();
      }
    });
    JPanel treePanel = new JPanel(new BorderLayout());
    treePanel.add(ScrollPaneFactory.createScrollPane(myFrameworksTree), BorderLayout.CENTER);
    treePanel.add(myAssociatedFrameworksPanel, BorderLayout.NORTH);
    splitter.setFirstComponent(treePanel);
    myOptionsPanel = new JPanel(new CardLayout());
    myOptionsPanel.add(EMPTY_CARD, new JPanel());
    splitter.setSecondComponent(myOptionsPanel);
    myFrameworksPanel.add(splitter, BorderLayout.CENTER);
  }
public void setProviders(List<FrameworkSupportInModuleProvider> providers, Set<String> associated, Set<String> preselected) {
myProviders = providers;
myAssociatedFrameworks = createNodes(myProviders, associated);
for (FrameworkSupportNodeBase node : myRoots) {
if (preselected.contains(node.getId())) {
node.setChecked(true);
}
}
setAssociatedFrameworks();
myFrameworksTree.setRoots(myRoots);
myFrameworksTree.setSelectionRow(0);
}
public void setAssociatedFrameworks() {
myAssociatedFrameworksPanel.setVisible(!myAssociatedFrameworks.isEmpty());
myAssociatedFrameworksPanel.removeAll();
for (FrameworkSupportNodeBase nodeBase : myAssociatedFrameworks) {
if (nodeBase instanceof FrameworkSupportNode) {
((FrameworkSupportNode)nodeBase).getConfigurable().onFrameworkSelectionChanged(true);
FrameworkSupportOptionsComponent component = initializeOptionsPanel((FrameworkSupportNode)nodeBase, false);
myAssociatedFrameworksPanel.add(component.getMainPanel());
}
else {
JPanel panel = initializeGroupPanel((FrameworkGroup<?>)nodeBase.getUserObject(), false);
myAssociatedFrameworksPanel.add(panel);
}
}
}
protected void onFrameworkStateChanged() {}
private void onSelectionChanged() {
if (!myFrameworksTree.isProcessingMouseEventOnCheckbox()) {
updateOptionsPanel();
}
final FrameworkSupportNodeBase selectedNode = getSelectedNode();
if (!Comparing.equal(selectedNode, myLastSelectedNode)) {
applyLibraryOptionsForSelected();
myLastSelectedNode = selectedNode;
}
}
  @Override
  public void dispose() {
    // Nothing to release directly; the panel exists as a Disposable parent
    // for listeners registered with it (see the constructor).
  }
private void applyLibraryOptionsForSelected() {
if (myLastSelectedNode instanceof FrameworkSupportNode) {
final FrameworkSupportOptionsComponent optionsComponent = myInitializedOptionsComponents.get((FrameworkSupportNode)myLastSelectedNode);
if (optionsComponent != null) {
final LibraryOptionsPanel optionsPanel = optionsComponent.getLibraryOptionsPanel();
if (optionsPanel != null) {
optionsPanel.apply();
}
}
}
}
private void updateOptionsPanel() {
final FrameworkSupportNodeBase node = getSelectedNode();
if (node instanceof FrameworkSupportNode) {
FrameworkSupportNode frameworkSupportNode = (FrameworkSupportNode)node;
initializeOptionsPanel(frameworkSupportNode, true);
showCard(frameworkSupportNode.getId());
UIUtil.setEnabled(myOptionsPanel, frameworkSupportNode.isChecked(), true);
frameworkSupportNode.getConfigurable().onFrameworkSelectionChanged(node.isChecked());
}
else if (node instanceof FrameworkGroupNode) {
FrameworkGroup<?> group = ((FrameworkGroupNode)node).getUserObject();
initializeGroupPanel(group, true);
showCard(group.getId());
UIUtil.setEnabled(myOptionsPanel, true, true);
}
else {
showCard(EMPTY_CARD);
}
}
private JPanel initializeGroupPanel(FrameworkGroup<?> group, boolean addToOptions) {
JPanel panel = myInitializedGroupPanels.get(group);
if (panel == null) {
FrameworkVersionComponent component = new FrameworkVersionComponent(myModel, group.getId(), group.getGroupVersions(), group.getPresentableName() + " version:");
panel = component.getMainPanel();
myInitializedGroupPanels.put(group, panel);
if (addToOptions) {
myOptionsPanel.add(group.getId(), panel);
}
}
return panel;
}
@Nullable
public FrameworkSupportNodeBase getSelectedNode() {
final FrameworkSupportNodeBase[] nodes = myFrameworksTree.getSelectedNodes(FrameworkSupportNodeBase.class, null);
return nodes.length == 1 ? nodes[0] : null;
}
private FrameworkSupportOptionsComponent initializeOptionsPanel(final FrameworkSupportNode node, boolean addToOptions) {
FrameworkSupportOptionsComponent component = myInitializedOptionsComponents.get(node);
if (component == null) {
final FrameworkSupportNodeBase parentNode = node.getParentNode();
if (parentNode instanceof FrameworkSupportNode) {
initializeOptionsPanel((FrameworkSupportNode)parentNode, addToOptions);
}
else if (parentNode instanceof FrameworkGroupNode) {
initializeGroupPanel(((FrameworkGroupNode)parentNode).getUserObject(), addToOptions);
}
component = new FrameworkSupportOptionsComponent(myModel, myLibrariesContainer, this,
node.getUserObject(), node.getConfigurable());
if (addToOptions) {
myOptionsPanel.add(node.getId(), component.getMainPanel());
}
myInitializedOptionsComponents.put(node, component);
}
return component;
}
private void showCard(String cardName) {
((CardLayout)myOptionsPanel.getLayout()).show(myOptionsPanel, cardName);
}
private List<LibraryCompositionSettings> getLibrariesCompositionSettingsList() {
List<LibraryCompositionSettings> list = new ArrayList<LibraryCompositionSettings>();
List<FrameworkSupportNode> selected = getSelectedNodes();
for (FrameworkSupportNode node : selected) {
ContainerUtil.addIfNotNull(list, getLibraryCompositionSettings(node));
}
return list;
}
@Nullable
private LibraryCompositionSettings getLibraryCompositionSettings(FrameworkSupportNode node) {
final FrameworkSupportOptionsComponent optionsComponent = myInitializedOptionsComponents.get(node);
return optionsComponent != null ? optionsComponent.getLibraryCompositionSettings() : null;
}
public boolean downloadLibraries() {
applyLibraryOptionsForSelected();
List<LibraryCompositionSettings> list = getLibrariesCompositionSettingsList();
for (LibraryCompositionSettings compositionSettings : list) {
if (!compositionSettings.downloadFiles(myMainPanel)) return false;
}
return true;
}
private Collection<FrameworkSupportNodeBase> createNodes(List<FrameworkSupportInModuleProvider> providers, Set<String> associated) {
Map<String, FrameworkSupportNode> nodes = new HashMap<String, FrameworkSupportNode>();
Map<FrameworkGroup<?>, FrameworkGroupNode> groups = new HashMap<FrameworkGroup<?>, FrameworkGroupNode>();
List<FrameworkSupportNodeBase> roots = new ArrayList<FrameworkSupportNodeBase>();
Map<String, FrameworkSupportNodeBase> associatedNodes = new LinkedHashMap<String, FrameworkSupportNodeBase>();
for (FrameworkSupportInModuleProvider provider : providers) {
createNode(provider, nodes, groups, roots, providers, associated, associatedNodes);
}
FrameworkSupportNodeBase.sortByName(roots);
myRoots = roots;
return associatedNodes.values();
}
  /**
   * Creates (or returns the cached) node for {@code provider}, recursively
   * creating its parent first: either the node of its underlying framework
   * type or its group's node. Nodes/groups whose ids are in {@code associated}
   * go into {@code associatedNodes} instead of the tree; parentless tree nodes
   * become roots. Returns null when the underlying framework type cannot be
   * resolved among {@code providers}.
   */
  @Nullable
  private FrameworkSupportNode createNode(final FrameworkSupportInModuleProvider provider,
                                          final Map<String, FrameworkSupportNode> nodes,
                                          final Map<FrameworkGroup<?>, FrameworkGroupNode> groupNodes,
                                          List<FrameworkSupportNodeBase> roots,
                                          List<FrameworkSupportInModuleProvider> providers,
                                          Set<String> associated,
                                          Map<String, FrameworkSupportNodeBase> associatedNodes) {
    String id = provider.getFrameworkType().getId();
    FrameworkSupportNode node = nodes.get(id);
    // Already created — either as a tree node or as an associated node.
    // NOTE(review): for an associated id this returns null by design, since
    // 'node' was never placed into 'nodes'.
    if (node != null || associatedNodes.containsKey(id)) {
      return node;
    }
    String underlyingTypeId = provider.getFrameworkType().getUnderlyingFrameworkTypeId();
    FrameworkSupportNodeBase parentNode = null;
    final FrameworkGroup<?> group = provider.getFrameworkType().getParentGroup();
    if (underlyingTypeId != null) {
      // Parent is the node of the underlying framework type; create it first.
      FrameworkSupportInModuleProvider parentProvider = FrameworkSupportUtil.findProvider(underlyingTypeId, providers);
      if (parentProvider == null) {
        LOG.info("Cannot find id = " + underlyingTypeId);
        return null;
      }
      parentNode = createNode(parentProvider, nodes, groupNodes, roots, providers, associated, associatedNodes);
    }
    else if (group != null) {
      parentNode = groupNodes.get(group);
      if (parentNode == null) {
        FrameworkGroupNode groupNode = new FrameworkGroupNode(group, null);
        if (associated.contains(groupNode.getId())) {
          // Associated group: kept out of the tree, so this node stays parentless.
          associatedNodes.put(groupNode.getId(), groupNode);
        }
        else {
          groupNodes.put(group, groupNode);
          parentNode = groupNode;
          roots.add(groupNode);
        }
      }
    }
    node = new FrameworkSupportNode(provider, parentNode, myModel, this);
    if (associated.contains(id)) {
      associatedNodes.put(id, node);
    }
    else {
      nodes.put(id, node);
      if (parentNode == null) {
        roots.add(node);
      }
    }
    return node;
  }
  /** The whole panel, for embedding into a wizard step or dialog. */
  public JComponent getMainPanel() {
    return myMainPanel;
  }
  /** The checkbox tree listing all selectable frameworks. */
  public FrameworksTree getFrameworksTree() {
    return myFrameworksTree;
  }
  /** True when at least one framework is checked in the tree or associated. */
  public boolean hasSelectedFrameworks() {
    return !getSelectedNodes().isEmpty();
  }
private List<FrameworkSupportNode> getSelectedNodes() {
List<FrameworkSupportNode> list = new ArrayList<FrameworkSupportNode>();
if (myRoots != null) {
addChildFrameworks(myRoots, list);
}
list.addAll(ContainerUtil.mapNotNull(myAssociatedFrameworks, new Function.InstanceOf<FrameworkSupportNodeBase, FrameworkSupportNode>(FrameworkSupportNode.class)));
return list;
}
private static void addChildFrameworks(final List<FrameworkSupportNodeBase> list, final List<FrameworkSupportNode> result) {
for (FrameworkSupportNodeBase node : list) {
if (node.isChecked() || node instanceof FrameworkGroupNode) {
if (node instanceof FrameworkSupportNode) {
result.add((FrameworkSupportNode)node);
}
//noinspection unchecked
addChildFrameworks(node.getChildren(), result);
}
}
}
/**
 * Applies every selected framework to the given module: adds required libraries to the
 * root model and lets each framework's configurable contribute its support, then gives
 * legacy facet-based providers a pass over all added libraries, and finally notifies
 * {@link FrameworkSupportCommunicator} extensions.
 * NOTE(review): ordering matters here — frameworks are sorted before the first loop and
 * library creation happens before each configurable's addSupport call.
 */
public void addSupport(final @NotNull Module module, final @NotNull ModifiableRootModel rootModel) {
List<Library> addedLibraries = new ArrayList<Library>();
List<FrameworkSupportNode> selectedFrameworks = getSelectedNodes();
// Sort via the provider comparator so frameworks are configured in a stable, dependency-aware order.
sortFrameworks(selectedFrameworks);
List<FrameworkSupportConfigurable> selectedConfigurables = new ArrayList<FrameworkSupportConfigurable>();
final IdeaModifiableModelsProvider modifiableModelsProvider = new IdeaModifiableModelsProvider();
for (FrameworkSupportNode node : selectedFrameworks) {
FrameworkSupportInModuleConfigurable configurable = node.getConfigurable();
// Old-style providers are wrapped; unwrap their configurables so communicators see them later.
if (configurable instanceof OldFrameworkSupportProviderWrapper.FrameworkSupportConfigurableWrapper) {
selectedConfigurables.add(((OldFrameworkSupportProviderWrapper.FrameworkSupportConfigurableWrapper)configurable).getConfigurable());
}
final LibraryCompositionSettings settings = getLibraryCompositionSettings(node);
// Create/attach libraries first; the resulting library is handed to legacy configurables directly.
Library library = settings != null ? settings.addLibraries(rootModel, addedLibraries, myLibrariesContainer) : null;
if (configurable instanceof OldFrameworkSupportProviderWrapper.FrameworkSupportConfigurableWrapper) {
((OldFrameworkSupportProviderWrapper.FrameworkSupportConfigurableWrapper)configurable).getConfigurable().addSupport(module, rootModel,
library);
}
else {
configurable.addSupport(module, rootModel, modifiableModelsProvider);
}
}
// Second pass: facet-based legacy providers may post-process the full set of added libraries.
for (FrameworkSupportNode node : selectedFrameworks) {
FrameworkSupportInModuleProvider provider = node.getUserObject();
if (provider instanceof OldFrameworkSupportProviderWrapper) {
final FrameworkSupportProvider oldProvider = ((OldFrameworkSupportProviderWrapper)provider).getProvider();
if (oldProvider instanceof FacetBasedFrameworkSupportProvider && !addedLibraries.isEmpty()) {
((FacetBasedFrameworkSupportProvider)oldProvider).processAddedLibraries(module, addedLibraries);
}
}
}
// Let extensions react once to the whole set of added frameworks.
for (FrameworkSupportCommunicator communicator : FrameworkSupportCommunicator.EP_NAME.getExtensions()) {
communicator.onFrameworkSupportAdded(module, rootModel, selectedConfigurables, myModel);
}
}
/**
 * Sorts framework nodes in place using the provider-level comparator, so that
 * configuration order follows provider ordering/dependencies.
 */
private void sortFrameworks(final List<FrameworkSupportNode> nodes) {
  final Comparator<FrameworkSupportInModuleProvider> providerComparator =
    FrameworkSupportUtil.getFrameworkSupportProvidersComparator(myProviders);
  Collections.sort(nodes, new Comparator<FrameworkSupportNode>() {
    public int compare(final FrameworkSupportNode left, final FrameworkSupportNode right) {
      return providerComparator.compare(left.getUserObject(), right.getUserObject());
    }
  });
}
}
| |
package com.fasterxml.jackson.databind.deser.std;
import java.io.IOException;
import java.util.*;
import com.fasterxml.jackson.core.*;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.annotation.JacksonStdImpl;
import com.fasterxml.jackson.databind.jsontype.TypeDeserializer;
import com.fasterxml.jackson.databind.util.ObjectBuffer;
/**
* Deserializer implementation that is used if it is necessary to bind content of
* "unknown" type; something declared as basic {@link java.lang.Object}
* (either explicitly, or due to type erasure).
* If so, "natural" mapping is used to convert JSON values to their natural
* Java object matches: JSON arrays to Java {@link java.util.List}s (or, if configured,
* Object[]), JSON objects to {@link java.util.Map}s, numbers to
* {@link java.lang.Number}s, booleans to {@link java.lang.Boolean}s and
* strings to {@link java.lang.String} (and nulls to nulls).
*/
@JacksonStdImpl
public class UntypedObjectDeserializer
extends StdDeserializer<Object>
{
// Shared immutable empty result, returned by mapArrayToArray() for empty JSON arrays.
private final static Object[] NO_OBJECTS = new Object[0];
public UntypedObjectDeserializer() { super(Object.class); }
/*
/**********************************************************
/* Deserializer API
/**********************************************************
*/
/**
 * Maps the parser's current token to its "natural" Java value: JSON Objects to
 * {@link java.util.Map}, Arrays to {@link java.util.List} (or Object[] if configured),
 * and scalars to String / Number / Boolean / null. Throws a mapping exception (via
 * the context) for tokens that cannot start a value (stray END_OBJECT / END_ARRAY).
 */
@Override
public Object deserialize(JsonParser jp, DeserializationContext ctxt)
throws IOException, JsonProcessingException
{
switch (jp.getCurrentToken()) {
case START_OBJECT:
return mapObject(jp, ctxt);
case END_OBJECT: // invalid
break;
case START_ARRAY:
return mapArray(jp, ctxt);
case END_ARRAY: // invalid
break;
case FIELD_NAME:
// caller may hand us a parser already positioned inside an object
return mapObject(jp, ctxt);
case VALUE_EMBEDDED_OBJECT:
return jp.getEmbeddedObject();
case VALUE_STRING:
return jp.getText();
case VALUE_NUMBER_INT:
/* [JACKSON-100]: caller may want to get all integral values
* returned as BigInteger, for consistency
*/
if (ctxt.isEnabled(DeserializationFeature.USE_BIG_INTEGER_FOR_INTS)) {
return jp.getBigIntegerValue(); // should be optimal, whatever it is
}
return jp.getNumberValue(); // should be optimal, whatever it is
case VALUE_NUMBER_FLOAT:
/* [JACKSON-72]: need to allow overriding the behavior regarding
* which type to use
*/
if (ctxt.isEnabled(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS)) {
return jp.getDecimalValue();
}
return Double.valueOf(jp.getDoubleValue());
case VALUE_TRUE:
return Boolean.TRUE;
case VALUE_FALSE:
return Boolean.FALSE;
case VALUE_NULL: // should not get this but...
return null;
}
// fell through switch: current token is not a legal value start
throw ctxt.mappingException(Object.class);
}
/**
 * Variant used when polymorphic type information may be present: structured values
 * (Objects, Arrays) are routed through the {@link TypeDeserializer}, while "natural"
 * scalar types carry no type id and are converted directly, mirroring
 * {@link #deserialize(JsonParser, DeserializationContext)}.
 */
@Override
public Object deserializeWithType(JsonParser jp, DeserializationContext ctxt,
TypeDeserializer typeDeserializer)
throws IOException, JsonProcessingException
{
JsonToken t = jp.getCurrentToken();
switch (t) {
// First: does it look like we had type id wrapping of some kind?
case START_ARRAY:
case START_OBJECT:
case FIELD_NAME:
/* Output can be as JSON Object, Array or scalar: no way to know
* a this point:
*/
return typeDeserializer.deserializeTypedFromAny(jp, ctxt);
/* Otherwise we probably got a "native" type (ones that map
* naturally and thus do not need or use type ids)
*/
case VALUE_STRING:
return jp.getText();
case VALUE_NUMBER_INT:
// For [JACKSON-100], see above:
if (ctxt.isEnabled(DeserializationFeature.USE_BIG_INTEGER_FOR_INTS)) {
return jp.getBigIntegerValue();
}
/* and as per [JACKSON-839], allow "upgrade" to bigger types: out-of-range
* entries can not be produced without type, so this should "just work",
* even if it is bit unclean
*/
return jp.getNumberValue();
case VALUE_NUMBER_FLOAT:
// For [JACKSON-72], see above
if (ctxt.isEnabled(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS)) {
return jp.getDecimalValue();
}
return Double.valueOf(jp.getDoubleValue());
case VALUE_TRUE:
return Boolean.TRUE;
case VALUE_FALSE:
return Boolean.FALSE;
case VALUE_EMBEDDED_OBJECT:
return jp.getEmbeddedObject();
case VALUE_NULL: // should not get this far really but...
return null;
}
throw ctxt.mappingException(Object.class);
}
/*
/**********************************************************
/* Internal methods
/**********************************************************
*/
/**
 * Method called to map a JSON Array into a Java value.
 */
protected Object mapArray(JsonParser jp, DeserializationContext ctxt)
throws IOException, JsonProcessingException
{
if (ctxt.isEnabled(DeserializationFeature.USE_JAVA_ARRAY_FOR_JSON_ARRAY)) {
return mapArrayToArray(jp, ctxt);
}
// Minor optimization to handle small lists (default size for ArrayList is 10)
if (jp.nextToken() == JsonToken.END_ARRAY) {
return new ArrayList<Object>(4);
}
// Buffer element values in chunked Object[]s to avoid repeated ArrayList growth.
ObjectBuffer buffer = ctxt.leaseObjectBuffer();
Object[] values = buffer.resetAndStart();
int ptr = 0;
int totalSize = 0;
do {
Object value = deserialize(jp, ctxt);
++totalSize;
if (ptr >= values.length) {
// current chunk full; stash it and start a new one
values = buffer.appendCompletedChunk(values);
ptr = 0;
}
values[ptr++] = value;
} while (jp.nextToken() != JsonToken.END_ARRAY);
// let's create almost full array, with 1/8 slack
ArrayList<Object> result = new ArrayList<Object>(totalSize + (totalSize >> 3) + 1);
buffer.completeAndClearBuffer(values, ptr, result);
return result;
}
/**
 * Method called to map a JSON Object into a Java value.
 */
protected Object mapObject(JsonParser jp, DeserializationContext ctxt)
throws IOException, JsonProcessingException
{
JsonToken t = jp.getCurrentToken();
if (t == JsonToken.START_OBJECT) {
t = jp.nextToken();
}
// 1.6: minor optimization; let's handle 1 and 2 entry cases separately
if (t != JsonToken.FIELD_NAME) { // and empty one too
// empty map might work; but caller may want to modify... so better just give small modifiable
return new LinkedHashMap<String,Object>(4);
}
String field1 = jp.getText();
jp.nextToken();
Object value1 = deserialize(jp, ctxt);
if (jp.nextToken() != JsonToken.FIELD_NAME) { // single entry; but we want modifiable
LinkedHashMap<String, Object> result = new LinkedHashMap<String, Object>(4);
result.put(field1, value1);
return result;
}
String field2 = jp.getText();
jp.nextToken();
Object value2 = deserialize(jp, ctxt);
if (jp.nextToken() != JsonToken.FIELD_NAME) {
LinkedHashMap<String, Object> result = new LinkedHashMap<String, Object>(4);
result.put(field1, value1);
result.put(field2, value2);
return result;
}
// And then the general case; default map size is 16
LinkedHashMap<String, Object> result = new LinkedHashMap<String, Object>();
result.put(field1, value1);
result.put(field2, value2);
do {
String fieldName = jp.getText();
jp.nextToken();
result.put(fieldName, deserialize(jp, ctxt));
} while (jp.nextToken() != JsonToken.END_OBJECT);
return result;
}
/**
 * Method called to map a JSON Array into a Java Object array (Object[]).
 */
protected Object[] mapArrayToArray(JsonParser jp, DeserializationContext ctxt)
throws IOException, JsonProcessingException
{
// Minor optimization to handle small lists (default size for ArrayList is 10)
if (jp.nextToken() == JsonToken.END_ARRAY) {
return NO_OBJECTS;
}
ObjectBuffer buffer = ctxt.leaseObjectBuffer();
Object[] values = buffer.resetAndStart();
int ptr = 0;
do {
Object value = deserialize(jp, ctxt);
if (ptr >= values.length) {
values = buffer.appendCompletedChunk(values);
ptr = 0;
}
values[ptr++] = value;
} while (jp.nextToken() != JsonToken.END_ARRAY);
return buffer.completeAndClearBuffer(values, ptr);
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.fields;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.Base64;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import static org.elasticsearch.client.Requests.refreshRequest;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
/**
*
*/
public class SearchFieldsTests extends ElasticsearchIntegrationTest {
/**
 * Verifies stored-field retrieval: stored fields come back directly, a non-stored
 * field is extracted from _source, and the "*" pattern matches only stored fields
 * (and omits _source unless it is requested explicitly).
 */
@Test
public void testStoredFields() throws Exception {
createIndex("test");
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
// _timestamp and _size are randomly enabled via templates but we don't want it here to test stored fields behaviour
.startObject("_timestamp").field("enabled", false).endObject()
.startObject("_size").field("enabled", false).endObject()
.startObject("properties")
.startObject("field1").field("type", "string").field("store", "yes").endObject()
.startObject("field2").field("type", "string").field("store", "no").endObject()
.startObject("field3").field("type", "string").field("store", "yes").endObject()
.endObject().endObject().endObject().string();
client().admin().indices().preparePutMapping().setType("type1").setSource(mapping).execute().actionGet();
client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
.field("field1", "value1")
.field("field2", "value2")
.field("field3", "value3")
.endObject()).execute().actionGet();
client().admin().indices().prepareRefresh().execute().actionGet();
SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("field1").execute().actionGet();
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l));
assertThat(searchResponse.getHits().hits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(1));
assertThat(searchResponse.getHits().getAt(0).fields().get("field1").value().toString(), equalTo("value1"));
// field2 is not stored, check that it gets extracted from source
searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("field2").execute().actionGet();
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l));
assertThat(searchResponse.getHits().hits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(1));
assertThat(searchResponse.getHits().getAt(0).fields().get("field2").value().toString(), equalTo("value2"));
searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("field3").execute().actionGet();
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l));
assertThat(searchResponse.getHits().hits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(1));
assertThat(searchResponse.getHits().getAt(0).fields().get("field3").value().toString(), equalTo("value3"));
// "*" returns the stored fields only (field1, field3); _source is not returned
searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("*").execute().actionGet();
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l));
assertThat(searchResponse.getHits().hits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).source(), nullValue());
assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(2));
assertThat(searchResponse.getHits().getAt(0).fields().get("field1").value().toString(), equalTo("value1"));
assertThat(searchResponse.getHits().getAt(0).fields().get("field3").value().toString(), equalTo("value3"));
// explicitly requesting "_source" alongside "*" brings the source back
searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("*").addField("_source").execute().actionGet();
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l));
assertThat(searchResponse.getHits().hits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).source(), notNullValue());
assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(2));
assertThat(searchResponse.getHits().getAt(0).fields().get("field1").value().toString(), equalTo("value1"));
assertThat(searchResponse.getHits().getAt(0).fields().get("field3").value().toString(), equalTo("value3"));
}
/**
 * Exercises script fields that read values via doc['...'], _fields['...'] and
 * script parameters. Three docs are indexed with a flush between each (so they
 * land in separate segments) and results are checked sorted by num1.
 */
@Test
public void testScriptDocAndFields() throws Exception {
createIndex("test");
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("num1").field("type", "double").field("store", "yes").endObject()
.endObject().endObject().endObject().string();
client().admin().indices().preparePutMapping().setType("type1").setSource(mapping).execute().actionGet();
client().prepareIndex("test", "type1", "1")
.setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 1.0f).field("date", "1970-01-01T00:00:00").endObject())
.execute().actionGet();
client().admin().indices().prepareFlush().execute().actionGet();
client().prepareIndex("test", "type1", "2")
.setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 2.0f).field("date", "1970-01-01T00:00:25").endObject())
.execute().actionGet();
client().admin().indices().prepareFlush().execute().actionGet();
client().prepareIndex("test", "type1", "3")
.setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 3.0f).field("date", "1970-01-01T00:02:00").endObject())
.execute().actionGet();
client().admin().indices().refresh(refreshRequest()).actionGet();
logger.info("running doc['num1'].value");
SearchResponse response = client().prepareSearch()
.setQuery(matchAllQuery())
.addSort("num1", SortOrder.ASC)
.addScriptField("sNum1", new Script("doc['num1'].value"))
.addScriptField("sNum1_field", new Script("_fields['num1'].value"))
.addScriptField("date1", new Script("doc['date'].date.millis"))
.execute().actionGet();
assertNoFailures(response);
assertThat(response.getHits().totalHits(), equalTo(3l));
// script fields replace _source in the response
assertThat(response.getHits().getAt(0).isSourceEmpty(), equalTo(true));
assertThat(response.getHits().getAt(0).id(), equalTo("1"));
assertThat(response.getHits().getAt(0).fields().size(), equalTo(3));
assertThat((Double) response.getHits().getAt(0).fields().get("sNum1").values().get(0), equalTo(1.0));
assertThat((Double) response.getHits().getAt(0).fields().get("sNum1_field").values().get(0), equalTo(1.0));
assertThat((Long) response.getHits().getAt(0).fields().get("date1").values().get(0), equalTo(0l));
assertThat(response.getHits().getAt(1).id(), equalTo("2"));
assertThat(response.getHits().getAt(1).fields().size(), equalTo(3));
assertThat((Double) response.getHits().getAt(1).fields().get("sNum1").values().get(0), equalTo(2.0));
assertThat((Double) response.getHits().getAt(1).fields().get("sNum1_field").values().get(0), equalTo(2.0));
assertThat((Long) response.getHits().getAt(1).fields().get("date1").values().get(0), equalTo(25000l));
assertThat(response.getHits().getAt(2).id(), equalTo("3"));
assertThat(response.getHits().getAt(2).fields().size(), equalTo(3));
assertThat((Double) response.getHits().getAt(2).fields().get("sNum1").values().get(0), equalTo(3.0));
assertThat((Double) response.getHits().getAt(2).fields().get("sNum1_field").values().get(0), equalTo(3.0));
assertThat((Long) response.getHits().getAt(2).fields().get("date1").values().get(0), equalTo(120000l));
logger.info("running doc['num1'].value * factor");
// same script field, this time parameterized with an inline script param
Map<String, Object> params = MapBuilder.<String, Object>newMapBuilder().put("factor", 2.0).map();
response = client().prepareSearch()
.setQuery(matchAllQuery())
.addSort("num1", SortOrder.ASC)
.addScriptField("sNum1", new Script("doc['num1'].value * factor", ScriptType.INLINE, null, params))
.execute().actionGet();
assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().getAt(0).id(), equalTo("1"));
assertThat(response.getHits().getAt(0).fields().size(), equalTo(1));
assertThat((Double) response.getHits().getAt(0).fields().get("sNum1").values().get(0), equalTo(2.0));
assertThat(response.getHits().getAt(1).id(), equalTo("2"));
assertThat(response.getHits().getAt(1).fields().size(), equalTo(1));
assertThat((Double) response.getHits().getAt(1).fields().get("sNum1").values().get(0), equalTo(4.0));
assertThat(response.getHits().getAt(2).id(), equalTo("3"));
assertThat(response.getHits().getAt(2).fields().size(), equalTo(1));
assertThat((Double) response.getHits().getAt(2).fields().get("sNum1").values().get(0), equalTo(6.0));
}
/**
 * Script fields reading metadata fields: _fields._uid, _fields._id and
 * _fields._type, requested individually and then all together in one request.
 */
@Test
public void testUidBasedScriptFields() throws Exception {
prepareCreate("test").addMapping("type1", "num1", "type=long").execute().actionGet();
ensureYellow();
int numDocs = randomIntBetween(1, 30);
IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[numDocs];
for (int i = 0; i < numDocs; i++) {
indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i))
.setSource(jsonBuilder().startObject().field("num1", i).endObject());
}
indexRandom(true, indexRequestBuilders);
// _uid is "type#id"
SearchResponse response = client().prepareSearch()
.setQuery(matchAllQuery()).addSort("num1", SortOrder.ASC).setSize(numDocs)
.addScriptField("uid", new Script("_fields._uid.value")).get();
assertNoFailures(response);
assertThat(response.getHits().totalHits(), equalTo((long)numDocs));
for (int i = 0; i < numDocs; i++) {
assertThat(response.getHits().getAt(i).id(), equalTo(Integer.toString(i)));
assertThat(response.getHits().getAt(i).fields().size(), equalTo(1));
assertThat((String)response.getHits().getAt(i).fields().get("uid").value(), equalTo("type1#" + Integer.toString(i)));
}
response = client().prepareSearch()
.setQuery(matchAllQuery()).addSort("num1", SortOrder.ASC).setSize(numDocs)
.addScriptField("id", new Script("_fields._id.value")).get();
assertNoFailures(response);
assertThat(response.getHits().totalHits(), equalTo((long)numDocs));
for (int i = 0; i < numDocs; i++) {
assertThat(response.getHits().getAt(i).id(), equalTo(Integer.toString(i)));
assertThat(response.getHits().getAt(i).fields().size(), equalTo(1));
assertThat((String)response.getHits().getAt(i).fields().get("id").value(), equalTo(Integer.toString(i)));
}
response = client().prepareSearch()
.setQuery(matchAllQuery()).addSort("num1", SortOrder.ASC).setSize(numDocs)
.addScriptField("type", new Script("_fields._type.value")).get();
assertNoFailures(response);
assertThat(response.getHits().totalHits(), equalTo((long)numDocs));
for (int i = 0; i < numDocs; i++) {
assertThat(response.getHits().getAt(i).id(), equalTo(Integer.toString(i)));
assertThat(response.getHits().getAt(i).fields().size(), equalTo(1));
assertThat((String)response.getHits().getAt(i).fields().get("type").value(), equalTo("type1"));
}
// all three metadata script fields in a single request
response = client().prepareSearch()
.setQuery(matchAllQuery()).addSort("num1", SortOrder.ASC).setSize(numDocs)
.addScriptField("id", new Script("_fields._id.value")).addScriptField("uid", new Script("_fields._uid.value"))
.addScriptField("type", new Script("_fields._type.value")).get();
assertNoFailures(response);
assertThat(response.getHits().totalHits(), equalTo((long)numDocs));
for (int i = 0; i < numDocs; i++) {
assertThat(response.getHits().getAt(i).id(), equalTo(Integer.toString(i)));
assertThat(response.getHits().getAt(i).fields().size(), equalTo(3));
assertThat((String)response.getHits().getAt(i).fields().get("uid").value(), equalTo("type1#" + Integer.toString(i)));
assertThat((String)response.getHits().getAt(i).fields().get("type").value(), equalTo("type1"));
assertThat((String)response.getHits().getAt(i).fields().get("id").value(), equalTo(Integer.toString(i)));
}
}
/**
 * Script fields reading structured values straight out of _source: a sub-object,
 * a nested scalar, an array of strings and an array of objects.
 * FIX: removed a duplicated assertion — the "s_obj1_test" check appeared twice
 * verbatim (before and after the s_obj1 checks); one copy adds nothing.
 */
@Test
public void testScriptFieldUsingSource() throws Exception {
    createIndex("test");
    client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet();
    client().prepareIndex("test", "type1", "1")
            .setSource(jsonBuilder().startObject()
                    .startObject("obj1").field("test", "something").endObject()
                    .startObject("obj2").startArray("arr2").value("arr_value1").value("arr_value2").endArray().endObject()
                    .startArray("arr3").startObject().field("arr3_field1", "arr3_value1").endObject().endArray()
                    .endObject())
            .execute().actionGet();
    client().admin().indices().refresh(refreshRequest()).actionGet();
    SearchResponse response = client().prepareSearch()
            .setQuery(matchAllQuery())
            .addScriptField("s_obj1", new Script("_source.obj1"))
            .addScriptField("s_obj1_test", new Script("_source.obj1.test")).addScriptField("s_obj2", new Script("_source.obj2"))
            .addScriptField("s_obj2_arr2", new Script("_source.obj2.arr2")).addScriptField("s_arr3", new Script("_source.arr3"))
            .execute().actionGet();
    assertThat("Failures " + Arrays.toString(response.getShardFailures()), response.getShardFailures().length, equalTo(0));
    // nested scalar extracted directly
    assertThat(response.getHits().getAt(0).field("s_obj1_test").value().toString(), equalTo("something"));
    // whole sub-object comes back as a Map
    Map<String, Object> sObj1 = response.getHits().getAt(0).field("s_obj1").value();
    assertThat(sObj1.get("test").toString(), equalTo("something"));
    // array inside a sub-object
    Map<String, Object> sObj2 = response.getHits().getAt(0).field("s_obj2").value();
    List sObj2Arr2 = (List) sObj2.get("arr2");
    assertThat(sObj2Arr2.size(), equalTo(2));
    assertThat(sObj2Arr2.get(0).toString(), equalTo("arr_value1"));
    assertThat(sObj2Arr2.get(1).toString(), equalTo("arr_value2"));
    // same array addressed directly by path
    sObj2Arr2 = response.getHits().getAt(0).field("s_obj2_arr2").values();
    assertThat(sObj2Arr2.size(), equalTo(2));
    assertThat(sObj2Arr2.get(0).toString(), equalTo("arr_value1"));
    assertThat(sObj2Arr2.get(1).toString(), equalTo("arr_value2"));
    // array of objects
    List sObj2Arr3 = response.getHits().getAt(0).field("s_arr3").values();
    assertThat(((Map) sObj2Arr3.get(0)).get("arr3_field1").toString(), equalTo("arr3_value1"));
}
/**
 * Indexes a document with nested object/array structure and verifies it is
 * searchable after a refresh.
 * FIX: the test previously contained no assertions at all — it indexed and
 * refreshed, then returned, so it could never fail. A minimal hit-count check
 * now verifies the indexing round-trip.
 */
@Test
public void testPartialFields() throws Exception {
    createIndex("test");
    client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet();
    client().prepareIndex("test", "type1", "1").setSource(XContentFactory.jsonBuilder().startObject()
            .field("field1", "value1")
            .startObject("obj1")
            .startArray("arr1")
            .startObject().startObject("obj2").field("field2", "value21").endObject().endObject()
            .startObject().startObject("obj2").field("field2", "value22").endObject().endObject()
            .endArray()
            .endObject()
            .endObject())
            .execute().actionGet();
    client().admin().indices().prepareRefresh().execute().actionGet();
    SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()).execute().actionGet();
    assertHitCount(searchResponse, 1);
}
/**
 * Stores one field of every primitive-ish type with _source disabled and checks
 * that each stored value round-trips with the expected Java type.
 * BUG FIX: "_source" is a type-level meta field, but it was nested inside
 * "properties", where it declares an ordinary object property named "_source"
 * instead of disabling source storage — so the test never actually ran without
 * source. Moved it up to the type level.
 */
@Test
public void testStoredFieldsWithoutSource() throws Exception {
    createIndex("test");
    client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet();
    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
            .startObject("_source").field("enabled", false).endObject()
            .startObject("properties")
            .startObject("byte_field").field("type", "byte").field("store", "yes").endObject()
            .startObject("short_field").field("type", "short").field("store", "yes").endObject()
            .startObject("integer_field").field("type", "integer").field("store", "yes").endObject()
            .startObject("long_field").field("type", "long").field("store", "yes").endObject()
            .startObject("float_field").field("type", "float").field("store", "yes").endObject()
            .startObject("double_field").field("type", "double").field("store", "yes").endObject()
            .startObject("date_field").field("type", "date").field("store", "yes").endObject()
            .startObject("boolean_field").field("type", "boolean").field("store", "yes").endObject()
            .startObject("binary_field").field("type", "binary").field("store", "yes").endObject()
            .endObject().endObject().endObject().string();
    client().admin().indices().preparePutMapping().setType("type1").setSource(mapping).execute().actionGet();
    client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
            .field("byte_field", (byte) 1)
            .field("short_field", (short) 2)
            .field("integer_field", 3)
            .field("long_field", 4l)
            .field("float_field", 5.0f)
            .field("double_field", 6.0d)
            .field("date_field", Joda.forPattern("dateOptionalTime").printer().print(new DateTime(2012, 3, 22, 0, 0, DateTimeZone.UTC)))
            .field("boolean_field", true)
            .field("binary_field", Base64.encodeBytes("testing text".getBytes("UTF8")))
            .endObject()).execute().actionGet();
    client().admin().indices().prepareRefresh().execute().actionGet();
    SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery())
            .addField("byte_field")
            .addField("short_field")
            .addField("integer_field")
            .addField("long_field")
            .addField("float_field")
            .addField("double_field")
            .addField("date_field")
            .addField("boolean_field")
            .addField("binary_field")
            .execute().actionGet();
    assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l));
    assertThat(searchResponse.getHits().hits().length, equalTo(1));
    assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(9));
    // byte/short are returned as strings of their numeric value; wider numerics keep their Java type
    assertThat(searchResponse.getHits().getAt(0).fields().get("byte_field").value().toString(), equalTo("1"));
    assertThat(searchResponse.getHits().getAt(0).fields().get("short_field").value().toString(), equalTo("2"));
    assertThat(searchResponse.getHits().getAt(0).fields().get("integer_field").value(), equalTo((Object) 3));
    assertThat(searchResponse.getHits().getAt(0).fields().get("long_field").value(), equalTo((Object) 4l));
    assertThat(searchResponse.getHits().getAt(0).fields().get("float_field").value(), equalTo((Object) 5.0f));
    assertThat(searchResponse.getHits().getAt(0).fields().get("double_field").value(), equalTo((Object) 6.0d));
    String dateTime = Joda.forPattern("dateOptionalTime").printer().print(new DateTime(2012, 3, 22, 0, 0, DateTimeZone.UTC));
    assertThat(searchResponse.getHits().getAt(0).fields().get("date_field").value(), equalTo((Object) dateTime));
    assertThat(searchResponse.getHits().getAt(0).fields().get("boolean_field").value(), equalTo((Object) Boolean.TRUE));
    assertThat(((BytesReference) searchResponse.getHits().getAt(0).fields().get("binary_field").value()).toBytesArray(), equalTo((BytesReference) new BytesArray("testing text".getBytes("UTF8"))));
}
/**
 * Verifies that metadata fields (_routing) and regular fields are distinguished
 * by {@code isMetadataField()} and that both values are returned correctly.
 */
@Test
public void testSearchFields_metaData() throws Exception {
    // index a routed document and make it immediately searchable
    client().prepareIndex("my-index", "my-type1", "1")
            .setRouting("1")
            .setSource(jsonBuilder().startObject().field("field1", "value").endObject())
            .setRefresh(true)
            .get();
    SearchResponse response = client().prepareSearch("my-index")
            .setTypes("my-type1")
            .addField("field1").addField("_routing")
            .get();
    assertThat(response.getHits().totalHits(), equalTo(1l));
    SearchHitField regularField = response.getHits().getAt(0).field("field1");
    assertThat(regularField.isMetadataField(), equalTo(false));
    assertThat(regularField.getValue().toString(), equalTo("value"));
    SearchHitField routingField = response.getHits().getAt(0).field("_routing");
    assertThat(routingField.isMetadataField(), equalTo(true));
    assertThat(routingField.getValue().toString(), equalTo("1"));
}
/**
 * Requesting an object (non-leaf) field as a stored field must be rejected
 * with a 400 BAD_REQUEST.
 */
@Test
public void testSearchFields_nonLeafField() throws Exception {
    // "field1" is an object containing "field2", i.e. not a leaf field
    client().prepareIndex("my-index", "my-type1", "1")
            .setSource(jsonBuilder().startObject().startObject("field1").field("field2", "value1").endObject().endObject())
            .setRefresh(true)
            .get();
    SearchRequestBuilder request = client().prepareSearch("my-index").setTypes("my-type1").addField("field1");
    assertFailures(request,
                   RestStatus.BAD_REQUEST,
                   containsString("field [field1] isn't a leaf field"));
}
@Test
public void testGetFields_complexField() throws Exception {
    // my-type2 gets an explicit nested-object mapping down to a stored leaf string;
    // my-type1 (indexed below) relies on dynamic mapping for the same structure.
    client().admin().indices().prepareCreate("my-index")
            .setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1))
            .addMapping("my-type2", jsonBuilder().startObject().startObject("my-type2").startObject("properties")
                    .startObject("field1").field("type", "object").startObject("properties")
                    .startObject("field2").field("type", "object").startObject("properties")
                    .startObject("field3").field("type", "object").startObject("properties")
                    .startObject("field4").field("type", "string").field("store", "yes")
                    .endObject().endObject()
                    .endObject().endObject()
                    .endObject().endObject()
                    .endObject().endObject().endObject())
            .get();
    // Two array entries -> two leaf values for the flattened path field1.field2.field3.field4.
    BytesReference source = jsonBuilder().startObject()
            .startArray("field1")
            .startObject()
            .startObject("field2")
            .startArray("field3")
            .startObject()
            .field("field4", "value1")
            .endObject()
            .endArray()
            .endObject()
            .endObject()
            .startObject()
            .startObject("field2")
            .startArray("field3")
            .startObject()
            .field("field4", "value2")
            .endObject()
            .endArray()
            .endObject()
            .endObject()
            .endArray()
            .endObject().bytes();
    client().prepareIndex("my-index", "my-type1", "1").setSource(source).get();
    client().prepareIndex("my-index", "my-type2", "1").setRefresh(true).setSource(source).get();

    String field = "field1.field2.field3.field4";
    // Dynamic mapping (my-type1) and explicit mapping (my-type2) must yield the same values.
    assertComplexFieldValues(client().prepareSearch("my-index").setTypes("my-type1").addField(field).get(), field);
    assertComplexFieldValues(client().prepareSearch("my-index").setTypes("my-type2").addField(field).get(), field);
}

/**
 * Asserts that the single hit returns both leaf values of {@code field} in document order.
 * Extracted because the identical assertion group was duplicated for both mapping types.
 */
private static void assertComplexFieldValues(SearchResponse searchResponse, String field) {
    assertThat(searchResponse.getHits().totalHits(), equalTo(1L)); // was '1l' — uppercase L avoids confusion with digit 1
    assertThat(searchResponse.getHits().getAt(0).field(field).isMetadataField(), equalTo(false));
    assertThat(searchResponse.getHits().getAt(0).field(field).getValues().size(), equalTo(2));
    assertThat(searchResponse.getHits().getAt(0).field(field).getValues().get(0).toString(), equalTo("value1"));
    assertThat(searchResponse.getHits().getAt(0).field(field).getValues().get(1).toString(), equalTo("value2"));
}
@Test // see #8203
// NOTE(review): method name looks like a typo for "...FieldDataField" — left unchanged
// since the test is discovered by name via the framework.
public void testSingleValueFieldDatatField() throws ExecutionException, InterruptedException {
createIndex("test");
indexRandom(true, client().prepareIndex("test", "type", "1").setSource("test_field", "foobar"));
refresh();
// "fielddata_fields" given as a single string (not an array) must be accepted.
SearchResponse searchResponse = client().prepareSearch("test").setTypes("type").setSource(new BytesArray(new BytesRef("{\"query\":{\"match_all\":{}},\"fielddata_fields\": \"test_field\"}"))).get();
assertHitCount(searchResponse, 1);
Map<String,SearchHitField> fields = searchResponse.getHits().getHits()[0].getFields();
// The fielddata-sourced value must match the indexed value.
assertThat((String)fields.get("test_field").value(), equalTo("foobar"));
}
@Test(expected = SearchPhaseExecutionException.class)
public void testInvalidFieldDataField() throws ExecutionException, InterruptedException {
    createIndex("test");
    // "fielddata_fields" must be a string or array; randomly pick an object or a
    // number — both malformed payloads must make the search phase fail.
    String body = randomBoolean()
            ? "{\"query\":{\"match_all\":{}},\"fielddata_fields\": {}}"
            : "{\"query\":{\"match_all\":{}},\"fielddata_fields\": 1.0}";
    client().prepareSearch("test").setTypes("type").setSource(new BytesArray(new BytesRef(body))).get();
}
@Test
public void testFieldsPulledFromFieldData() throws Exception {
    createIndex("test");
    client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet();

    // _source is disabled so every asserted value must come from fielddata, not source.
    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
            .startObject("_source").field("enabled", false).endObject()
            .startObject("string_field").field("type", "string").endObject()
            .startObject("byte_field").field("type", "byte").endObject()
            .startObject("short_field").field("type", "short").endObject()
            .startObject("integer_field").field("type", "integer").endObject()
            .startObject("long_field").field("type", "long").endObject()
            .startObject("float_field").field("type", "float").endObject()
            .startObject("double_field").field("type", "double").endObject()
            .startObject("date_field").field("type", "date").endObject()
            .startObject("boolean_field").field("type", "boolean").endObject()
            .startObject("binary_field").field("type", "binary").endObject()
            .endObject().endObject().endObject().string();
    client().admin().indices().preparePutMapping().setType("type1").setSource(mapping).execute().actionGet();

    // Uppercase 'L' long-literal suffixes throughout: lowercase 'l' reads as digit '1'.
    client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
            .field("string_field", "foo")
            .field("byte_field", (byte) 1)
            .field("short_field", (short) 2)
            .field("integer_field", 3)
            .field("long_field", 4L)
            .field("float_field", 5.0f)
            .field("double_field", 6.0d)
            .field("date_field", Joda.forPattern("dateOptionalTime").printer().print(new DateTime(2012, 3, 22, 0, 0, DateTimeZone.UTC)))
            .field("boolean_field", true)
            .endObject()).execute().actionGet();
    client().admin().indices().prepareRefresh().execute().actionGet();

    SearchRequestBuilder builder = client().prepareSearch().setQuery(matchAllQuery())
            .addFieldDataField("string_field")
            .addFieldDataField("byte_field")
            .addFieldDataField("short_field")
            .addFieldDataField("integer_field")
            .addFieldDataField("long_field")
            .addFieldDataField("float_field")
            .addFieldDataField("double_field")
            .addFieldDataField("date_field")
            .addFieldDataField("boolean_field");
    SearchResponse searchResponse = builder.execute().actionGet();

    assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
    assertThat(searchResponse.getHits().hits().length, equalTo(1));
    assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(9));
    // Fielddata widens numerics: byte/short/integer come back as longs, float as double,
    // date as epoch millis and boolean as 1L.
    assertThat(searchResponse.getHits().getAt(0).fields().get("byte_field").value().toString(), equalTo("1"));
    assertThat(searchResponse.getHits().getAt(0).fields().get("short_field").value().toString(), equalTo("2"));
    assertThat(searchResponse.getHits().getAt(0).fields().get("integer_field").value(), equalTo((Object) 3L));
    assertThat(searchResponse.getHits().getAt(0).fields().get("long_field").value(), equalTo((Object) 4L));
    assertThat(searchResponse.getHits().getAt(0).fields().get("float_field").value(), equalTo((Object) 5.0));
    assertThat(searchResponse.getHits().getAt(0).fields().get("double_field").value(), equalTo((Object) 6.0d));
    assertThat(searchResponse.getHits().getAt(0).fields().get("date_field").value(), equalTo((Object) 1332374400000L));
    assertThat(searchResponse.getHits().getAt(0).fields().get("boolean_field").value(), equalTo((Object) 1L));
}
/**
 * Indexes documents with single- and multi-valued string/long/double fields and
 * verifies that script fields returning {@code doc[...].values} surface the expected
 * value lists for every document.
 */
public void testScriptFields() throws Exception {
    assertAcked(prepareCreate("index").addMapping("type",
            "s", "type=string,index=not_analyzed",
            "l", "type=long",
            "d", "type=double",
            "ms", "type=string,index=not_analyzed",
            "ml", "type=long",
            "md", "type=double").get());

    final int docCount = randomIntBetween(3, 8);
    List<IndexRequestBuilder> indexRequests = new ArrayList<>();
    for (int doc = 0; doc < docCount; ++doc) {
        // Single-valued fields hold the doc id; multi-valued fields hold {id, id+1}.
        indexRequests.add(client().prepareIndex("index", "type", Integer.toString(doc)).setSource(
                "s", Integer.toString(doc),
                "ms", new String[] {Integer.toString(doc), Integer.toString(doc + 1)},
                "l", doc,
                "ml", new long[] {doc, doc + 1},
                "d", doc,
                "md", new double[] {doc, doc + 1}));
    }
    indexRandom(true, indexRequests);
    ensureSearchable();

    // Attach one script field per mapped field, each echoing its doc values.
    SearchRequestBuilder search = client().prepareSearch("index");
    for (String field : Arrays.asList("s", "ms", "l", "ml", "d", "md")) {
        search.addScriptField(field, new Script("doc['" + field + "'].values"));
    }
    SearchResponse resp = search.get();
    assertSearchResponse(resp);

    for (SearchHit hit : resp.getHits().getHits()) {
        final int id = Integer.parseInt(hit.getId());
        Map<String, SearchHitField> hitFields = hit.getFields();
        // Single-valued fields come back as one-element lists.
        assertThat(hitFields.get("s").getValues(), equalTo(Collections.<Object> singletonList(Integer.toString(id))));
        assertThat(hitFields.get("l").getValues(), equalTo(Collections.<Object> singletonList((long) id)));
        assertThat(hitFields.get("d").getValues(), equalTo(Collections.<Object> singletonList((double) id)));
        // Multi-valued fields preserve both stored values, in order.
        assertThat(hitFields.get("ms").getValues(), equalTo(Arrays.<Object> asList(Integer.toString(id), Integer.toString(id + 1))));
        assertThat(hitFields.get("ml").getValues(), equalTo(Arrays.<Object> asList((long) id, id + 1L)));
        assertThat(hitFields.get("md").getValues(), equalTo(Arrays.<Object> asList((double) id, id + 1d)));
    }
}
}
| |
/**
* Copyright 2016 Thomas Cashman
*/
package org.mini2Dx.core.engine.geom;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.mini2Dx.core.engine.PositionChangeListener;
import org.mini2Dx.core.engine.Positionable;
import org.mini2Dx.core.engine.SizeChangeListener;
import org.mini2Dx.core.engine.Sizeable;
import org.mini2Dx.core.game.GameContainer;
import org.mini2Dx.core.geom.Polygon;
import org.mini2Dx.core.geom.Shape;
import org.mini2Dx.core.graphics.Graphics;
import com.badlogic.gdx.math.MathUtils;
import com.badlogic.gdx.math.Vector2;
/**
*
*/
/**
 * A {@link Polygon} that can take part in collision detection. Carries a unique id,
 * notifies registered {@link PositionChangeListener}s and {@link SizeChangeListener}s
 * whenever it is mutated, and keeps a "previous" and a "render" copy of itself so
 * drawing can interpolate between the last two logic updates.
 */
public class CollisionPolygon extends Polygon implements CollisionShape {
    private final int id;

    private final ReentrantReadWriteLock positionChangeListenerLock;
    private final ReentrantReadWriteLock sizeChangeListenerLock;

    /* Listener lists are created lazily on first registration. */
    private List<PositionChangeListener> positionChangeListeners;
    private List<SizeChangeListener> sizeChangeListeners;

    /* State captured at the previous logic update — interpolation start point. */
    private Polygon previousPolygon;
    /* Interpolated state that is actually drawn. */
    private Polygon renderPolygon;

    private int renderX, renderY;
    private boolean interpolate = false;

    public CollisionPolygon(float[] vertices) {
        this(CollisionIdSequence.nextId(), vertices);
    }

    public CollisionPolygon(Vector2[] vectors) {
        this(CollisionIdSequence.nextId(), vectors);
    }

    public CollisionPolygon(int id, float[] vertices) {
        super(vertices);
        this.id = id;
        positionChangeListenerLock = new ReentrantReadWriteLock();
        sizeChangeListenerLock = new ReentrantReadWriteLock();
        // FIX: previousPolygon/renderPolygon were never initialised, so preUpdate(),
        // interpolate(), draw(), fill() and forceTo() threw NullPointerException.
        previousPolygon = new Polygon(vertices);
        renderPolygon = new Polygon(vertices);
        storeRenderCoordinates();
    }

    public CollisionPolygon(int id, Vector2[] vectors) {
        super(vectors);
        this.id = id;
        positionChangeListenerLock = new ReentrantReadWriteLock();
        sizeChangeListenerLock = new ReentrantReadWriteLock();
        // FIX: see float[] constructor — render state must exist before first use.
        previousPolygon = new Polygon(vectors);
        renderPolygon = new Polygon(vectors);
        storeRenderCoordinates();
    }

    /** Caches the render polygon's rounded origin for getRenderX()/getRenderY(). */
    private void storeRenderCoordinates() {
        renderX = MathUtils.round(renderPolygon.getX());
        renderY = MathUtils.round(renderPolygon.getY());
    }

    @Override
    public void preUpdate() {
        // Snapshot current state as the interpolation start for the coming frame.
        previousPolygon.set(this);
    }

    @Override
    public void update(GameContainer gc, float delta) {}

    @Override
    public void interpolate(GameContainer gc, float alpha) {
        if (!interpolate) {
            return;
        }
        renderPolygon.set(previousPolygon.lerp(this, alpha));
        storeRenderCoordinates();
        // Stop interpolating once the rounded render position has caught up with
        // the target position on both axes.
        if (renderX != MathUtils.round(this.getX())) {
            return;
        }
        if (renderY != MathUtils.round(this.getY())) {
            return;
        }
        interpolate = false;
    }

    @Override
    public void draw(Graphics g) {
        renderPolygon.draw(g);
    }

    @Override
    public void fill(Graphics g) {
        renderPolygon.fill(g);
    }

    @Override
    public int getId() {
        return id;
    }

    @Override
    public float getDistanceTo(Positionable positionable) {
        return getDistanceTo(positionable.getX(), positionable.getY());
    }

    // Note: method name typo ("Postion") is part of the CollisionShape interface
    // contract and must be kept for @Override compatibility.
    @Override
    public <T extends Positionable> void addPostionChangeListener(
            PositionChangeListener<T> listener) {
        positionChangeListenerLock.writeLock().lock();
        if (positionChangeListeners == null) {
            positionChangeListeners = new ArrayList<PositionChangeListener>(1);
        }
        positionChangeListeners.add(listener);
        positionChangeListenerLock.writeLock().unlock();
    }

    @Override
    public <T extends Positionable> void removePositionChangeListener(
            PositionChangeListener<T> listener) {
        // Cheap read-locked null check before taking the write lock for removal.
        positionChangeListenerLock.readLock().lock();
        if (positionChangeListeners == null) {
            positionChangeListenerLock.readLock().unlock();
            return;
        }
        positionChangeListenerLock.readLock().unlock();
        positionChangeListenerLock.writeLock().lock();
        positionChangeListeners.remove(listener);
        positionChangeListenerLock.writeLock().unlock();
    }

    private void notifyPositionChangeListeners() {
        positionChangeListenerLock.readLock().lock();
        if (positionChangeListeners == null) {
            positionChangeListenerLock.readLock().unlock();
            return;
        }
        // Iterate backwards, releasing the lock around each callback so listeners
        // may add/remove listeners without deadlocking; the index is clamped in
        // case the list shrank while unlocked.
        for (int i = positionChangeListeners.size() - 1; i >= 0; i--) {
            if (i >= positionChangeListeners.size()) {
                i = positionChangeListeners.size() - 1;
            }
            PositionChangeListener listener = positionChangeListeners.get(i);
            positionChangeListenerLock.readLock().unlock();
            listener.positionChanged(this);
            positionChangeListenerLock.readLock().lock();
        }
        positionChangeListenerLock.readLock().unlock();
    }

    @Override
    public <T extends Sizeable> void addSizeChangeListener(SizeChangeListener<T> listener) {
        sizeChangeListenerLock.writeLock().lock();
        if (sizeChangeListeners == null) {
            sizeChangeListeners = new ArrayList<SizeChangeListener>(1);
        }
        sizeChangeListeners.add(listener);
        sizeChangeListenerLock.writeLock().unlock();
    }

    @Override
    public <T extends Sizeable> void removeSizeChangeListener(SizeChangeListener<T> listener) {
        sizeChangeListenerLock.readLock().lock();
        if (sizeChangeListeners == null) {
            sizeChangeListenerLock.readLock().unlock();
            return;
        }
        sizeChangeListenerLock.readLock().unlock();
        sizeChangeListenerLock.writeLock().lock();
        sizeChangeListeners.remove(listener);
        sizeChangeListenerLock.writeLock().unlock();
    }

    private void notifySizeChangeListeners() {
        sizeChangeListenerLock.readLock().lock();
        if (sizeChangeListeners == null) {
            sizeChangeListenerLock.readLock().unlock();
            return;
        }
        // Same unlock-around-callback pattern as notifyPositionChangeListeners().
        for (int i = sizeChangeListeners.size() - 1; i >= 0; i--) {
            if (i >= sizeChangeListeners.size()) {
                i = sizeChangeListeners.size() - 1;
            }
            SizeChangeListener listener = sizeChangeListeners.get(i);
            sizeChangeListenerLock.readLock().unlock();
            listener.sizeChanged(this);
            sizeChangeListenerLock.readLock().lock();
        }
        sizeChangeListenerLock.readLock().unlock();
    }

    @Override
    public void addPoint(float x, float y) {
        super.addPoint(x, y);
        notifyPositionChangeListeners();
        notifySizeChangeListeners();
        interpolate = true;
    }

    @Override
    public void removePoint(float x, float y) {
        super.removePoint(x, y);
        notifyPositionChangeListeners();
        notifySizeChangeListeners();
        interpolate = true;
    }

    @Override
    public void setX(float x) {
        if (x == getX()) {
            return;
        }
        super.setX(x);
        notifyPositionChangeListeners();
        interpolate = true;
    }

    @Override
    public void setY(float y) {
        if (y == getY()) {
            return;
        }
        super.setY(y);
        notifyPositionChangeListeners();
        interpolate = true;
    }

    @Override
    public void set(float x, float y) {
        if (x == getX() && y == getY()) {
            return;
        }
        super.set(x, y);
        notifyPositionChangeListeners();
        interpolate = true;
    }

    /**
     * Moves immediately with no interpolation: previous and render state are
     * snapped to the new position so nothing lerps on the next frame.
     */
    @Override
    public void forceTo(float x, float y) {
        boolean notifyPositionListeners = x != getX() || y != getY();
        super.set(x, y);
        previousPolygon.set(this);
        renderPolygon.set(previousPolygon);
        if (notifyPositionListeners) {
            notifyPositionChangeListeners();
        }
    }

    @Override
    public void setRotation(float degrees) {
        if (getRotation() == degrees) {
            return;
        }
        super.setRotation(degrees);
        notifyPositionChangeListeners();
        interpolate = true;
    }

    @Override
    public void rotate(float degrees) {
        if (degrees == 0) {
            return;
        }
        super.rotate(degrees);
        notifyPositionChangeListeners();
        interpolate = true;
    }

    @Override
    public void setRotationAround(float centerX, float centerY, float degrees) {
        if (getRotation() == degrees && centerX == getOriginX() && centerY == getOriginY()) {
            return;
        }
        super.setRotationAround(centerX, centerY, degrees);
        notifyPositionChangeListeners();
        interpolate = true;
    }

    @Override
    public void rotateAround(float centerX, float centerY, float degrees) {
        if (degrees == 0) {
            return;
        }
        super.rotateAround(centerX, centerY, degrees);
        notifyPositionChangeListeners();
        interpolate = true;
    }

    @Override
    public void setVertices(float[] vertices) {
        super.setVertices(vertices);
        notifyPositionChangeListeners();
        notifySizeChangeListeners();
        interpolate = true;
    }

    @Override
    public void setVertices(Vector2[] vertices) {
        super.setVertices(vertices);
        notifyPositionChangeListeners();
        notifySizeChangeListeners();
        interpolate = true;
    }

    @Override
    public int getRenderX() {
        return renderX;
    }

    @Override
    public int getRenderY() {
        return renderY;
    }

    @Override
    public float getWidth() {
        return getMaxX() - getMinX();
    }

    @Override
    public float getHeight() {
        return getMaxY() - getMinY();
    }

    @Override
    public Shape getShape() {
        return this;
    }
}
| |
package org.vaadin.alump.distributionbar;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import com.vaadin.annotations.VaadinServletConfiguration;
import com.vaadin.server.VaadinServlet;
import com.vaadin.shared.ui.ContentMode;
import com.vaadin.ui.*;
import com.vaadin.annotations.Theme;
import com.vaadin.annotations.Title;
import com.vaadin.server.VaadinRequest;
import javax.servlet.annotation.WebServlet;
@SuppressWarnings("serial")
@Theme("demo")
@Title("Distribution Bar Demo")
// Demo UI for the DistributionBar Vaadin add-on: builds eight differently styled
// bars, wires click handlers and XHTML tooltips, and randomizes part sizes.
public class DistributionBarDemoUI extends UI {
@WebServlet(value = "/*")
@VaadinServletConfiguration(productionMode = false, ui = DistributionBarDemoUI.class, widgetset = "org.vaadin.alump.distributionbar.demo.widgetset.DBarDemoWidgetset")
public static class FancyLayoutsUIServlet extends VaadinServlet {
}
// All bars created in buildView(), in creation order; randomUpdate() indexes this list.
private List<DistributionBar> bars = new ArrayList<DistributionBar>();
// Number of parts in each demo bar (bar one through bar eight).
final static private int BAR_ONE_PARTS = 2;
final static private int BAR_TWO_PARTS = 3;
final static private int BAR_THREE_PARTS = 6;
final static private int BAR_FOUR_PARTS = 10;
final static private int BAR_FIVE_PARTS = 2;
final static private int BAR_SIX_PARTS = 11;
final static private int BAR_SEVEN_PARTS = 3;
final static private int BAR_EIGHT_PARTS = 8;
// Caption nouns used when randomizing bar four's part captions.
final static private String TYPES[] = {"cows", "chickens", "cats", "cookies", "commandos", "canadians", "cooks",
"captains", "cents", "caught exceptions"};
// Fixed seed keeps the "random" demo reproducible across sessions.
private final Random rand = new Random(0xDEADBEEF);
// Label whose width changes on each randomize, to exercise dynamic layout.
private Label changingLabel;
@Override
protected void init(VaadinRequest request) {
setContent(buildView());
}
// Builds the whole demo layout: header, control buttons and the eight bars.
private ComponentContainer buildView() {
VerticalLayout layout = new VerticalLayout();
layout.setWidth("100%");
layout.setSpacing(true);
layout.setMargin(true);
Label header = new Label(
"Distribution Bar Demo! <b>Remember to push the random button!</b> Tooltips are shown when you move "
+ "mouse above the distribution bars. Not all parts have tooltips in this demo.", ContentMode.HTML);
layout.addComponent(header);
HorizontalLayout buttonLayout = new HorizontalLayout();
buttonLayout.setWidth("100%");
buttonLayout.setSpacing(true);
layout.addComponent(buttonLayout);
Button randomButton = new Button("Randomize", event -> randomUpdate(false));
buttonLayout.addComponent(randomButton);
// "Randomize2" forces zero-sized parts to demonstrate the shrink option.
Button random2Button = new Button("Randomize2", event -> randomUpdate(true));
buttonLayout.addComponent(random2Button);
CheckBox shrink = new CheckBox("Shrink zero values");
shrink.addValueChangeListener(event -> {
boolean shrinkIt = event.getValue();
for (DistributionBar bar : bars) {
bar.setZeroSizedVisible(!shrinkIt);
}
});
buttonLayout.addComponent(shrink);
// Bar one: two parts with a click listener distinguishing the parts.
DistributionBar barOne = new DistributionBar(BAR_ONE_PARTS);
barOne.setCaption("Senate (with clicks):");
barOne.setWidth("100%");
barOne.addStyleName("my-bar-one");
barOne.addDistributionBarClickListener(event -> {
if (event.getPartIndex() == 0) {
Notification.show("Republican clicked!");
} else {
Notification.show("Democratic clicked!");
}
});
layout.addComponent(barOne);
bars.add(barOne);
// Bar two: demonstrates XHTML tooltips and part captions.
DistributionBar barTwo = new DistributionBar(BAR_TWO_PARTS);
barTwo.setCaption("Do people like nicer backgrounds?");
barTwo.setWidth("100%");
barTwo.addStyleName("my-bar-two");
// REMEMBER THAT tooltip is XHTML! Escape < and >!
barTwo.setPartTooltip(0, "Check the one on the right->")
.setPartCaption(0, "Blue");
barTwo.setPartTooltip(1, "<img src=\"http://alump.iki.fi/avatar.png\" />")
.setPartCaption(1, "Yellow");
barTwo.setPartTooltip(2, "<- Check the one on the left")
.setPartCaption(2, "Red");
layout.addComponent(barTwo);
bars.add(barTwo);
// Bar three: many parts, default styling, opens a click-details window.
DistributionBar barThree = new DistributionBar(BAR_THREE_PARTS);
barThree.addDistributionBarClickListener(event -> showClickDetails(event));
barThree.setCaption("Maaaany parts with default styling (and click details)");
barThree.setWidth("100%");
barThree.addStyleName("my-bar-three");
layout.addComponent(barThree);
bars.add(barThree);
// Bar four: last part is styled hidden via CSS but still has a tooltip.
DistributionBar barFour = new DistributionBar(BAR_FOUR_PARTS);
barFour.setCaption("CSS tricks");
barFour.setWidth("100%");
barFour.addStyleName("my-bar-four");
barFour.setPartTooltip(BAR_FOUR_PARTS - 1, "Wow! You found this.");
barFour.setPartStyleName(BAR_FOUR_PARTS - 1, "hidden-part");
layout.addComponent(barFour);
bars.add(barFour);
// Bar five: styled tooltips and a part title.
DistributionBar barFive = new DistributionBar(BAR_FIVE_PARTS);
barFive.setCaption("Vote results:");
barFive.setWidth("100%");
barFive.addStyleName("my-bar-five");
barFive.setPartTooltip(
0,
"<span style=\"color: green; font-size: 200%; font-weight: bold;\">YES! I LIKE IT!</span>");
barFive.setPartTooltip(
1,
"<span style=\"color: red; font-size: 200%; font-weight: bold;\">NO WAY!</span>");
barFive.setPartTitle(1, "NO!");
layout.addComponent(barFive);
bars.add(barFive);
// Bar six: part count itself changes on each randomize.
DistributionBar barSix = new DistributionBar(BAR_SIX_PARTS);
barSix.setCaption("Change in part count:");
barSix.setWidth("100%");
barSix.addStyleName("my-bar-six");
layout.addComponent(barSix);
bars.add(barSix);
// Bar seven: lives inside an expanding layout next to a label whose width changes.
HorizontalLayout resizingLayout = new HorizontalLayout();
resizingLayout.setSpacing(true);
resizingLayout.setWidth("100%");
layout.addComponent(resizingLayout);
resizingLayout.setCaption("Dynamic layout test:");
changingLabel = new Label("changing label");
changingLabel.setSizeUndefined();
resizingLayout.addComponent(changingLabel);
DistributionBar barSeven = new DistributionBar(BAR_SEVEN_PARTS);
barSeven.addStyleName("my-bar-seven");
barSeven.setWidth("100%");
resizingLayout.addComponent(barSeven);
resizingLayout.setExpandRatio(barSeven, 1.0f);
for(int i = 0; i < BAR_SEVEN_PARTS; ++i) {
barSeven.setPartTitle(i, "foo");
barSeven.setPartSize(i, 1 + i);
}
bars.add(barSeven);
// Bar eight: fixed pixel width with an overridden minimum part width.
DistributionBar barEight = new DistributionBar(BAR_EIGHT_PARTS);
barEight.setCaption("Override of min width of 100 pixels");
barEight.addStyleName("my-bar-eight");
barEight.setWidth(300, Unit.PIXELS);
barEight.setMinPartWidth(100);
layout.addComponent(barEight);
bars.add(barEight);
return layout;
}
// Opens a small window at the click position showing all event details.
private void showClickDetails(DistributionBarClickEvent event) {
Window window = new Window();
window.setResizable(false);
window.setCaption("Click details");
window.setPositionY(event.getClientY());
window.setPositionX(event.getClientX());
VerticalLayout layout = new VerticalLayout();
window.setContent(layout);
layout.setSpacing(true);
layout.setMargin(true);
layout.addComponent(createLabel("Part index", "#" + event.getPartIndex() + ", of " + event.getDistributionBar().getNumberOfParts() + " items"));
layout.addComponent(createLabel("Part size", "" + event.getDistributionBar().getPartSize(event.getPartIndex())));
String title = event.getDistributionBar().getPartTitle(event.getPartIndex());
layout.addComponent(createLabel("Part title", title == null ? "null" : title));
layout.addComponent(createLabel("Client coordinates", "X: " + event.getClientX() + ", Y:" + event.getClientY()));
layout.addComponent(createLabel("Relative coordinates", "X: " + event.getRelativeX() + ", Y:" + event.getRelativeY()));
layout.addComponent(createLabel("Modifiers", "Shift:" + event.isShiftKey() + ", Alt:" + event.isAltKey()
+ ", Ctrl:" + event.isCtrlKey() + ", Meta:" + event.isMetaKey()));
getUI().addWindow(window);
}
// Helper: label with a caption, used by showClickDetails().
private static Label createLabel(String caption, String text) {
Label label = new Label(text);
label.setCaption(caption);
return label;
}
// Re-randomizes every bar; useZeros biases several bars towards zero-sized parts
// so the "shrink zero values" option has something to show.
private void randomUpdate(boolean useZeros) {
DistributionBar barOne = bars.get(0);
int chairs = 100;
// Parts of bar one always sum to 100 "chairs".
int groupA = useZeros ? (rand.nextInt(2) * 25) : rand.nextInt(chairs + 1);
int groupB = chairs - groupA;
barOne.setPartSize(0, groupA, Integer.toString(groupA) + " votes")
.setPartSize(1, groupB, Integer.toString(groupB) + " votes");
// ----
DistributionBar barTwo = bars.get(1);
for (int i = 0; i < BAR_TWO_PARTS; ++i) {
// Only middle parts may be zeroed, and only in useZeros mode.
if(useZeros && i > 0 && i < (BAR_TWO_PARTS - 1) && rand.nextBoolean()) {
barTwo.setPartSize(i, 0);
} else {
barTwo.setPartSize(i, rand.nextInt(20));
}
}
// ----
DistributionBar barThree = bars.get(2);
for (int i = 0; i < BAR_THREE_PARTS; ++i) {
int value;
if(useZeros && rand.nextBoolean()) {
value = 0;
} else {
value = rand.nextInt(50);
}
barThree.setPartSize(i, value);
barThree.setPartTooltip(i, "part" + i + ", with size: " + value);
}
// ----
DistributionBar barFour = bars.get(3);
for (int i = 0; i < BAR_FOUR_PARTS; ++i) {
if(useZeros && rand.nextBoolean()) {
barFour.setPartSize(i, 0, null);
} else {
int value = rand.nextInt(10);
String caption = value == 0 ? null : Integer.toString(value) + " " + TYPES[i];
barFour.setPartSize(i, value, caption);
}
}
// ----
DistributionBar barFive = bars.get(4);
for (int i = 0; i < BAR_FIVE_PARTS; ++i) {
if(useZeros && rand.nextBoolean()) {
// NOTE(review): rand.nextInt(1) always yields 0 — presumably a deliberate
// "zero votes" case; confirm before simplifying.
barFive.setPartSize(i, rand.nextInt(1));
} else {
int value = rand.nextInt(10000000);
barFive.setPartSize(i, value,
i == 0 ? Integer.toString(value) + " said YES!" : Integer.toString(value) + " said NO!");
}
}
// ----
// Bar six also changes its number of parts (1..9) each time.
DistributionBar barSix = bars.get(5);
int newSize = 1 + rand.nextInt(9);
barSix.setNumberOfParts(newSize);
for (int i = 0; i < newSize; ++i) {
barSix.setPartSize(i, rand.nextInt(5));
}
// ----
// Bar seven: also vary the neighboring label's text length to force relayout.
DistributionBar barSeven = bars.get(6);
StringBuilder sb = new StringBuilder();
sb.append("changing label ");
int letters = rand.nextInt(5);
for(int i = 0; i < letters; ++i) {
sb.append("x");
}
changingLabel.setValue(sb.toString());
for (int i = 0; i < BAR_SEVEN_PARTS; ++i) {
barSeven.setPartSize(i, 5 + rand.nextInt(15));
}
// ----
DistributionBar barEight = bars.get(7);
for(int i = 0; i < BAR_EIGHT_PARTS; ++i) {
int value = 1 + rand.nextInt(15);
barEight.setPartSize(i, value);
}
}
}
| |
package org.ovirt.engine.ui.webadmin.section.main.presenter.tab.host;
import org.ovirt.engine.core.common.businessentities.NonOperationalReason;
import org.ovirt.engine.core.common.businessentities.VDS;
import org.ovirt.engine.core.common.mode.ApplicationMode;
import org.ovirt.engine.core.compat.Event;
import org.ovirt.engine.core.compat.EventArgs;
import org.ovirt.engine.core.compat.IEventListener;
import org.ovirt.engine.core.compat.PropertyChangedEventArgs;
import org.ovirt.engine.ui.common.presenter.AbstractSubTabPresenter;
import org.ovirt.engine.ui.common.uicommon.model.DetailModelProvider;
import org.ovirt.engine.ui.common.uicommon.model.UiCommonInitEvent;
import org.ovirt.engine.ui.common.widget.tab.ModelBoundTabData;
import org.ovirt.engine.ui.uicommonweb.UICommand;
import org.ovirt.engine.ui.uicommonweb.models.ApplicationModeHelper;
import org.ovirt.engine.ui.uicommonweb.models.hosts.HostGeneralModel;
import org.ovirt.engine.ui.uicommonweb.models.hosts.HostListModel;
import org.ovirt.engine.ui.uicompat.EnumTranslator;
import org.ovirt.engine.ui.uicompat.Translator;
import org.ovirt.engine.ui.webadmin.ApplicationMessages;
import org.ovirt.engine.ui.webadmin.gin.ClientGinjector;
import org.ovirt.engine.ui.webadmin.gin.ClientGinjectorProvider;
import org.ovirt.engine.ui.webadmin.place.ApplicationPlaces;
import org.ovirt.engine.ui.webadmin.section.main.presenter.tab.HostSelectionChangeEvent;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.shared.EventBus;
import com.google.gwt.user.client.ui.Anchor;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.Widget;
import com.google.inject.Inject;
import com.gwtplatform.mvp.client.TabData;
import com.gwtplatform.mvp.client.annotations.NameToken;
import com.gwtplatform.mvp.client.annotations.ProxyCodeSplit;
import com.gwtplatform.mvp.client.annotations.ProxyEvent;
import com.gwtplatform.mvp.client.annotations.TabInfo;
import com.gwtplatform.mvp.client.proxy.PlaceManager;
import com.gwtplatform.mvp.client.proxy.PlaceRequest;
import com.gwtplatform.mvp.client.proxy.RevealContentEvent;
import com.gwtplatform.mvp.client.proxy.TabContentProxyPlace;
public class SubTabHostGeneralPresenter extends AbstractSubTabPresenter<VDS, HostListModel, HostGeneralModel, SubTabHostGeneralPresenter.ViewDef, SubTabHostGeneralPresenter.ProxyDef> {
// Place-bound proxy for this sub tab; @ProxyCodeSplit puts it in its own JS fragment.
@ProxyCodeSplit
@NameToken(ApplicationPlaces.hostGeneralSubTabPlace)
public interface ProxyDef extends TabContentProxyPlace<SubTabHostGeneralPresenter> {
}
/**
 * View contract for the host "General" sub tab; adds alert-panel management on
 * top of the generic sub tab view.
 */
public interface ViewDef extends AbstractSubTabPresenter.ViewDef<VDS> {
/**
* Clear all the alerts currently displayed in the alerts panel of the host.
*/
void clearAlerts();
/**
* Displays a new alert in the alerts panel of the host.
*
* @param widget
*            the widget used to display the alert, usually just a text label, but can also be a text label with
*            a link to an action embedded
*/
void addAlert(Widget widget);
}
// We need this to get the text of the alert messages:
private final ApplicationMessages messages;
@TabInfo(container = HostSubTabPanelPresenter.class)
static TabData getTabData(ClientGinjector ginjector) {
    // Tab label, priority 0, and the model provider backing this sub tab.
    String label = ginjector.getApplicationConstants().hostGeneralSubTabLabel();
    return new ModelBoundTabData(label, 0, ginjector.getSubTabHostGeneralModelProvider());
}
/**
 * Injected constructor; wires the MVP pieces into the generic sub tab base class
 * and obtains the application messages used to build alert texts.
 */
@Inject
public SubTabHostGeneralPresenter(EventBus eventBus, ViewDef view, ProxyDef proxy,
PlaceManager placeManager, DetailModelProvider<HostListModel, HostGeneralModel> modelProvider) {
super(eventBus, view, proxy, placeManager, modelProvider);
// Inject a reference to the messages:
messages = ClientGinjectorProvider.instance().getApplicationMessages();
}
@Override
public void onUiCommonInit(UiCommonInitEvent event) {
    super.onUiCommonInit(event);
    // Populate the alerts panel from the current model state.
    final HostGeneralModel model = getModelProvider().getModel();
    updateAlerts(getView(), model);
    // Refresh the alerts whenever any "...Alert" property of the model changes.
    model.getPropertyChangedEvent().addListener(new IEventListener() {
        @Override
        public void eventRaised(Event ev, Object sender, EventArgs args) {
            if (!(args instanceof PropertyChangedEventArgs)) {
                return;
            }
            PropertyChangedEventArgs changedArgs = (PropertyChangedEventArgs) args;
            if (changedArgs.PropertyName.contains("Alert")) { //$NON-NLS-1$
                updateAlerts(getView(), model);
            }
        }
    });
}
/**
* Review the model and if there are alerts add them to the view.
*
* @param view
*            the view where alerts should be added
* @param model
*            the model to review
*/
private void updateAlerts(final ViewDef view, final HostGeneralModel model) {
// Clear all the alerts:
view.clearAlerts();
// Review the alerts and add those that are active:
if (model.getHasUpgradeAlert()) {
addTextAlert(view, messages.hostHasUpgradeAlert());
}
if (model.getHasReinstallAlertNonResponsive()) {
addTextAlert(view, messages.hostHasReinstallAlertNonResponsive());
}
// Reinstall alerts carry a link that triggers the install command:
if (model.getHasReinstallAlertInstallFailed()) {
addTextAndLinkAlert(view, messages.hostHasReinstallAlertInstallFailed(), model.getInstallCommand());
}
if (model.getHasReinstallAlertMaintenance()) {
addTextAndLinkAlert(view, messages.hostHasReinstallAlertMaintenance(), model.getInstallCommand());
}
if (model.getHasNICsAlert()) {
addTextAndLinkAlert(view, messages.hostHasNICsAlert(), model.getSaveNICsConfigCommand());
}
if (model.getHasManualFenceAlert()) {
addTextAlert(view, messages.hostHasManualFenceAlert());
}
// Power management alerts are not relevant in Gluster-only mode:
if (ApplicationModeHelper.getUiMode() != ApplicationMode.GlusterOnly && model.getHasNoPowerManagementAlert()) {
addTextAndLinkAlert(view, messages.hostHasNoPowerManagementAlert(), model.getEditHostCommand());
}
// Translate the non-operational reason enum into user readable text:
if (model.getNonOperationalReasonEntity() != null) {
Translator translator = EnumTranslator.Create(NonOperationalReason.class);
addTextAlert(view, translator.get(model.getNonOperationalReasonEntity()));
}
}
/**
 * Adds a plain-text alert to the host's alerts panel.
 *
 * @param view
 *            the view where the alert should be added
 * @param text
 *            the text content of the alert
 */
private void addTextAlert(final ViewDef view, final String text) {
    view.addAlert(new Label(text));
}
/**
* Create a widget containing text and a link that triggers the execution of a command.
*
* @param view
* the view where the alert should be added
* @param text
* the text content of the alert
* @param command
* the command that should be executed when the link is clicked
*/
private void addTextAndLinkAlert(final ViewDef view, final String text, final UICommand command) {
// Find the open and close positions of the link within the message:
final int openIndex = text.indexOf("<a>"); //$NON-NLS-1$
final int closeIndex = text.indexOf("</a>"); //$NON-NLS-1$
if (openIndex == -1 || closeIndex == -1 || closeIndex < openIndex) {
return;
}
// Extract the text before, inside and after the tags:
final String beforeText = text.substring(0, openIndex);
final String betweenText = text.substring(openIndex + 3, closeIndex);
final String afterText = text.substring(closeIndex + 4);
// Create a flow panel containing the text and the link:
final FlowPanel alertPanel = new FlowPanel();
// Create the label for the text before the tag:
final Label beforeLabel = new Label(beforeText);
beforeLabel.getElement().getStyle().setProperty("display", "inline"); //$NON-NLS-1$ //$NON-NLS-2$
alertPanel.add(beforeLabel);
// Create the anchor:
final Anchor betweenAnchor = new Anchor(betweenText);
betweenAnchor.getElement().getStyle().setProperty("display", "inline"); //$NON-NLS-1$ //$NON-NLS-2$
alertPanel.add(betweenAnchor);
// Add a listener to the anchor so that the command is executed when
// it is clicked:
betweenAnchor.addClickHandler(
new ClickHandler() {
@Override
public void onClick(ClickEvent event) {
command.Execute();
}
}
);
// Create the label for the text after the tag:
final Label afterLabel = new Label(afterText);
afterLabel.getElement().getStyle().setProperty("display", "inline"); //$NON-NLS-1$ //$NON-NLS-2$
alertPanel.add(afterLabel);
// Add the alert to the view:
view.addAlert(alertPanel);
}
@Override
protected void revealInParent() {
RevealContentEvent.fire(this, HostSubTabPanelPresenter.TYPE_SetTabContent, this);
}
@Override
protected PlaceRequest getMainTabRequest() {
return new PlaceRequest(ApplicationPlaces.hostMainTabPlace);
}
@ProxyEvent
public void onHostSelectionChange(HostSelectionChangeEvent event) {
updateMainTabSelection(event.getSelectedItems());
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.fortress.core.impl;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.apache.directory.api.ldap.model.constants.SchemaConstants;
import org.apache.directory.api.ldap.model.cursor.CursorException;
import org.apache.directory.api.ldap.model.cursor.SearchCursor;
import org.apache.directory.api.ldap.model.entry.Entry;
import org.apache.directory.api.ldap.model.exception.LdapException;
import org.apache.directory.api.ldap.model.exception.LdapInvalidAttributeValueException;
import org.apache.directory.api.ldap.model.message.SearchScope;
import org.apache.directory.fortress.core.FinderException;
import org.apache.directory.fortress.core.GlobalErrIds;
import org.apache.directory.fortress.core.GlobalIds;
import org.apache.directory.fortress.core.ldap.LdapDataProvider;
import org.apache.directory.fortress.core.model.AuthZ;
import org.apache.directory.fortress.core.model.Bind;
import org.apache.directory.fortress.core.model.Mod;
import org.apache.directory.fortress.core.model.ObjectFactory;
import org.apache.directory.fortress.core.model.UserAudit;
import org.apache.directory.fortress.core.util.AuditUtil;
import org.apache.directory.fortress.core.util.Config;
import org.apache.directory.fortress.core.util.time.TUtil;
import org.apache.directory.ldap.client.api.LdapConnection;
/**
* This class performs data access for OpenLDAP synch repl log data
* <p>
* <h3>1. Binds</h3>
* <p>
* The auditBind Structural object class is used to store authentication events that can later be queried via ldap API.<br>
* <code># The Bind class includes the reqVersion attribute which contains the LDAP</code>
* <code># protocol version specified in the Bind as well as the reqMethod attribute</code>
* <code># which contains the Bind Method used in the Bind. This will be the string</code>
* <code># SIMPLE for LDAP Simple Binds or SASL(mech) for SASL Binds. Note that unless</code>
* <code># configured as a global overlay, only Simple Binds using DNs that reside in</code>
* <code># the current database will be logged:</code>
* <ul>
* <li> ------------------------------------------
* <li> <code>objectclass ( 1.3.6.1.4.1.4203.666.11.5.2.6 NAME 'auditBind'</code>
* <li> <code>DESC 'Bind operation'</code>
* <li> <code>SUP auditObject STRUCTURAL</code>
* <li> <code>MUST ( reqVersion $ reqMethod ) )</code>
* <li> ------------------------------------------
* </ul>
* <h3>2. Authorizations</h3>
* <code>For the Search class the reqScope attribute contains the scope of the</code><br>
* <code>original search request, using the values specified for the LDAP URL</code><br>
* <code>format. I.e. base, one, sub, or subord. The reqDerefAliases attribute</code><br>
* <code>is one of never, finding, searching, or always, denoting how aliases</code><br>
* <code>will be processed during the search. The reqAttrsOnly attribute is a</code><br>
* <code>Boolean value showing TRUE if only attribute names were requested, or</code><br>
* <code>FALSE if attributes and their values were requested. The reqFilter</code><br>
* <code>attribute carries the filter used in the search request. The reqAttr</code><br>
* <code>attribute lists the requested attributes if specific attributes were</code><br>
* <code>requested. The reqEntries attribute is the integer count of how many</code><br>
* <code>entries were returned by this search request. The reqSizeLimit and</code><br>
* <code>reqTimeLimit attributes indicate what limits were requested on the</code><br>
* <code>search operation.</code><br>
* <ul>
* <li> ------------------------------------------
* <li> <code>objectclass ( 1.3.6.1.4.1.4203.666.11.5.2.11</code>
* <li> <code>NAME 'auditSearch'</code>
* <li> <code>DESC 'Search operation'</code>
* <li> <code>SUP auditReadObject STRUCTURAL</code>
* <li> <code>MUST ( reqScope $ reqDerefAliases $ reqAttrsOnly )</code>
* <li> <code>MAY ( reqFilter $ reqAttr $ reqEntries $ reqSizeLimit $</code>
* <li> <code>reqTimeLimit ) )</code>
* <li> ------------------------------------------
* </ul>
* <p>
* <p>
* <h3>3. Modifications</h3>
* The auditModify Structural object class is used to store Fortress update and delete events that can later be queried via ldap API.<br>
* The deletions can be recorded in this manner and associated with Fortress context because deletions will perform a modification first
* if audit is enabled.
* <p>
* <code>The Modify operation contains a description of modifications in the</code><br>
* <code>reqMod attribute, which was already described above in the Add</code><br>
* <code>operation. It may optionally contain the previous contents of any</code><br>
* <code>modified attributes in the reqOld attribute, using the same format as</code><br>
* <code>described above for the Delete operation. The reqOld attribute is only</code><br>
* <code>populated if the entry being modified matches the configured logold</code><br>
* <code>filter.</code><br>
* <ul>
* <li> ------------------------------------------
* <li> <code>objectclass ( 1.3.6.1.4.1.4203.666.11.5.2.9</code>
* <li> <code>NAME 'auditModify'</code>
* <li> <code>DESC 'Modify operation'</code>
* <li> <code>SUP auditWriteObject STRUCTURAL</code>
* <li> <code>MAY reqOld MUST reqMod )</code>
* <li> ------------------------------------------
* </ul>
* <p>
* Note this class used descriptions pulled from man pages on slapd access log.
* <p>
* This class is thread safe.
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
final class AuditDAO extends LdapDataProvider
{
private static final String CREATETIMESTAMP = "createTimestamp";
private static final String CREATORSNAME = "creatorsName";
private static final String ENTRYCSN = "entryCSN";
private static final String ENTRYDN = "entryDN";
private static final String ENTRYUUID = "entryUUID";
private static final String HASSUBORDINATES = "hasSubordinates";
private static final String MODIFIERSNAME = "modifiersName";
private static final String MODIFYTIMESTAMP = "modifyTimestamp";
private static final String OBJECTCLASS = "objectClass";
private static final String REQUAUTHZID = "reqAuthzID";
private static final String REQCONTROLS = "reqControls";
private static final String REQDN = "reqDN";
private static final String REQEND = "reqEnd";
private static final String REQMETHOD = "reqMethod";
private static final String REQRESULT = "reqResult";
private static final String REQSESSION = "reqSession";
private static final String REQSTART = "reqStart";
private static final String REQTYPE = "reqType";
private static final String REQVERSION = "reqVersion";
private static final String REQMOD = "reqMod";
private static final String STRUCTURALOBJECTCLASS = "structuralObjectClass";
private static final String SUBSCHEMAENTRY = "subschemaSubentry";
private static final String REQATTR = "reqAttr";
private static final String REQATTRSONLY = "reqAttrsOnly";
private static final String REQDREFALIASES = "reqDerefAliases";
private static final String REQENTRIES = "reqEntries";
private static final String REQFILTER = "reqFilter";
private static final String REQSCOPE = "reqScope";
private static final String REQSIZELIMIT = "reqSizeLimit";
private static final String REQTIMELIMIT = "reqTimeLimit";
private static final String REQASSERTION = "reqAssertion";
private static final String ACCESS_BIND_CLASS_NM = "auditBind";
//private static final String ACCESS_AUTHZ_CLASS_NM = "auditSearch";
private static final String ACCESS_AUTHZ_CLASS_NM = "auditCompare";
private static final String ACCESS_MOD_CLASS_NM = "auditModify";
private static final String ACCESS_ADD_CLASS_NM = "auditAdd";
private static final String AUDIT_ROOT = "audit.root";
private static final String[] AUDIT_AUTHZ_ATRS =
{
CREATETIMESTAMP, CREATORSNAME, ENTRYCSN, ENTRYDN, ENTRYUUID, HASSUBORDINATES, MODIFIERSNAME,
MODIFYTIMESTAMP, OBJECTCLASS, REQATTR, REQATTRSONLY, REQUAUTHZID, REQCONTROLS, REQDN, REQDREFALIASES,
REQEND, REQENTRIES, REQFILTER, REQRESULT, REQSCOPE, REQSESSION, REQSIZELIMIT, REQSTART, REQTIMELIMIT,
REQTYPE, STRUCTURALOBJECTCLASS, SUBSCHEMAENTRY
};
private static final String[] AUDIT_BIND_ATRS =
{
CREATETIMESTAMP, CREATORSNAME, ENTRYCSN, ENTRYDN, ENTRYUUID, HASSUBORDINATES, MODIFIERSNAME,
MODIFYTIMESTAMP, OBJECTCLASS, REQUAUTHZID, REQCONTROLS, REQDN, REQEND, REQMETHOD, REQRESULT,
REQSESSION, REQSTART, REQTYPE, REQVERSION, STRUCTURALOBJECTCLASS, SUBSCHEMAENTRY
};
private static final String[] AUDIT_MOD_ATRS =
{
OBJECTCLASS, REQUAUTHZID, REQDN, REQEND, REQRESULT, REQSESSION, REQSTART, REQTYPE, REQMOD
};
public AuditDAO(){
super();
}
/**
* This method returns failed authentications where the userid is not present in the directory. This
* is possible because Fortress performs read on user before the bind.
* User:
* dn: reqStart=20101014235402.000000Z, cn=log
* reqStart: 20101014235402.000000Z
* reqEnd: 20101014235402.000001Z
* reqAuthzID: cn=Manager,dc=jts,dc=com
* reqDerefAliases: never
* reqSession: 84
* reqAttrsOnly: FALSE
* reqSizeLimit: -1
* objectClass: auditSearch
* reqResult: 32
* reqAttr: ftId
* reqAttr: uid
* reqAttr: userpassword
* reqAttr: description
* reqAttr: ou
* reqAttr: cn
* reqAttr: sn
* reqAttr: ftRoleCstr
* reqAttr: ftCstr
* reqAttr: ftRoleAsgn
* reqAttr: pwdReset
* reqAttr: pwdAccountLockedTime
* reqAttr: ftProps
* reqEntries: 0
* reqFilter: (|(objectClass=*)(?objectClass=ldapSubentry))
* reqType: search
* reqDN: uid=foo,ou=People,dc=jts,dc=com /cal/cal2.jsp
* reqTimeLimit: -1
* reqScope: base
*
* @param audit
* @return
* @throws org.apache.directory.fortress.core.FinderException
*
*/
List<AuthZ> searchInvalidAuthNs( UserAudit audit ) throws FinderException
{
List<AuthZ> auditList = new ArrayList<>();
LdapConnection ld = null;
String auditRoot = Config.getInstance().getProperty( AUDIT_ROOT );
String userRoot = Config.getInstance().getProperty( GlobalIds.USER_ROOT );
try
{
// use wildcard for user if not passed in:
//reqDN: uid=foo,ou=People,dc=jts,dc=com
//(&
// (objectclass=auditSearch)
// (reqDN=uid=*,ou=People,dc=jts,dc=com)
// (reqAuthzID=cn=Manager,dc=jts,dc=com)
// (reqEntries=0)
// )
String filter = GlobalIds.FILTER_PREFIX + ACCESS_AUTHZ_CLASS_NM + ")(";
String userId;
if ( StringUtils.isNotEmpty( audit.getUserId() ) )
{
userId = audit.getUserId();
filter += REQDN + "=" + SchemaConstants.UID_AT + "=" + userId + "," + userRoot + ")(" +
REQUAUTHZID + "=" + "cn=Manager," + Config.getInstance().getProperty( GlobalIds.SUFFIX ) + ")";
}
else
{
// pull back all failed authN attempts for all users:
filter += REQATTR + "=" + SchemaConstants.UID_AT + ")(" +
REQUAUTHZID + "=" + "cn=Manager," + Config.getInstance().getProperty( GlobalIds.SUFFIX ) + ")";
}
if ( audit.isFailedOnly() )
{
filter += "(" + REQENTRIES + "=" + 0 + ")";
}
if ( audit.getBeginDate() != null )
{
String szTime = TUtil.encodeGeneralizedTime( audit.getBeginDate() );
filter += "(" + REQEND + ">=" + szTime + ")";
}
filter += ")";
//log.warn("filter=" + filter);
ld = getLogConnection();
SearchCursor searchResults = search( ld, auditRoot,
SearchScope.ONELEVEL, filter, AUDIT_AUTHZ_ATRS, false, GlobalIds.BATCH_SIZE );
long sequence = 0;
while ( searchResults.next() )
{
AuthZ authZ = getAuthzEntityFromLdapEntry( searchResults.getEntry(), sequence++ );
// todo: fix this workaround. This search will return failed role assign searches as well.
// Work around is to remove the ou=People failed searches from user failed searches on authN.
if ( !AuditUtil.getAuthZId( authZ.getReqDN() ).equalsIgnoreCase( "People" ) )
{
auditList.add( authZ );
}
}
}
catch ( LdapException e )
{
String error = "LdapException in AuditDAO.searchAuthZs id=" + e;
throw new FinderException( GlobalErrIds.AUDT_AUTHN_INVALID_FAILED, error, e );
}
catch ( CursorException e )
{
String error = "CursorException in AuditDAO.searchAuthZs id=" + e.getMessage();
throw new FinderException( GlobalErrIds.AUDT_AUTHN_INVALID_FAILED, error, e );
}
finally
{
closeLogConnection( ld );
}
return auditList;
}
/**
* @param audit
* @return
* @throws org.apache.directory.fortress.core.FinderException
*
*/
List<AuthZ> searchAuthZs( UserAudit audit ) throws FinderException
{
List<AuthZ> auditList = new ArrayList<>();
LdapConnection ld = null;
String auditRoot = Config.getInstance().getProperty( AUDIT_ROOT );
String permRoot = getRootDn( audit.isAdmin(), audit.getContextId() );
String userRoot = getRootDn( audit.getContextId(), GlobalIds.USER_ROOT );
try
{
String reqDn = PermDAO.getOpRdn( audit.getOpName(), audit.getObjId() ) + "," + GlobalIds.POBJ_NAME + "="
+ audit.getObjName() + "," + permRoot;
String filter = GlobalIds.FILTER_PREFIX + ACCESS_AUTHZ_CLASS_NM + ")(" + REQDN + "=" +
reqDn + ")(" + REQUAUTHZID + "=" + SchemaConstants.UID_AT + "=" + audit.getUserId() + "," + userRoot
+ ")";
if ( audit.isFailedOnly() )
{
filter += "(" + REQRESULT + "=" + GlobalIds.AUTHZ_COMPARE_FAILURE_FLAG + ")";
}
if ( audit.getBeginDate() != null )
{
String szTime = TUtil.encodeGeneralizedTime( audit.getBeginDate() );
filter += "(" + REQEND + ">=" + szTime + ")";
}
filter += ")";
//System.out.println("filter=" + filter);
ld = getLogConnection();
SearchCursor searchResults = search( ld, auditRoot,
SearchScope.ONELEVEL, filter, AUDIT_AUTHZ_ATRS, false, GlobalIds.BATCH_SIZE );
long sequence = 0;
while ( searchResults.next() )
{
auditList.add( getAuthzEntityFromLdapEntry( searchResults.getEntry(), sequence++ ) );
}
}
catch ( LdapException e )
{
String error = "LdapException in AuditDAO.searchAuthZs id=" + e;
throw new FinderException( GlobalErrIds.AUDT_AUTHZ_SEARCH_FAILED, error, e );
}
catch ( CursorException e )
{
String error = "CursorException in AuditDAO.searchAuthZs id=" + e.getMessage();
throw new FinderException( GlobalErrIds.AUDT_AUTHZ_SEARCH_FAILED, error, e );
}
finally
{
closeLogConnection( ld );
}
return auditList;
}
private String getRootDn( boolean isAdmin, String contextId )
{
String dn;
if ( isAdmin )
{
dn = getRootDn( contextId, GlobalIds.ADMIN_PERM_ROOT );
}
else
{
dn = getRootDn( contextId, GlobalIds.PERM_ROOT );
}
return dn;
}
/**
* @param audit
* @return
* @throws org.apache.directory.fortress.core.FinderException
*
*/
List<AuthZ> getAllAuthZs( UserAudit audit ) throws FinderException
{
List<AuthZ> auditList = new ArrayList<>();
LdapConnection ld = null;
String auditRoot = Config.getInstance().getProperty( AUDIT_ROOT );
String userRoot = getRootDn( audit.getContextId(), GlobalIds.USER_ROOT );
try
{
String filter = GlobalIds.FILTER_PREFIX + ACCESS_AUTHZ_CLASS_NM + ")(";
if ( audit.getUserId() != null && audit.getUserId().length() > 0 )
{
filter += REQUAUTHZID + "=" + SchemaConstants.UID_AT + "=" + audit.getUserId() + "," + userRoot + ")";
}
else
{
// have to limit the query to only authorization entries.
// TODO: determine why the cn=Manager user is showing up in this search:
filter += REQUAUTHZID + "=*)(!(" + REQUAUTHZID + "=cn=Manager," + Config.getInstance().getProperty( GlobalIds.SUFFIX )
+ "))";
// TODO: fix this so filter by only the Fortress AuthZ entries and not the others:
if ( audit.isFailedOnly() )
{
filter += "(" + REQRESULT + "=" + GlobalIds.AUTHZ_COMPARE_FAILURE_FLAG + ")";
}
}
if ( audit.getBeginDate() != null )
{
String szTime = TUtil.encodeGeneralizedTime( audit.getBeginDate() );
filter += "(" + REQEND + ">=" + szTime + ")";
}
filter += ")";
//log.warn("filter=" + filter);
ld = getLogConnection();
SearchCursor searchResults = search( ld, auditRoot,
SearchScope.ONELEVEL, filter, AUDIT_AUTHZ_ATRS, false, GlobalIds.BATCH_SIZE );
long sequence = 0;
while ( searchResults.next() )
{
auditList.add( getAuthzEntityFromLdapEntry( searchResults.getEntry(), sequence++ ) );
}
}
catch ( LdapException e )
{
String error = "LdapException in AuditDAO.getAllAuthZs id=" + e;
throw new FinderException( GlobalErrIds.AUDT_AUTHZ_SEARCH_FAILED, error, e );
}
catch ( CursorException e )
{
String error = "CursorException in AuditDAO.getAllAuthZs id=" + e.getMessage();
throw new FinderException( GlobalErrIds.AUDT_AUTHZ_SEARCH_FAILED, error, e );
}
finally
{
closeLogConnection( ld );
}
return auditList;
}
/**
* @param audit
* @return
* @throws org.apache.directory.fortress.core.FinderException
*
*/
List<Bind> searchBinds( UserAudit audit ) throws FinderException
{
List<Bind> auditList = new ArrayList<>();
LdapConnection ld = null;
String auditRoot = Config.getInstance().getProperty( AUDIT_ROOT );
String userRoot = getRootDn( audit.getContextId(), GlobalIds.USER_ROOT );
try
{
String filter;
if ( audit.getUserId() != null && audit.getUserId().length() > 0 )
{
filter = GlobalIds.FILTER_PREFIX + ACCESS_BIND_CLASS_NM + ")(" +
REQDN + "=" + SchemaConstants.UID_AT + "=" + audit.getUserId() + "," + userRoot + ")";
if ( audit.isFailedOnly() )
{
filter += "(" + REQRESULT + ">=" + 1 + ")";
}
if ( audit.getBeginDate() != null )
{
String szTime = TUtil.encodeGeneralizedTime( audit.getBeginDate() );
filter += "(" + REQEND + ">=" + szTime + ")";
}
filter += ")";
}
else
{
filter = GlobalIds.FILTER_PREFIX + ACCESS_BIND_CLASS_NM + ")";
if ( audit.isFailedOnly() )
{
filter += "(" + REQRESULT + ">=" + 1 + ")";
}
if ( audit.getBeginDate() != null )
{
String szTime = TUtil.encodeGeneralizedTime( audit.getBeginDate() );
filter += "(" + REQEND + ">=" + szTime + ")";
}
filter += ")";
}
//log.warn("filter=" + filter);
ld = getLogConnection();
SearchCursor searchResults = search( ld, auditRoot,
SearchScope.ONELEVEL, filter, AUDIT_BIND_ATRS, false, GlobalIds.BATCH_SIZE );
long sequence = 0;
while ( searchResults.next() )
{
auditList.add( getBindEntityFromLdapEntry( searchResults.getEntry(), sequence++ ) );
}
}
catch ( LdapException e )
{
String error = "LdapException in AuditDAO.searchBinds id=" + e;
throw new FinderException( GlobalErrIds.AUDT_BIND_SEARCH_FAILED, error, e );
}
catch ( CursorException e )
{
String error = "CursorException in AuditDAO.searchBinds id=" + e.getMessage();
throw new FinderException( GlobalErrIds.AUDT_BIND_SEARCH_FAILED, error, e );
}
finally
{
closeLogConnection( ld );
}
return auditList;
}
/**
* @param audit
* @return
* @throws org.apache.directory.fortress.core.FinderException
*
*/
List<Mod> searchUserMods( UserAudit audit ) throws FinderException
{
List<Mod> modList = new ArrayList<>();
LdapConnection ld = null;
String auditRoot = Config.getInstance().getProperty( AUDIT_ROOT );
String userRoot = getRootDn( audit.getContextId(), GlobalIds.USER_ROOT );
try
{
String filter = GlobalIds.FILTER_PREFIX + ACCESS_MOD_CLASS_NM + ")(" +
REQDN + "=" + SchemaConstants.UID_AT + "=" + audit.getUserId() + "," + userRoot + ")";
if ( audit.getBeginDate() != null )
{
String szTime = TUtil.encodeGeneralizedTime( audit.getBeginDate() );
filter += "(" + REQEND + ">=" + szTime + ")";
}
filter += ")";
//log.warn("filter=" + filter);
ld = getLogConnection();
SearchCursor searchResults = search( ld, auditRoot,
SearchScope.ONELEVEL, filter, AUDIT_MOD_ATRS, false, GlobalIds.BATCH_SIZE );
long sequence = 0;
while ( searchResults.next() )
{
modList.add( getModEntityFromLdapEntry( searchResults.getEntry(), sequence++ ) );
}
}
catch ( LdapException e )
{
String error = "searchUserMods caught LdapException id=" + e;
throw new FinderException( GlobalErrIds.AUDT_MOD_SEARCH_FAILED, error, e );
}
catch ( CursorException e )
{
String error = "searchUserMods caught CursorException id=" + e.getMessage();
throw new FinderException( GlobalErrIds.AUDT_MOD_SEARCH_FAILED, error, e );
}
finally
{
closeLogConnection( ld );
}
return modList;
}
/**
* @param audit
* @return
* @throws FinderException
*/
List<Mod> searchAdminMods( UserAudit audit ) throws FinderException
{
List<Mod> modList = new ArrayList<>();
LdapConnection ld = null;
String auditRoot = Config.getInstance().getProperty( AUDIT_ROOT );
try
{
String filter = "(&(|(objectclass=" + ACCESS_MOD_CLASS_NM + ")";
filter += "(objectclass=" + ACCESS_ADD_CLASS_NM + "))";
if ( StringUtils.isNotEmpty( audit.getDn() ) )
{
filter += "(" + REQDN + "=" + audit.getDn() + ")";
}
if ( StringUtils.isNotEmpty( audit.getObjName() ) )
{
filter += "(|(" + REQMOD + "=" + GlobalIds.FT_MODIFIER_CODE + ":= " + audit.getObjName() + ".";
if ( StringUtils.isNotEmpty( audit.getOpName() ) )
{
filter += audit.getOpName();
}
filter += "*)";
filter += "(" + REQMOD + "=" + GlobalIds.FT_MODIFIER_CODE + ":+ " + audit.getObjName() + ".";
if ( StringUtils.isNotEmpty( audit.getOpName() ) )
{
filter += audit.getOpName();
}
filter += "*))";
}
if ( StringUtils.isNotEmpty( audit.getInternalUserId() ) )
{
filter += "(|(" + REQMOD + "=" + GlobalIds.FT_MODIFIER + ":= " + audit.getInternalUserId() + ")";
filter += "(" + REQMOD + "=" + GlobalIds.FT_MODIFIER + ":+ " + audit.getInternalUserId() + "))";
}
if ( audit.getBeginDate() != null )
{
String szTime = TUtil.encodeGeneralizedTime( audit.getBeginDate() );
filter += "(" + REQEND + ">=" + szTime + ")";
}
if ( audit.getEndDate() != null )
{
String szTime = TUtil.encodeGeneralizedTime( audit.getEndDate() );
filter += "(" + REQEND + "<=" + szTime + ")";
}
filter += ")";
//log.warn("filter=" + filter);
ld = getLogConnection();
SearchCursor searchResults = search( ld, auditRoot,
SearchScope.ONELEVEL, filter, AUDIT_MOD_ATRS, false, GlobalIds.BATCH_SIZE );
long sequence = 0;
while ( searchResults.next() )
{
modList.add( getModEntityFromLdapEntry( searchResults.getEntry(), sequence++ ) );
}
}
catch ( LdapException e )
{
String error = "searchAdminMods caught LdapException id=" + e;
throw new FinderException( GlobalErrIds.AUDT_MOD_ADMIN_SEARCH_FAILED, error, e );
}
catch ( CursorException e )
{
String error = "searchAdminMods caught CursorException id=" + e.getMessage();
throw new FinderException( GlobalErrIds.AUDT_MOD_ADMIN_SEARCH_FAILED, error, e );
}
finally
{
closeLogConnection( ld );
}
return modList;
}
/**
* @param le
* @return
* @throws LdapInvalidAttributeValueException
* @throws LdapException
*/
private Bind getBindEntityFromLdapEntry( Entry le, long sequence ) throws LdapInvalidAttributeValueException
{
Bind auditBind = new ObjectFactory().createBind();
auditBind.setSequenceId( sequence );
auditBind.setCreateTimestamp( getAttribute( le, CREATETIMESTAMP ) );
auditBind.setCreatorsName( getAttribute( le, CREATORSNAME ) );
auditBind.setEntryCSN( getAttribute( le, ENTRYCSN ) );
auditBind.setEntryDN( getAttribute( le, ENTRYDN ) );
auditBind.setEntryUUID( getAttribute( le, ENTRYUUID ) );
auditBind.setHasSubordinates( getAttribute( le, HASSUBORDINATES ) );
auditBind.setModifiersName( getAttribute( le, MODIFIERSNAME ) );
auditBind.setModifyTimestamp( getAttribute( le, MODIFYTIMESTAMP ) );
auditBind.setObjectClass( getAttribute( le, OBJECTCLASS ) );
auditBind.setReqAuthzID( getAttribute( le, REQUAUTHZID ) );
auditBind.setReqControls( getAttribute( le, REQCONTROLS ) );
auditBind.setReqDN( getAttribute( le, REQDN ) );
auditBind.setReqEnd( getAttribute( le, REQEND ) );
auditBind.setReqMethod( getAttribute( le, REQMETHOD ) );
auditBind.setReqResult( getAttribute( le, REQRESULT ) );
auditBind.setReqSession( getAttribute( le, REQSESSION ) );
auditBind.setReqStart( getAttribute( le, REQSTART ) );
auditBind.setReqType( getAttribute( le, REQTYPE ) );
auditBind.setReqVersion( getAttribute( le, REQVERSION ) );
auditBind.setStructuralObjectClass( getAttribute( le, STRUCTURALOBJECTCLASS ) );
return auditBind;
}
/**
* @param le
* @return
* @throws LdapInvalidAttributeValueException
* @throws LdapException
*/
private AuthZ getAuthzEntityFromLdapEntry( Entry le, long sequence ) throws LdapInvalidAttributeValueException
{
// these attrs also on audit bind OC:
AuthZ authZ = new ObjectFactory().createAuthZ();
authZ.setSequenceId( sequence );
authZ.setCreateTimestamp( getAttribute( le, CREATETIMESTAMP ) );
authZ.setCreatorsName( getAttribute( le, CREATORSNAME ) );
authZ.setEntryCSN( getAttribute( le, ENTRYCSN ) );
authZ.setEntryDN( getAttribute( le, ENTRYDN ) );
authZ.setEntryUUID( getAttribute( le, ENTRYUUID ) );
authZ.setHasSubordinates( getAttribute( le, HASSUBORDINATES ) );
authZ.setModifiersName( getAttribute( le, MODIFIERSNAME ) );
authZ.setModifyTimestamp( getAttribute( le, MODIFYTIMESTAMP ) );
authZ.setObjectClass( getAttribute( le, OBJECTCLASS ) );
authZ.setReqAuthzID( getAttribute( le, REQUAUTHZID ) );
authZ.setReqControls( getAttribute( le, REQCONTROLS ) );
authZ.setReqDN( getAttribute( le, REQDN ) );
authZ.setReqEnd( getAttribute( le, REQEND ) );
authZ.setReqResult( getAttribute( le, REQRESULT ) );
authZ.setReqSession( getAttribute( le, REQSESSION ) );
authZ.setReqStart( getAttribute( le, REQSTART ) );
authZ.setReqType( getAttribute( le, REQTYPE ) );
authZ.setStructuralObjectClass( getAttribute( le, STRUCTURALOBJECTCLASS ) );
// these attrs only on audit search OC:
authZ.setReqAttr( getAttribute( le, REQATTR ) );
authZ.setReqAttrsOnly( getAttribute( le, REQATTRSONLY ) );
authZ.setReqDerefAliases( getAttribute( le, REQDREFALIASES ) );
authZ.setReqEntries( getAttribute( le, REQENTRIES ) );
authZ.setReqFilter( getAttribute( le, REQFILTER ) );
authZ.setReqScope( getAttribute( le, REQSCOPE ) );
authZ.setReqSizeLimit( getAttribute( le, REQSIZELIMIT ) );
authZ.setReqTimeLimit( getAttribute( le, REQTIMELIMIT ) );
return authZ;
}
private Mod getModEntityFromLdapEntry( Entry le, long sequence ) throws LdapInvalidAttributeValueException
{
Mod mod = new ObjectFactory().createMod();
mod.setSequenceId( sequence );
mod.setObjectClass( getAttribute( le, OBJECTCLASS ) );
mod.setReqAuthzID( getAttribute( le, REQUAUTHZID ) );
mod.setReqDN( getAttribute( le, REQDN ) );
mod.setReqEnd( getAttribute( le, REQEND ) );
mod.setReqResult( getAttribute( le, REQRESULT ) );
mod.setReqSession( getAttribute( le, REQSESSION ) );
mod.setReqStart( getAttribute( le, REQSTART ) );
mod.setReqType( getAttribute( le, REQTYPE ) );
mod.setReqMod( getAttributes( le, REQMOD ) );
return mod;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tinkerpop.gremlin.groovy.engine;
import org.apache.commons.lang.ClassUtils;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.commons.lang3.concurrent.BasicThreadFactory;
import org.apache.tinkerpop.gremlin.jsr223.CachedGremlinScriptEngineManager;
import org.apache.tinkerpop.gremlin.jsr223.GremlinPlugin;
import org.apache.tinkerpop.gremlin.jsr223.GremlinScriptEngine;
import org.apache.tinkerpop.gremlin.jsr223.GremlinScriptEngineManager;
import org.apache.tinkerpop.gremlin.process.traversal.Bytecode;
import org.apache.tinkerpop.gremlin.process.traversal.Traversal;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.script.Bindings;
import javax.script.Compilable;
import javax.script.CompiledScript;
import javax.script.ScriptException;
import javax.script.SimpleBindings;
import java.lang.ref.WeakReference;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.FutureTask;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Stream;
/**
* Execute Gremlin scripts against a {@code ScriptEngine} instance. It is designed to host any JSR-223 enabled
* {@code ScriptEngine} and assumes such engines are designed to be thread-safe in the evaluation. Script evaluation
* functions return a {@link CompletableFuture} where scripts may timeout if their evaluation
* takes too long. The default timeout is 8000ms.
* <p/>
* By default, the {@code GremlinExecutor} initializes itself to use a shared thread pool initialized with four
* threads. This default thread pool is shared for both the task of executing script evaluations and for scheduling
* timeouts. It is worth noting that a timeout simply triggers the returned {@link CompletableFuture} to abort, but
* the thread processing the script will continue to evaluate until completion. This offers only marginal protection
* against run-away scripts.
*
* @author Stephen Mallette (http://stephen.genoprime.com)
*/
public class GremlinExecutor implements AutoCloseable {
    private static final Logger logger = LoggerFactory.getLogger(GremlinExecutor.class);
    // resolves and caches GremlinScriptEngine instances by language name; (re)configured with the
    // supplied plugins in initializeGremlinScriptEngineManager()
    private GremlinScriptEngineManager gremlinScriptEngineManager;
    // plugin configuration: language name -> plugin class name -> (builder method name -> argument)
    private final Map<String, Map<String, Map<String,Object>>> plugins;
    // default evaluation timeout in milliseconds; zero disables the timeout. may be overridden per-eval
    // through LifeCycle.getScriptEvaluationTimeoutOverride()
    private final long scriptEvaluationTimeout;
    // bindings merged into every script evaluation (request bindings win on key collisions)
    private final Bindings globalBindings;
    // pool that runs the script evaluations
    private final ExecutorService executorService;
    // pool that schedules the evaluation-timeout tasks
    private final ScheduledExecutorService scheduledExecutorService;
    // default lifecycle hooks, used whenever an eval's LifeCycle does not override them
    private final Consumer<Bindings> beforeEval;
    private final Consumer<Bindings> afterSuccess;
    private final Consumer<Bindings> afterTimeout;
    private final BiConsumer<Bindings, Throwable> afterFailure;
    // true when the corresponding pool was supplied externally, in which case closeAsync() leaves it running
    private final boolean suppliedExecutor;
    private final boolean suppliedScheduledExecutor;

    /**
     * Copies the builder's configuration and initializes the script engine manager with the configured plugins.
     */
    private GremlinExecutor(final Builder builder, final boolean suppliedExecutor,
                            final boolean suppliedScheduledExecutor) {
        this.executorService = builder.executorService;
        this.scheduledExecutorService = builder.scheduledExecutorService;
        this.beforeEval = builder.beforeEval;
        this.afterSuccess = builder.afterSuccess;
        this.afterTimeout = builder.afterTimeout;
        this.afterFailure = builder.afterFailure;
        this.plugins = builder.plugins;
        this.scriptEvaluationTimeout = builder.scriptEvaluationTimeout;
        this.globalBindings = builder.globalBindings;
        this.gremlinScriptEngineManager = new CachedGremlinScriptEngineManager();
        initializeGremlinScriptEngineManager();
        this.suppliedExecutor = suppliedExecutor;
        this.suppliedScheduledExecutor = suppliedScheduledExecutor;
    }
/**
* Attempts to compile a script and cache it in the default {@link javax.script.ScriptEngine}. This is only
* possible if the {@link javax.script.ScriptEngine} implementation implements {@link javax.script.Compilable}.
* In the event that the default {@link javax.script.ScriptEngine} does not implement it, the method will
* return empty.
*/
public Optional<CompiledScript> compile(final String script) throws ScriptException {
return compile(script, Optional.empty());
}
/**
* Attempts to compile a script and cache it in the request {@link javax.script.ScriptEngine}. This is only
* possible if the {@link javax.script.ScriptEngine} implementation implements {@link Compilable}.
* In the event that the requested {@link javax.script.ScriptEngine} does not implement it, the method will
* return empty.
*/
public Optional<CompiledScript> compile(final String script, final Optional<String> language) throws ScriptException {
final String lang = language.orElse("gremlin-groovy");
try {
final GremlinScriptEngine scriptEngine = gremlinScriptEngineManager.getEngineByName(lang);
if (scriptEngine instanceof Compilable)
return Optional.of(((Compilable) scriptEngine).compile(script));
else
return Optional.empty();
} catch (UnsupportedOperationException uoe) {
return Optional.empty();
}
}
/**
* Evaluate a script with empty bindings.
*/
public CompletableFuture<Object> eval(final String script) {
return eval(script, null, new SimpleBindings());
}
/**
* Evaluate a script with specified bindings.
*/
public CompletableFuture<Object> eval(final String script, final Bindings boundVars) {
return eval(script, null, boundVars);
}
/**
* Evaluate a script with a {@link Map} of bindings.
*/
public CompletableFuture<Object> eval(final String script, final Map<String, Object> boundVars) {
return eval(script, null, new SimpleBindings(boundVars));
}
/**
* Evaluate a script.
*
* @param script the script to evaluate
* @param language the language to evaluate it in
* @param boundVars the bindings as a {@link Map} to evaluate in the context of the script
*/
public CompletableFuture<Object> eval(final String script, final String language, final Map<String, Object> boundVars) {
return eval(script, language, new SimpleBindings(boundVars));
}
/**
* Evaluate a script.
*
* @param script the script to evaluate
* @param language the language to evaluate it in
* @param boundVars the bindings to evaluate in the context of the script
*/
public CompletableFuture<Object> eval(final String script, final String language, final Bindings boundVars) {
return eval(script, language, boundVars, null, null);
}
/**
* Evaluate a script and allow for the submission of a transform {@link Function} that will transform the
* result after script evaluates but before transaction commit and before the returned {@link CompletableFuture}
* is completed.
*
* @param script the script to evaluate
* @param language the language to evaluate it in
* @param boundVars the bindings to evaluate in the context of the script
* @param transformResult a {@link Function} that transforms the result - can be {@code null}
*/
public CompletableFuture<Object> eval(final String script, final String language, final Map<String, Object> boundVars,
final Function<Object, Object> transformResult) {
return eval(script, language, new SimpleBindings(boundVars), transformResult, null);
}
/**
* Evaluate a script and allow for the submission of a {@link Consumer} that will take the result for additional
* processing after the script evaluates and after the {@link CompletableFuture} is completed, but before the
* transaction is committed.
*
* @param script the script to evaluate
* @param language the language to evaluate it in
* @param boundVars the bindings to evaluate in the context of the script
* @param withResult a {@link Consumer} that accepts the result - can be {@code null}
*/
public CompletableFuture<Object> eval(final String script, final String language, final Map<String, Object> boundVars,
final Consumer<Object> withResult) {
return eval(script, language, new SimpleBindings(boundVars), null, withResult);
}
/**
* Evaluate a script and allow for the submission of both a transform {@link Function} and {@link Consumer}.
* The {@link Function} will transform the result after script evaluates but before transaction commit and before
* the returned {@link CompletableFuture} is completed. The {@link Consumer} will take the result for additional
* processing after the script evaluates and after the {@link CompletableFuture} is completed, but before the
* transaction is committed.
*
* @param script the script to evaluate
* @param language the language to evaluate it in
* @param boundVars the bindings to evaluate in the context of the script
* @param transformResult a {@link Function} that transforms the result - can be {@code null}
* @param withResult a {@link Consumer} that accepts the result - can be {@code null}
*/
public CompletableFuture<Object> eval(final String script, final String language, final Bindings boundVars,
final Function<Object, Object> transformResult, final Consumer<Object> withResult) {
final LifeCycle lifeCycle = LifeCycle.build()
.transformResult(transformResult)
.withResult(withResult).create();
return eval(script, language, boundVars, lifeCycle);
}
    /**
     * Evaluate a script and allow for the submission of alteration to the entire evaluation execution lifecycle.
     * The work is submitted to the configured {@code ExecutorService}; when a positive timeout applies, a
     * cancellation task is scheduled on the {@code ScheduledExecutorService} that interrupts the evaluation
     * and fails the returned future with a {@link TimeoutException}.
     *
     * @param script the script to evaluate
     * @param language the language to evaluate it in
     * @param boundVars the bindings to evaluate in the context of the script
     * @param lifeCycle a set of functions that can be applied at various stages of the evaluation process
     */
    public CompletableFuture<Object> eval(final String script, final String language, final Bindings boundVars, final LifeCycle lifeCycle) {
        final String lang = Optional.ofNullable(language).orElse("gremlin-groovy");
        logger.debug("Preparing to evaluate script - {} - in thread [{}]", script, Thread.currentThread().getName());
        // merge the executor-wide bindings with the per-request ones; request values win on key collisions
        final Bindings bindings = new SimpleBindings();
        bindings.putAll(globalBindings);
        bindings.putAll(boundVars);
        // override the timeout if the lifecycle has a value assigned
        final long scriptEvalTimeOut = lifeCycle.getScriptEvaluationTimeoutOverride().orElse(scriptEvaluationTimeout);
        final CompletableFuture<Object> evaluationFuture = new CompletableFuture<>();
        final FutureTask<Void> evalFuture = new FutureTask<>(() -> {
            try {
                // lifecycle hooks fall back to the executor-level defaults when not overridden
                lifeCycle.getBeforeEval().orElse(beforeEval).accept(bindings);
                logger.debug("Evaluating script - {} - in thread [{}]", script, Thread.currentThread().getName());
                // do this weirdo check until the now deprecated ScriptEngines is gutted
                final Object o = gremlinScriptEngineManager.getEngineByName(lang).eval(script, bindings);
                // apply a transformation before sending back the result - useful when trying to force serialization
                // in the same thread that the eval took place given ThreadLocal nature of graphs as well as some
                // transactional constraints
                final Object result = lifeCycle.getTransformResult().isPresent() ?
                        lifeCycle.getTransformResult().get().apply(o) : o;
                // a mechanism for taking the final result and doing something with it in the same thread, but
                // AFTER the eval and transform are done and that future completed. this provides a final means
                // for working with the result in the same thread as it was eval'd
                if (lifeCycle.getWithResult().isPresent()) lifeCycle.getWithResult().get().accept(result);
                lifeCycle.getAfterSuccess().orElse(afterSuccess).accept(bindings);
                // the evaluationFuture must be completed after all processing as an exception in lifecycle events
                // that must raise as an exception to the caller who has the returned evaluationFuture. in other words,
                // if it occurs before this point, then the handle() method won't be called again if there is an
                // exception that ends up below trying to completeExceptionally()
                evaluationFuture.complete(result);
            } catch (Throwable ex) {
                final Throwable root = null == ex.getCause() ? ex : ExceptionUtils.getRootCause(ex);
                // thread interruptions will typically come as the result of a timeout, so in those cases,
                // check for that situation and convert to TimeoutException
                if (root instanceof InterruptedException) {
                    lifeCycle.getAfterTimeout().orElse(afterTimeout).accept(bindings);
                    evaluationFuture.completeExceptionally(new TimeoutException(
                            String.format("Script evaluation exceeded the configured 'scriptEvaluationTimeout' threshold of %s ms or evaluation was otherwise cancelled directly for request [%s]: %s", scriptEvalTimeOut, script, root.getMessage())));
                } else {
                    lifeCycle.getAfterFailure().orElse(afterFailure).accept(bindings, root);
                    evaluationFuture.completeExceptionally(root);
                }
            }
            return null;
        });
        // NOTE(review): the weak reference appears intended to keep the pending timeout task from pinning a
        // caller-abandoned future (and its result) in memory - confirm against upstream history
        final WeakReference<CompletableFuture<Object>> evaluationFutureRef = new WeakReference<>(evaluationFuture);
        final Future<?> executionFuture = executorService.submit(evalFuture);
        // a timeout of zero (or negative) means no timeout is scheduled at all
        if (scriptEvalTimeOut > 0) {
            // Schedule a timeout in the thread pool for future execution
            final ScheduledFuture<?> sf = scheduledExecutorService.schedule(() -> {
                // cancel(true) interrupts the evaluation thread; the catch block above maps the resulting
                // InterruptedException to a TimeoutException as well, whichever side completes the future first
                if (executionFuture.cancel(true)) {
                    final CompletableFuture<Object> ef = evaluationFutureRef.get();
                    if (ef != null) {
                        ef.completeExceptionally(new TimeoutException(
                                String.format("Script evaluation exceeded the configured 'scriptEvaluationTimeout' threshold of %s ms or evaluation was otherwise cancelled directly for request [%s]", scriptEvalTimeOut, script)));
                    }
                }
            }, scriptEvalTimeOut, TimeUnit.MILLISECONDS);
            // Cancel the scheduled timeout if the eval future is complete or the script evaluation failed with exception
            evaluationFuture.handleAsync((v, t) -> {
                if (!sf.isDone()) {
                    logger.debug("Killing scheduled timeout on script evaluation - {} - as the eval completed (possibly with exception).", script);
                    sf.cancel(true);
                }
                // no return is necessary - nothing downstream is concerned with what happens in here
                return null;
            }, scheduledExecutorService);
        }
        return evaluationFuture;
    }
/**
* Evaluates bytecode with bindings for a specific language into a {@link Traversal}.
*
* @deprecated As of release 3.2.7, replaced by {@link #eval(Bytecode, Bindings, String, String)}
*/
@Deprecated
public Traversal.Admin eval(final Bytecode bytecode, final Bindings boundVars, final String language, final String traversalSource) throws ScriptException {
final String lang = Optional.ofNullable(language).orElse("gremlin-groovy");
final Bindings bindings = new SimpleBindings();
bindings.putAll(globalBindings);
bindings.putAll(boundVars);
return gremlinScriptEngineManager.getEngineByName(lang).eval(bytecode, bindings, traversalSource);
}
public GremlinScriptEngineManager getScriptEngineManager() {
return this.gremlinScriptEngineManager;
}
public ExecutorService getExecutorService() {
return executorService;
}
public ScheduledExecutorService getScheduledExecutorService() {
return scheduledExecutorService;
}
/**
* {@inheritDoc}
* <p/>
* Executors are only closed if they were not supplied externally in the
* {@link org.apache.tinkerpop.gremlin.groovy.engine.GremlinExecutor.Builder}
*/
@Override
public void close() throws Exception {
closeAsync().join();
}
/**
* Executors are only closed if they were not supplied externally in the
* {@link org.apache.tinkerpop.gremlin.groovy.engine.GremlinExecutor.Builder}
*/
public CompletableFuture<Void> closeAsync() throws Exception {
final CompletableFuture<Void> future = new CompletableFuture<>();
new Thread(() -> {
// leave pools running if they are supplied externally. let the sender be responsible for shutting them down
if (!suppliedExecutor) {
executorService.shutdown();
try {
if (!executorService.awaitTermination(180000, TimeUnit.MILLISECONDS))
logger.warn("Timeout while waiting for ExecutorService of GremlinExecutor to shutdown.");
} catch (InterruptedException ie) {
logger.warn("ExecutorService on GremlinExecutor may not have shutdown properly as shutdown thread terminated early.");
}
}
// calls to shutdown are idempotent so no problems calling it twice if the pool is shared
if (!suppliedScheduledExecutor) {
scheduledExecutorService.shutdown();
try {
if (!scheduledExecutorService.awaitTermination(180000, TimeUnit.MILLISECONDS))
logger.warn("Timeout while waiting for ScheduledExecutorService of GremlinExecutor to shutdown.");
} catch (InterruptedException ie) {
logger.warn("ScheduledExecutorService on GremlinExecutor may not have shutdown properly as shutdown thread terminated early.");
}
}
future.complete(null);
}, "gremlin-executor-close").start();
return future;
}
private void initializeGremlinScriptEngineManager() {
for (Map.Entry<String, Map<String, Map<String,Object>>> config : plugins.entrySet()) {
final String language = config.getKey();
final Map<String, Map<String,Object>> pluginConfigs = config.getValue();
for (Map.Entry<String, Map<String,Object>> pluginConfig : pluginConfigs.entrySet()) {
try {
final Class<?> clazz = Class.forName(pluginConfig.getKey());
// first try instance() and if that fails try to use build()
try {
final Method instanceMethod = clazz.getMethod("instance");
gremlinScriptEngineManager.addPlugin((GremlinPlugin) instanceMethod.invoke(null));
} catch (Exception ex) {
final Method builderMethod = clazz.getMethod("build");
Object pluginBuilder = builderMethod.invoke(null);
final Class<?> builderClazz = pluginBuilder.getClass();
final Map<String, Object> customizerConfigs = pluginConfig.getValue();
final Method[] methods = builderClazz.getMethods();
for (Map.Entry<String, Object> customizerConfig : customizerConfigs.entrySet()) {
final Method configMethod = Stream.of(methods).filter(m -> {
final Class<?> type = customizerConfig.getValue().getClass();
return m.getName().equals(customizerConfig.getKey()) && m.getParameters().length <= 1
&& ClassUtils.isAssignable(type, m.getParameters()[0].getType(), true);
}).findFirst()
.orElseThrow(() -> new IllegalStateException("Could not find builder method '" + customizerConfig.getKey() + "' on " + builderClazz.getCanonicalName()));
if (null == customizerConfig.getValue())
pluginBuilder = configMethod.invoke(pluginBuilder);
else
pluginBuilder = configMethod.invoke(pluginBuilder, customizerConfig.getValue());
}
try {
final Method appliesTo = builderClazz.getMethod("appliesTo", Collection.class);
pluginBuilder = appliesTo.invoke(pluginBuilder, Collections.singletonList(language));
} catch (NoSuchMethodException ignored) {
}
final Method create = builderClazz.getMethod("create");
gremlinScriptEngineManager.addPlugin((GremlinPlugin) create.invoke(pluginBuilder));
}
} catch (Exception ex) {
throw new IllegalStateException(ex);
}
}
}
gremlinScriptEngineManager.setBindings(globalBindings);
}
/**
* Create a {@code Builder} with the gremlin-groovy ScriptEngine configured.
*/
public static Builder build() {
return new Builder();
}
public final static class Builder {
private long scriptEvaluationTimeout = 8000;
private Map<String, Map<String, Map<String,Object>>> plugins = new HashMap<>();
private ExecutorService executorService = null;
private ScheduledExecutorService scheduledExecutorService = null;
private Consumer<Bindings> beforeEval = (b) -> {
};
private Consumer<Bindings> afterSuccess = (b) -> {
};
private Consumer<Bindings> afterTimeout = (b) -> {
};
private BiConsumer<Bindings, Throwable> afterFailure = (b, e) -> {
};
private Bindings globalBindings = new org.apache.tinkerpop.gremlin.jsr223.ConcurrentBindings();
private Builder() {
}
/**
* Add a configuration for a {@link GremlinPlugin} to the executor. The key is the fully qualified class name
* of the {@link GremlinPlugin} instance and the value is a map of configuration values. In that map, the key
* is the name of a builder method on the {@link GremlinPlugin} and the value is some argument to pass to that
* method.
*/
public Builder addPlugins(final String engineName, final Map<String, Map<String,Object>> plugins) {
this.plugins.put(engineName, plugins);
return this;
}
/**
* Bindings to apply to every script evaluated. Note that the entries of the supplied {@code Bindings} object
* will be copied into a newly created {@link org.apache.tinkerpop.gremlin.jsr223.ConcurrentBindings} object
* at the call of this method.
*/
public Builder globalBindings(final Bindings bindings) {
this.globalBindings = new org.apache.tinkerpop.gremlin.jsr223.ConcurrentBindings(bindings);
return this;
}
/**
* Amount of time a script has before it times out. Note that the time required covers both script evaluation
* as well as any time needed for a post result transformation (if the transformation function is supplied
* to the {@link GremlinExecutor#eval}).
*
* @param scriptEvaluationTimeout Time in milliseconds that a script is allowed to run and its
* results potentially transformed. Set to zero to have no timeout set.
*/
public Builder scriptEvaluationTimeout(final long scriptEvaluationTimeout) {
this.scriptEvaluationTimeout = scriptEvaluationTimeout;
return this;
}
/**
* The thread pool used to evaluate scripts.
*/
public Builder executorService(final ExecutorService executorService) {
this.executorService = executorService;
return this;
}
/**
* The thread pool used to schedule timeouts on scripts.
*/
public Builder scheduledExecutorService(final ScheduledExecutorService scheduledExecutorService) {
this.scheduledExecutorService = scheduledExecutorService;
return this;
}
/**
* A {@link Consumer} to execute just before the script evaluation.
*/
public Builder beforeEval(final Consumer<Bindings> beforeEval) {
this.beforeEval = beforeEval;
return this;
}
/**
* A {@link Consumer} to execute just after successful script evaluation. Note that success will be called
* after evaluation of the script in the engine and after the results have passed through transformation
* (if a transform function is passed to the {@link GremlinExecutor#eval}.
*/
public Builder afterSuccess(final Consumer<Bindings> afterSuccess) {
this.afterSuccess = afterSuccess;
return this;
}
/**
* A {@link Consumer} to execute if the script times out.
*/
public Builder afterTimeout(final Consumer<Bindings> afterTimeout) {
this.afterTimeout = afterTimeout;
return this;
}
/**
* A {@link Consumer} to execute in the event of failure.
*/
public Builder afterFailure(final BiConsumer<Bindings, Throwable> afterFailure) {
this.afterFailure = afterFailure;
return this;
}
public GremlinExecutor create() {
final BasicThreadFactory threadFactory = new BasicThreadFactory.Builder().namingPattern("gremlin-executor-default-%d").build();
final AtomicBoolean poolCreatedByBuilder = new AtomicBoolean();
final AtomicBoolean suppliedExecutor = new AtomicBoolean(true);
final AtomicBoolean suppliedScheduledExecutor = new AtomicBoolean(true);
final ExecutorService es = Optional.ofNullable(executorService).orElseGet(() -> {
poolCreatedByBuilder.set(true);
suppliedExecutor.set(false);
return Executors.newScheduledThreadPool(4, threadFactory);
});
executorService = es;
final ScheduledExecutorService ses = Optional.ofNullable(scheduledExecutorService).orElseGet(() -> {
// if the pool is created by the builder and we need another just re-use it, otherwise create
// a new one of those guys
suppliedScheduledExecutor.set(false);
return (poolCreatedByBuilder.get()) ?
(ScheduledExecutorService) es : Executors.newScheduledThreadPool(4, threadFactory);
});
scheduledExecutorService = ses;
return new GremlinExecutor(this, suppliedExecutor.get(), suppliedScheduledExecutor.get());
}
}
/**
* The lifecycle of execution within the {@link #eval(String, String, Bindings, LifeCycle)} method. Since scripts
* are executed in a thread pool and graph transactions are bound to a thread all actions related to that script
* evaluation, both before and after that evaluation, need to be executed in the same thread. This leads to a
* lifecycle of actions that can occur within that evaluation. Note that some of these options can be globally
* set on the {@code GremlinExecutor} itself through the {@link GremlinExecutor.Builder}. If specified here,
* they will override those global settings.
*/
public static class LifeCycle {
private final Optional<Consumer<Bindings>> beforeEval;
private final Optional<Function<Object, Object>> transformResult;
private final Optional<Consumer<Object>> withResult;
private final Optional<Consumer<Bindings>> afterSuccess;
private final Optional<Consumer<Bindings>> afterTimeout;
private final Optional<BiConsumer<Bindings, Throwable>> afterFailure;
private final Optional<Long> scriptEvaluationTimeoutOverride;
private LifeCycle(final Builder builder) {
beforeEval = Optional.ofNullable(builder.beforeEval);
transformResult = Optional.ofNullable(builder.transformResult);
withResult = Optional.ofNullable(builder.withResult);
afterSuccess = Optional.ofNullable(builder.afterSuccess);
afterTimeout = Optional.ofNullable(builder.afterTimeout);
afterFailure = Optional.ofNullable(builder.afterFailure);
scriptEvaluationTimeoutOverride = Optional.ofNullable(builder.scriptEvaluationTimeoutOverride);
}
public Optional<Long> getScriptEvaluationTimeoutOverride() {
return scriptEvaluationTimeoutOverride;
}
public Optional<Consumer<Bindings>> getBeforeEval() {
return beforeEval;
}
public Optional<Function<Object, Object>> getTransformResult() {
return transformResult;
}
public Optional<Consumer<Object>> getWithResult() {
return withResult;
}
public Optional<Consumer<Bindings>> getAfterSuccess() {
return afterSuccess;
}
public Optional<Consumer<Bindings>> getAfterTimeout() {
return afterTimeout;
}
public Optional<BiConsumer<Bindings, Throwable>> getAfterFailure() {
return afterFailure;
}
public static Builder build() {
return new Builder();
}
public static class Builder {
private Consumer<Bindings> beforeEval = null;
private Function<Object, Object> transformResult = null;
private Consumer<Object> withResult = null;
private Consumer<Bindings> afterSuccess = null;
private Consumer<Bindings> afterTimeout = null;
private BiConsumer<Bindings, Throwable> afterFailure = null;
private Long scriptEvaluationTimeoutOverride = null;
/**
* Specifies the function to execute prior to the script being evaluated. This function can also be
* specified globally on {@link GremlinExecutor.Builder#beforeEval(Consumer)}.
*/
public Builder beforeEval(final Consumer<Bindings> beforeEval) {
this.beforeEval = beforeEval;
return this;
}
/**
* Specifies the function to execute on the result of the script evaluation just after script evaluation
* returns but before the script evaluation is marked as complete.
*/
public Builder transformResult(final Function<Object, Object> transformResult) {
this.transformResult = transformResult;
return this;
}
/**
* Specifies the function to execute on the result of the script evaluation just after script evaluation
* returns but before the script evaluation is marked as complete.
*/
public Builder withResult(final Consumer<Object> withResult) {
this.withResult = withResult;
return this;
}
/**
* Specifies the function to execute after result transformations. This function can also be
* specified globally on {@link GremlinExecutor.Builder#afterSuccess(Consumer)}. The script evaluation
* will be marked as "complete" after this method.
*/
public Builder afterSuccess(final Consumer<Bindings> afterSuccess) {
this.afterSuccess = afterSuccess;
return this;
}
/**
* Specifies the function to execute if the script evaluation times out. This function can also be
* specified globally on {@link GremlinExecutor.Builder#afterTimeout(Consumer)}.
*/
public Builder afterTimeout(final Consumer<Bindings> afterTimeout) {
this.afterTimeout = afterTimeout;
return this;
}
/**
* Specifies the function to execute if the script evaluation fails. This function can also be
* specified globally on {@link GremlinExecutor.Builder#afterFailure(BiConsumer)}.
*/
public Builder afterFailure(final BiConsumer<Bindings, Throwable> afterFailure) {
this.afterFailure = afterFailure;
return this;
}
/**
* An override to the global {@code scriptEvaluationTimeout} setting on the script engine. If this value
* is set to {@code null} (the default) it will use the global setting.
*/
public Builder scriptEvaluationTimeoutOverride(final Long scriptEvaluationTimeoutOverride) {
this.scriptEvaluationTimeoutOverride = scriptEvaluationTimeoutOverride;
return this;
}
public LifeCycle create() {
return new LifeCycle(this);
}
}
}
}
| |
/*
* Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.test.database.auto;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;
import com.orientechnologies.orient.client.db.ODatabaseHelper;
import com.orientechnologies.orient.core.db.document.ODatabaseDocument;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.db.graph.OGraphDatabase;
import com.orientechnologies.orient.core.db.graph.OGraphDatabasePool;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.intent.OIntentMassiveInsert;
import com.orientechnologies.orient.core.metadata.schema.OClass;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.sql.OCommandSQL;
import com.orientechnologies.orient.core.sql.query.OSQLSynchQuery;
import com.orientechnologies.orient.core.tx.OTransaction;
import com.orientechnologies.orient.core.tx.OTransaction.TXTYPE;
import com.orientechnologies.orient.object.db.graph.OGraphElement;
@Test
public class GraphDatabaseTest {
  // shared graph database handle; opened before each test in init() and closed in deinit()
  private OGraphDatabase database;
  // connection url supplied by the TestNG suite via the "url" parameter
  private String url;

  @Parameters(value = "url")
  public GraphDatabaseTest(String iURL) {
    database = new OGraphDatabase(iURL);
    url = iURL;
  }
@Test
public void testPool() throws IOException {
final OGraphDatabase[] dbs = new OGraphDatabase[OGraphDatabasePool.global().getMaxSize()];
for (int i = 0; i < 10; ++i) {
for (int db = 0; db < dbs.length; ++db)
dbs[db] = OGraphDatabasePool.global().acquire(url, "admin", "admin");
for (int db = 0; db < dbs.length; ++db)
dbs[db].close();
}
}
@BeforeMethod
public void init() {
database.open("admin", "admin");
}
@AfterMethod
public void deinit() {
database.close();
}
@Test
public void alterDb() {
database.command(new OCommandSQL("alter database type graph")).execute();
}
  @Test(dependsOnMethods = "alterDb")
  public void populate() {
    // build a small vehicle hierarchy: GraphVehicle with two subclasses, one declared by class
    // reference and the other by superclass name
    OClass vehicleClass = database.createVertexType("GraphVehicle");
    database.createVertexType("GraphCar", vehicleClass);
    database.createVertexType("GraphMotocycle", "GraphVehicle");
    ODocument carNode = (ODocument) database.createVertex("GraphCar").field("brand", "Hyundai").field("model", "Coupe")
        .field("year", 2003).save();
    ODocument motoNode = (ODocument) database.createVertex("GraphMotocycle").field("brand", "Yamaha").field("model", "X-City 250")
        .field("year", 2009).save();
    database.createEdge(carNode, motoNode).save();
    // a polymorphic query against the superclass must see both vertices
    List<ODocument> result = database.query(new OSQLSynchQuery<ODocument>("select from GraphVehicle"));
    Assert.assertEquals(result.size(), 2);
    for (ODocument v : result) {
      Assert.assertTrue(v.getSchemaClass().isSubClassOf(vehicleClass));
    }
    // re-open the database and reload the schema to confirm everything survived persistence
    database.close();
    database.open("admin", "admin");
    database.getMetadata().getSchema().reload();
    result = database.query(new OSQLSynchQuery<ODocument>("select from GraphVehicle"));
    Assert.assertEquals(result.size(), 2);
    ODocument edge1 = null;
    ODocument edge2 = null;
    for (ODocument v : result) {
      Assert.assertTrue(v.getSchemaClass().isSubClassOf("GraphVehicle"));
      if (v.getClassName().equals("GraphCar")) {
        Assert.assertEquals(database.getOutEdges(v).size(), 1);
        edge1 = (ODocument) database.getOutEdges(v).iterator().next();
      } else {
        Assert.assertEquals(database.getInEdges(v).size(), 1);
        edge2 = (ODocument) database.getInEdges(v).iterator().next();
      }
    }
    // the single edge must be reachable as "out" from the car and as "in" at the motorcycle
    Assert.assertEquals(edge1, edge2);
  }
@Test(dependsOnMethods = "populate")
public void testSQLAgainstGraph() {
ODocument tom = (ODocument) database.createVertex().field("name", "Tom").save();
ODocument ferrari = (ODocument) database.createVertex("GraphCar").field("brand", "Ferrari").save();
ODocument maserati = (ODocument) database.createVertex("GraphCar").field("brand", "Maserati").save();
ODocument porsche = (ODocument) database.createVertex("GraphCar").field("brand", "Porsche").save();
database.createEdge(tom, ferrari).field("label", "drives").save();
database.createEdge(tom, maserati).field("label", "drives").save();
database.createEdge(tom, porsche).field("label", "owns").save();
Assert.assertNotNull(database.getOutEdges(tom, "drives"));
Assert.assertFalse(database.getOutEdges(tom, "drives").isEmpty());
List<OGraphElement> result = database.query(new OSQLSynchQuery<OGraphElement>(
"select out[in.@class = 'GraphCar'].in from V where name = 'Tom'"));
Assert.assertEquals(result.size(), 1);
result = database.query(new OSQLSynchQuery<OGraphElement>(
"select out[label='drives'][in.brand = 'Ferrari'].in from V where name = 'Tom'"));
Assert.assertEquals(result.size(), 1);
result = database.query(new OSQLSynchQuery<OGraphElement>("select out[in.brand = 'Ferrari'].in from V where name = 'Tom'"));
Assert.assertEquals(result.size(), 1);
}
@Test
public void testDictionary() {
ODocument rootNode = database.createVertex().field("id", 54254454);
database.setRoot("test123", rootNode);
rootNode.save();
database.close();
database.open("admin", "admin");
ODocument secroot = database.getRoot("test123");
Assert.assertEquals(secroot.getIdentity(), rootNode.getIdentity());
}
@Test
public void testSubVertexQuery() {
// Schema: vertex subtype "newV" with a uniquely-indexed integer property.
database.createVertexType("newV").createProperty("f_int", OType.INTEGER).createIndex(OClass.INDEX_TYPE.UNIQUE);
database.getMetadata().getSchema().save();
// Populate three vertices with distinct f_int values.
for (final int value : new int[] { 2, 1, 3 }) {
database.createVertex("newV").field("f_int", value).save();
}
// query 1: range scan against the base vertex class
final String baseQuery = "select * from V where f_int between 0 and 10";
final List<ODocument> baseResult = database.query(new OSQLSynchQuery<ODocument>(baseQuery));
System.out.println(baseQuery + ": ");
for (final OIdentifiable vertex : baseResult) {
System.out.println(vertex);
}
// query 2: the same range scan against the subtype
final String subTypeQuery = "select * from newV where f_int between 0 and 10";
final List<ODocument> subTypeResult = database.query(new OSQLSynchQuery<ODocument>(subTypeQuery));
System.out.println(subTypeQuery + ": ");
for (final OIdentifiable vertex : subTypeResult) {
System.out.println(vertex);
}
}
/**
 * Verifies that deleting records and re-using an identical unique-index key
 * within the same transaction does not raise a duplicate-key error.
 */
public void testNotDuplicatedIndexTxChanges() throws IOException {
// Ensure type "vertexA" exists with a uniquely-indexed "name" property.
OClass oc = database.getVertexType("vertexA");
if (oc == null)
oc = database.createVertexType("vertexA");
if (!oc.existsProperty("name"))
oc.createProperty("name", OType.STRING);
if (oc.getClassIndex("vertexA_name_idx") == null)
oc.createIndex("vertexA_name_idx", OClass.INDEX_TYPE.UNIQUE, "name");
// FIRST: create a couple of records
ODocument docA = database.createVertex("vertexA");
docA.field("name", "myKey");
database.save(docA);
ODocument docB = database.createVertex("vertexA");
// BUGFIX: the second key belongs on docB; previously this line set docA again,
// leaving docB without a "name" and clobbering docA's "myKey".
docB.field("name", "anotherKey");
database.save(docB);
// SECOND: in one transaction, delete both and re-insert the unique key "myKey".
database.begin();
database.delete(docB);
database.delete(docA);
ODocument docKey = database.createVertex("vertexA");
docKey.field("name", "myKey");
database.save(docKey);
database.commit();
}
public void testAutoEdge() throws IOException {
// A vertex must support a self-referencing edge and still reload cleanly.
ODocument docA = database.createVertex();
docA.field("name", "selfEdgeTest");
database.createEdge(docA, docA).save();
docA.reload();
}
public void testNewVertexAndEdgesWithFieldsInOneShoot() throws IOException {
// Vertex fields supplied inline as varargs key/value pairs.
ODocument docA = database.createVertex(null, "field1", "value1", "field2", "value2");
// Vertex fields supplied as a map.
Map<String, Object> map = new HashMap<String, Object>();
map.put("field1", "value1");
map.put("field2", "value2");
ODocument docB = database.createVertex(null, map);
// Self-edge on docA with an edge field supplied inline.
ODocument docC = database.createEdge(docA, docA, null, "edgeF1", "edgeV2").save();
Assert.assertEquals(docA.field("field1"), "value1");
Assert.assertEquals(docA.field("field2"), "value2");
Assert.assertEquals(docB.field("field1"), "value1");
Assert.assertEquals(docB.field("field2"), "value2");
Assert.assertEquals(docC.field("edgeF1"), "edgeV2");
}
public void testEdgesIterationInTX() {
database.createVertexType("vertexAA");
database.createVertexType("vertexBB");
database.createEdgeType("edgeAB");
// One vertexAA hub; each incoming edge is created inside its own optimistic tx.
ODocument vertexA = (ODocument) database.createVertex("vertexAA").field("address", "testing").save();
for (int i = 0; i < 18; ++i) {
ODocument vertexB = (ODocument) database.createVertex("vertexBB").field("address", "test" + i).save();
database.begin(OTransaction.TXTYPE.OPTIMISTIC);
database.createEdge(vertexB.getIdentity(), vertexA.getIdentity(), "edgeAB").save();
database.commit();
}
// Iterate the accumulated in-edges after all transactions have committed.
List<ODocument> result = database.query(new OSQLSynchQuery<ODocument>("select * from vertexAA"));
for (ODocument d : result) {
Set<OIdentifiable> edges = database.getInEdges(d);
for (OIdentifiable e : edges) {
System.out.println("In Edge: " + e);
}
}
}
//
// @Test
// public void testTxDictionary() {
// database.open("admin", "admin");
//
// database.begin();
//
// try {
// ODocument rootNode = database.createVertex().field("id", 54254454);
// database.setRoot("test123", rootNode);
// rootNode.save();
//
// database.commit();
//
// database.close();
// database.open("admin", "admin");
//
// ODocument secroot = database.getRoot("test123");
// Assert.assertEquals(secroot.getIdentity(), rootNode.getIdentity());
// } finally {
// database.close();
// }
// }
/**
 * Exercises field updates on a typed vertex through commit and rollback
 * transactions, then cleans the type up via SQL delete.
 *
 * @author bill@tobecker.com
 */
public void testTxField() {
if (database.getVertexType("PublicCert") == null)
database.createVertexType("PublicCert");
// Step 1
// create a public cert with some field set
ODocument publicCert = (ODocument) database.createVertex("PublicCert").field("address", "drevil@myco.mn.us").save();
// Step 2
// update the public cert field in transaction
database.begin(TXTYPE.OPTIMISTIC);
publicCert.field("address", "newaddress@myco.mn.us").save();
database.commit();
// Step 3
// try transaction with a rollback
database.begin(TXTYPE.OPTIMISTIC);
database.createVertex("PublicCert").field("address", "iagor@myco.mn.us").save();
database.rollback();
// Step 4
// just show what is there
List<ODocument> result = database.query(new OSQLSynchQuery<ODocument>("select * from PublicCert"));
for (ODocument d : result) {
System.out.println("(-1) Vertex: " + d);
}
// Step 5
// try deleting all the stuff
database.command(new OCommandSQL("delete from PublicCert")).execute();
}
@Test(dependsOnMethods = "populate")
public void testEdgeWithRID() {
// Edges created from record identities (RIDs) rather than loaded documents.
database.declareIntent(new OIntentMassiveInsert());
ODocument a = database.createVertex().field("label", "a");
a.save();
ODocument b = database.createVertex().field("label", "b");
b.save();
ODocument c = database.createVertex().field("label", "c");
c.save();
database.createEdge(a.getIdentity(), b.getIdentity()).save();
database.createEdge(a.getIdentity(), c.getIdentity()).save();
a.reload();
// Assert.assertEquals(database.getOutEdges(a).size(), 2);
}
@Test(dependsOnMethods = "populate")
public void testEdgeCreationIn2Steps() {
// Create the source vertex first; edges to offices are added afterwards.
ODocument sourceDoc = database.createVertex();
sourceDoc.field("name", "MyTest", OType.STRING);
sourceDoc.save();
// For each office: create its vertex, link it to every vertex currently named
// 'MyTest', then report the out-edge count seen through SQL.
for (final String officeName : new String[] { "office1", "office2" }) {
final ODocument officeDoc = database.createVertex();
officeDoc.field("name", officeName, OType.STRING);
officeDoc.save();
final List<ODocument> sources = database.query(new OSQLSynchQuery<ODocument>("select * from V where name = 'MyTest'"));
for (final ODocument source : sources)
database.createEdge(source, officeDoc).field("label", "office", OType.STRING).save();
final String countQuery = "select out[label='office'].size() from V where name = 'MyTest'";
final List<ODocument> counts = database.query(new OSQLSynchQuery<ODocument>(countQuery));
System.out.println(counts);
}
}
@Test
public void saveEdges() {
database.declareIntent(new OIntentMassiveInsert());
// Build a "super node": one vertex with 1000 out-edges, logging elapsed time
// every 100 insertions.
ODocument v = database.createVertex();
v.field("name", "superNode");
long insertBegin = System.currentTimeMillis();
long begin = insertBegin;
Set<Integer> identities = new HashSet<Integer>(1000);
for (int i = 1; i <= 1000; ++i) {
database.createEdge(v, database.createVertex().field("id", i)).save();
Assert.assertTrue(identities.add(i));
if (i % 100 == 0) {
final long now = System.currentTimeMillis();
System.out.printf("\nInserted %d edges, elapsed %d ms. v.out=%d", i, now - begin, ((Set<?>) v.field("out")).size());
begin = System.currentTimeMillis();
}
}
Assert.assertEquals(identities.size(), 1000);
// The in-memory instance and a freshly loaded copy must agree on edge count.
int originalEdges = ((Set<?>) v.field("out")).size();
System.out.println("Edge count (Original instance): " + originalEdges);
ODocument x = database.load(v.getIdentity());
int loadedEdges = ((Set<?>) x.field("out")).size();
System.out.println("Edge count (Loaded instance): " + loadedEdges);
Assert.assertEquals(originalEdges, loadedEdges);
long now = System.currentTimeMillis();
System.out.printf("\nInsertion completed in %dms. DB edges %d, DB vertices %d", now - insertBegin, database.countEdges(),
database.countVertexes());
// Walk every out-edge and tick each target's id off the expected set; the set
// must be exactly consumed (no missing or duplicate edges).
int i = 1;
for (OIdentifiable e : database.getOutEdges(v)) {
Integer currentIdentity = database.getInVertex(e).field("id");
Assert.assertTrue(identities.contains(currentIdentity));
Assert.assertTrue(identities.remove(currentIdentity));
if (i % 100 == 0) {
now = System.currentTimeMillis();
System.out.printf("\nRead %d edges and %d vertices, elapsed %d ms", i, i, now - begin);
begin = System.currentTimeMillis();
}
i++;
}
Assert.assertTrue(identities.isEmpty());
database.declareIntent(null);
}
@Test
public void sqlInsertIntoVertexes() {
// Wire an edge between two existing vertices purely through SQL: insert the
// edge record, then add it to each vertex's out/in collections.
List<OIdentifiable> vertices = database.command(new OCommandSQL("select from V limit 2")).execute();
Assert.assertEquals(vertices.size(), 2);
OIdentifiable v1 = ((ODocument) vertices.get(0)).reload();
OIdentifiable v2 = ((ODocument) vertices.get(1)).reload();
final int v1Edges = database.getOutEdges(v1).size();
final int v2Edges = database.getInEdges(v2).size();
ODocument e = (ODocument) database.command(new OCommandSQL("insert into E SET out = ?, in = ?")).execute(v1, v2);
database.command(new OCommandSQL("update " + v1.getIdentity() + " ADD out = " + e.getIdentity())).execute();
database.command(new OCommandSQL("update " + v2.getIdentity() + " ADD in = " + e.getIdentity())).execute();
// Reload both vertices and confirm each gained exactly one edge.
ODocument doc1 = ((ODocument) v1.getRecord().reload());
ODocument doc2 = ((ODocument) v2.getRecord().reload());
Assert.assertEquals(database.getOutEdges(doc1).size(), v1Edges + 1);
Assert.assertEquals(database.getInEdges(doc2).size(), v2Edges + 1);
}
@Test
public void sqlNestedQueries() {
// Driver "John": a red "owns" edge to a ford and a red "wants" edge to an audi.
ODocument sourceDoc1 = database.createVertex().field("driver", "John", OType.STRING).save();
ODocument targetDoc1 = database.createVertex().field("car", "ford", OType.STRING).save();
ODocument targetDoc2 = database.createVertex().field("car", "audi", OType.STRING).save();
database.createEdge(sourceDoc1, targetDoc1).field("color", "red", OType.STRING).field("action", "owns", OType.STRING).save();
database.createEdge(sourceDoc1, targetDoc2).field("color", "red", OType.STRING).field("action", "wants", OType.STRING).save();
// Each query narrows the out-edge traversal differently; all must match only
// the single "John" vertex.
String query1 = "select driver from V where out.in.car in 'ford'";
List<ODocument> result = database.query(new OSQLSynchQuery<ODocument>(query1));
Assert.assertEquals(result.size(), 1);
String query2 = "select driver from V where out[color='red'].in.car in 'ford'";
result = database.query(new OSQLSynchQuery<ODocument>(query2));
Assert.assertEquals(result.size(), 1);
String query3 = "select driver from V where out[action='owns'].in.car in 'ford'";
result = database.query(new OSQLSynchQuery<ODocument>(query3));
Assert.assertEquals(result.size(), 1);
String query4 = "select driver from V where out[color='red'][action='owns'].in.car in 'ford'";
result = database.query(new OSQLSynchQuery<ODocument>(query4));
Assert.assertEquals(result.size(), 1);
// Clean up so later tests are unaffected.
database.removeVertex(sourceDoc1);
database.removeVertex(targetDoc1);
database.removeVertex(targetDoc2);
}
public void nestedQuery() {
// UK owns two cities, each carrying lat/long fields.
ODocument countryDoc1 = database.createVertex().field("name", "UK").field("area", "Europe").field("code", "2").save();
ODocument cityDoc1 = database.createVertex().field("name", "leicester").field("lat", "52.64640").field("long", "-1.13159")
.save();
ODocument cityDoc2 = database.createVertex().field("name", "manchester").field("lat", "53.47497").field("long", "-2.25769")
.save();
database.createEdge(countryDoc1, cityDoc1).field("label", "owns").save();
database.createEdge(countryDoc1, cityDoc2).field("label", "owns").save();
// Without flatten(): one result row holding a collection of two targets.
String subquery = "select out[label='owns'].in from V where name = 'UK'";
List<ODocument> result = database.query(new OSQLSynchQuery<ODocument>(subquery));
Assert.assertEquals(result.size(), 1);
Assert.assertEquals(((Collection<ODocument>) result.get(0).field("out")).size(), 2);
// With flatten(): two rows, one per city.
subquery = "select flatten(out[label='owns'].in) from V where name = 'UK'";
result = database.query(new OSQLSynchQuery<ODocument>(subquery));
Assert.assertEquals(result.size(), 2);
for (int i = 0; i < result.size(); i++) {
System.out.println("uno: " + result.get(i));
Assert.assertTrue(result.get(i).containsField("lat"));
}
// Flattened subquery used as the query target, ordered by computed distance.
String query = "select name, lat, long, distance(lat,long,51.5,0.08) as distance from (select flatten(out[label='owns'].in) from V where name = 'UK') order by distance";
result = database.query(new OSQLSynchQuery<ODocument>(query));
Assert.assertEquals(result.size(), 2);
for (int i = 0; i < result.size(); i++) {
System.out.println("dos: " + result.get(i));
Assert.assertTrue(result.get(i).containsField("lat"));
Assert.assertTrue(result.get(i).containsField("distance"));
}
}
/**
 * Exercises flatten() on a freshly created database: a player vertex linked to
 * a team vertex must surface the team's fields through the flattened edge path.
 */
public void testFlattenBlankDatabase() throws IOException {
// Derive a sibling database URL named "flattenTest" from the test URL.
String iUrl = url;
// BUGFIX: String.replace returns a new string; the result was previously
// discarded, so backslash-separated (Windows) paths were never normalized.
iUrl = iUrl.replace("\\", "/");
if (iUrl.endsWith("/"))
iUrl = iUrl.substring(0, iUrl.length() - 1);
if (iUrl.contains("/")) {
iUrl = iUrl.substring(0, iUrl.lastIndexOf("/") + 1) + "flattenTest";
} else {
iUrl = iUrl.substring(0, iUrl.indexOf(":") + 1) + "flattenTest";
}
// Create the blank database, then reopen it as a graph database.
ODatabaseDocument db = new ODatabaseDocumentTx(iUrl);
ODatabaseHelper.createDatabase(db, iUrl);
db.close();
OGraphDatabase database = new OGraphDatabase(iUrl);
database.open("admin", "admin");
ODocument playerDoc = database.createVertex().field("surname", "Torres").save();
ODocument teamDoc = database.createVertex().field("team", "Chelsea").save();
database.createEdge(playerDoc, teamDoc).field("label", "player").save();
// Every flattened target must be the team vertex.
String query = "select flatten(out[label='player'].in) from V where surname = 'Torres'";
List<ODocument> result = database.query(new OSQLSynchQuery<ODocument>(query));
for (int i = 0; i < result.size(); i++) {
Assert.assertTrue(result.get(i).containsField("team"));
Assert.assertTrue(result.get(i).field("team").equals("Chelsea"));
}
// Tear the temporary database down again.
database.removeVertex(playerDoc);
database.removeVertex(teamDoc);
ODatabaseHelper.deleteDatabase(database);
database.close();
}
public void checkDijkstra() {
// Smoke test: the Dijkstra() SQL function must execute against the hard-coded
// target RID #53:3 (using the 'weight' edge field) and yield a non-empty result.
String subquery = "select $current, Dijkstra($current, #53:3, 'weight') as path from V where 1 > 0";
List<ODocument> result = database.query(new OSQLSynchQuery<ODocument>(subquery));
Assert.assertFalse(result.isEmpty());
}
public void testSQLManagementOfUnderlyingDocumentsInGraphs() {
// Full SQL lifecycle of graph records: create classes, vertices and edges,
// delete them by several criteria, then drop the classes again.
Object result;
result = database.command(new OCommandSQL("create class V1 extends V")).execute();
result = database.command(new OCommandSQL("create class E1 extends E")).execute();
OIdentifiable v1 = database.command(new OCommandSQL("create vertex V1 set name = 'madeInSqlLand'")).execute();
OIdentifiable v2 = database.command(new OCommandSQL("create vertex V1 set name = 'madeInSqlLand'")).execute();
OIdentifiable v3 = database.command(new OCommandSQL("create vertex V1 set name = 'madeInSqlLand'")).execute();
List<OIdentifiable> e1 = database.command(
new OCommandSQL("create edge E1 from " + v1.getIdentity() + " to " + v2.getIdentity() + " set name = 'wow' ")).execute();
List<OIdentifiable> e2 = database.command(
new OCommandSQL("create edge E1 from " + v1.getIdentity() + " to " + v3.getIdentity() + " set name = 'wow' ")).execute();
// Delete by endpoints + filter, then by filter alone; each removes one edge.
result = database.command(
new OCommandSQL("delete edge from " + v1.getIdentity() + " to " + v2.getIdentity() + " where name = 'wow'")).execute();
Assert.assertEquals(result, 1);
result = database.command(new OCommandSQL("delete edge where name = 'wow'")).execute();
Assert.assertEquals(result, 1);
// Delete vertices by RID parameter; each removes one record.
result = database.command(new OCommandSQL("delete from V1 where @rid = ?")).execute(v2);
Assert.assertEquals(result, 1);
result = database.command(new OCommandSQL("delete from V1 where @rid = ?")).execute(v3);
Assert.assertEquals(result, 1);
result = database.command(new OCommandSQL("create property V1.ctime DATETIME")).execute();
// result = database.command(new OCommandSQL("update V1 set ctime=sysdate() where name = 'madeInSqlLand'")).execute();
result = database.command(new OCommandSQL("drop class V1")).execute();
result = database.command(new OCommandSQL("drop class E1")).execute();
}
}
| |
/*
* Copyright (c) 2014. Real Time Genomics Limited.
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the
* distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.rtg.util.intervals;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.stream.Collectors;
/**
* A structure that holds multiple, possibly overlapping ranges that can be searched for a point within the ranges.
*
* Makes use of a segment tree <code>http://en.wikipedia.org/wiki/Segment_tree</code> to store and efficiently look up meta-data within ranges.
* The input meta-data are split and merged to form a single set of non-overlapping meta-data ranges.
* A continuous set of ranges (with and without meta-data) is stored in the ranges array.
* A simple binary search mechanism is used to look up the meta-data for a given location.
*/
public class RangeList<T> {
/**
 * Range that links to other ranges which enclose it.
 */
public static final class RangeView<T> extends Range {
// Original input ranges spanning this view; null until the first is added.
private List<RangeMeta<T>> mEnclosingRanges = null;
private RangeView(int start, int end) {
super(start, end);
}
/** @return true if this range has enclosing ranges */
public boolean hasRanges() {
return mEnclosingRanges != null;
}
/** @return a list of the meta information from all ranges spanning this range, or null if there are none */
public List<T> getMeta() {
if (mEnclosingRanges == null) {
return null;
}
return mEnclosingRanges.stream().map(RangeMeta::getMeta).collect(Collectors.toList());
}
private void addEnclosingRange(RangeMeta<T> range) {
assert range.getStart() <= getStart() && range.getEnd() >= getEnd();
if (mEnclosingRanges == null) {
mEnclosingRanges = new ArrayList<>();
}
mEnclosingRanges.add(range);
}
/**
 * @return the list of ranges as originally specified that are covered by this range
 */
public List<RangeMeta<T>> getEnclosingRanges() {
return mEnclosingRanges;
}
}
// Continuous, non-overlapping views covering the whole integer line.
private final List<RangeView<T>> mRanges;
// Subset of mRanges that have at least one enclosing input range.
private final List<RangeView<T>> mNonEmptyRanges;
/**
 * Convenience constructor.
 * @param range a meta-data range to store for searching.
 */
public RangeList(RangeMeta<T> range) {
this(Collections.singletonList(range));
}
/**
 * Constructor.
 * @param ranges the list of meta-data ranges to store for searching.
 */
public RangeList(List<RangeMeta<T>> ranges) {
if (ranges == null || ranges.isEmpty()) {
// Degenerate case: a single empty view spanning everything.
mRanges = new ArrayList<>(1);
mRanges.add(new RangeView<>(Integer.MIN_VALUE, Integer.MAX_VALUE));
} else {
// get list of range boundaries
mRanges = getRangeViews(ranges);
// load input ranges into the non-overlapping views; each input range covers
// a contiguous run of views starting at its start position
for (final RangeMeta<T> range : ranges) {
int index = findFullRangeIndex(range.getStart());
while (index < mRanges.size() && mRanges.get(index).getEnd() <= range.getEnd()) {
mRanges.get(index).addEnclosingRange(range);
++index;
}
}
}
mNonEmptyRanges = new ArrayList<>();
for (final RangeView<T> range : mRanges) {
if (range.hasRanges()) {
mNonEmptyRanges.add(range);
}
}
}
// Create a list of non-overlapping RangeView objects covering -inf to +inf,
// with a boundary at every distinct input start/end position
private static <U> List<RangeView<U>> getRangeViews(List<RangeMeta<U>> ranges) {
final HashSet<Integer> pivots = new HashSet<>();
for (final RangeMeta<?> range : ranges) {
pivots.add(range.getStart());
pivots.add(range.getEnd());
}
final int[] pivots2 = new int[pivots.size()];
int i2 = 0;
for (final Integer x : pivots) {
pivots2[i2] = x;
++i2;
}
Arrays.sort(pivots2);
// set up continuous non-overlapping ranges for -inf to +inf
final List<RangeView<U>> views = new ArrayList<>(pivots2.length + 1);
if (pivots2[0] != Integer.MIN_VALUE) {
views.add(new RangeView<>(Integer.MIN_VALUE, pivots2[0]));
}
for (int i = 1; i < pivots2.length; ++i) {
views.add(new RangeView<>(pivots2[i - 1], pivots2[i]));
}
if (pivots2[pivots2.length - 1] != Integer.MAX_VALUE) {
views.add(new RangeView<>(pivots2[pivots2.length - 1], Integer.MAX_VALUE));
}
return views;
}
/**
 * Return the list of meta-data for a given location. Returns null if no meta-data exists.
 * @param loc the position to look up the containing range for.
 * @return meta-data list
 */
public List<T> find(int loc) {
return findRange(loc).getMeta();
}
private RangeView<T> findRange(int loc) {
return mRanges.get(findFullRangeIndex(loc));
}
/**
 * @return the list of non-empty ranges
 */
public List<RangeView<T>> getRangeList() {
return mNonEmptyRanges;
}
/**
 * @return the full list of ranges, includes ranges corresponding to empty intervals
 */
public List<RangeView<T>> getFullRangeList() {
return mRanges;
}
/**
 * Return the index of the range within the full range list containing the specified point.
 * @param loc the position to search
 * @return the index of the range entry containing the position
 */
public final int findFullRangeIndex(int loc) {
if (loc == Integer.MAX_VALUE) {
// MAX_VALUE can only fall in the final (right-most) view.
return mRanges.size() - 1;
}
int min = 0;
int max = mRanges.size();
// BUGFIX: use an unsigned shift for the midpoint; (max + min) / 2 can
// overflow to a negative index for very large range lists.
int res = (max + min) >>> 1;
// binary search; always terminates because the views partition the full
// integer line, so some view contains loc
boolean found = false;
while (!found) {
final RangeView<T> range = mRanges.get(res);
if (range.contains(loc)) {
found = true;
} else {
if (loc < range.getStart()) {
max = res;
} else {
min = res;
}
res = (max + min) >>> 1;
}
}
return res;
}
@Override
public String toString() {
return mNonEmptyRanges.toString();
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.util.concurrent.KeyedLock;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.plain.*;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
import org.elasticsearch.index.mapper.internal.IndexFieldMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
/**
 * Builds and caches per-field {@code IndexFieldData} instances for an index,
 * selecting a builder by field type and configured field-data format.
 */
public class IndexFieldDataService extends AbstractIndexComponent {
public static final String FIELDDATA_CACHE_KEY = "index.fielddata.cache";
public static final String FIELDDATA_CACHE_VALUE_NODE = "node";
// Recognised values of the per-field "format" setting.
private static final String DISABLED_FORMAT = "disabled";
private static final String DOC_VALUES_FORMAT = "doc_values";
private static final String ARRAY_FORMAT = "array";
private static final String PAGED_BYTES_FORMAT = "paged_bytes";
private static final String FST_FORMAT = "fst";
private static final String COMPRESSED_FORMAT = "compressed";
// Builder registries: by field type, by field type when doc values are used,
// and by (type, explicit format). Populated once in the static initializer.
private final static ImmutableMap<String, IndexFieldData.Builder> buildersByType;
private final static ImmutableMap<String, IndexFieldData.Builder> docValuesBuildersByType;
private final static ImmutableMap<Tuple<String, String>, IndexFieldData.Builder> buildersByTypeAndFormat;
private final CircuitBreakerService circuitBreakerService;
static {
// Default builder per field type when no format is configured and the field
// has no doc values.
buildersByType = MapBuilder.<String, IndexFieldData.Builder>newMapBuilder()
.put("string", new PagedBytesIndexFieldData.Builder())
.put("float", new FloatArrayIndexFieldData.Builder())
.put("double", new DoubleArrayIndexFieldData.Builder())
.put("byte", new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.BYTE))
.put("short", new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.SHORT))
.put("int", new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.INT))
.put("long", new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.LONG))
.put("geo_point", new GeoPointDoubleArrayIndexFieldData.Builder())
.put(ParentFieldMapper.NAME, new ParentChildIndexFieldData.Builder())
.put(IndexFieldMapper.NAME, new IndexIndexFieldData.Builder())
.put("binary", new DisabledIndexFieldData.Builder())
.put(BooleanFieldMapper.CONTENT_TYPE, new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.BOOLEAN))
.immutableMap();
// Default builder per field type when the field stores doc values.
docValuesBuildersByType = MapBuilder.<String, IndexFieldData.Builder>newMapBuilder()
.put("string", new DocValuesIndexFieldData.Builder())
.put("float", new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.FLOAT))
.put("double", new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.DOUBLE))
.put("byte", new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.BYTE))
.put("short", new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.SHORT))
.put("int", new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.INT))
.put("long", new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.LONG))
.put("geo_point", new GeoPointBinaryDVIndexFieldData.Builder())
.put("binary", new BytesBinaryDVIndexFieldData.Builder())
.put(BooleanFieldMapper.CONTENT_TYPE, new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.BOOLEAN))
.immutableMap();
// Builder for each supported (field type, explicitly configured format) pair.
buildersByTypeAndFormat = MapBuilder.<Tuple<String, String>, IndexFieldData.Builder>newMapBuilder()
.put(Tuple.tuple("string", PAGED_BYTES_FORMAT), new PagedBytesIndexFieldData.Builder())
.put(Tuple.tuple("string", FST_FORMAT), new FSTBytesIndexFieldData.Builder())
.put(Tuple.tuple("string", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder())
.put(Tuple.tuple("string", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
.put(Tuple.tuple("float", ARRAY_FORMAT), new FloatArrayIndexFieldData.Builder())
.put(Tuple.tuple("float", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.FLOAT))
.put(Tuple.tuple("float", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
.put(Tuple.tuple("double", ARRAY_FORMAT), new DoubleArrayIndexFieldData.Builder())
.put(Tuple.tuple("double", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.DOUBLE))
.put(Tuple.tuple("double", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
.put(Tuple.tuple("byte", ARRAY_FORMAT), new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.BYTE))
.put(Tuple.tuple("byte", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.BYTE))
.put(Tuple.tuple("byte", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
.put(Tuple.tuple("short", ARRAY_FORMAT), new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.SHORT))
.put(Tuple.tuple("short", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.SHORT))
.put(Tuple.tuple("short", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
.put(Tuple.tuple("int", ARRAY_FORMAT), new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.INT))
.put(Tuple.tuple("int", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.INT))
.put(Tuple.tuple("int", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
.put(Tuple.tuple("long", ARRAY_FORMAT), new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.LONG))
.put(Tuple.tuple("long", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.LONG))
.put(Tuple.tuple("long", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
.put(Tuple.tuple("geo_point", ARRAY_FORMAT), new GeoPointDoubleArrayIndexFieldData.Builder())
.put(Tuple.tuple("geo_point", DOC_VALUES_FORMAT), new GeoPointBinaryDVIndexFieldData.Builder())
.put(Tuple.tuple("geo_point", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
.put(Tuple.tuple("geo_point", COMPRESSED_FORMAT), new GeoPointCompressedIndexFieldData.Builder())
.put(Tuple.tuple("binary", DOC_VALUES_FORMAT), new BytesBinaryDVIndexFieldData.Builder())
.put(Tuple.tuple("binary", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
.put(Tuple.tuple(BooleanFieldMapper.CONTENT_TYPE, ARRAY_FORMAT), new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.BOOLEAN))
.put(Tuple.tuple(BooleanFieldMapper.CONTENT_TYPE, DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.BOOLEAN))
.put(Tuple.tuple(BooleanFieldMapper.CONTENT_TYPE, DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
.immutableMap();
}
private final IndicesFieldDataCache indicesFieldDataCache;
// Field data instances already built, keyed by index field name.
private final ConcurrentMap<String, IndexFieldData<?>> loadedFieldData = ConcurrentCollections.newConcurrentMap();
// Per-field lock with a global mode, used by clear()/onMappingUpdate().
private final KeyedLock.GlobalLockable<String> fieldLoadingLock = new KeyedLock.GlobalLockable<>();
private final Map<String, IndexFieldDataCache> fieldDataCaches = Maps.newHashMap(); // no need for concurrency support, always used under lock
IndexService indexService;
@Inject
public IndexFieldDataService(Index index, @IndexSettings Settings indexSettings, IndicesFieldDataCache indicesFieldDataCache,
CircuitBreakerService circuitBreakerService) {
super(index, indexSettings);
// Collaborators are injected; the IndexService is set later via setIndexService.
this.indicesFieldDataCache = indicesFieldDataCache;
this.circuitBreakerService = circuitBreakerService;
}
// we need to "inject" the index service to not create cyclic dep
public void setIndexService(IndexService indexService) {
this.indexService = indexService;
}
/**
 * Clears all loaded field data and per-field caches under the global lock.
 * Failures from individual clear() calls are collected and rethrown together
 * only after every entry has been attempted.
 */
public void clear() {
fieldLoadingLock.globalLock().lock();
try {
List<Throwable> exceptions = new ArrayList<>(0);
final Collection<IndexFieldData<?>> fieldDataValues = loadedFieldData.values();
for (IndexFieldData<?> fieldData : fieldDataValues) {
try {
fieldData.clear();
} catch (Throwable t) {
exceptions.add(t);
}
}
// values() is a live view, so clearing it empties loadedFieldData itself
fieldDataValues.clear();
final Collection<IndexFieldDataCache> fieldDataCacheValues = fieldDataCaches.values();
for (IndexFieldDataCache cache : fieldDataCacheValues) {
try {
cache.clear();
} catch (Throwable t) {
exceptions.add(t);
}
}
fieldDataCacheValues.clear();
ExceptionsHelper.maybeThrowRuntimeAndSuppress(exceptions);
} finally {
fieldLoadingLock.globalLock().unlock();
}
}
/**
 * Drops the loaded field data and the per-field cache for a single field,
 * holding that field's load lock for the duration. Failures from the
 * individual {@code clear()} calls are collected and rethrown together
 * after both removals have been attempted.
 *
 * @param fieldName index name of the field to clear
 */
public void clearField(final String fieldName) {
    fieldLoadingLock.acquire(fieldName);
    try {
        final List<Throwable> failures = new ArrayList<>(0);
        final IndexFieldData<?> removedData = loadedFieldData.remove(fieldName);
        if (removedData != null) {
            try {
                removedData.clear();
            } catch (Throwable t) {
                failures.add(t);
            }
        }
        final IndexFieldDataCache removedCache = fieldDataCaches.remove(fieldName);
        if (removedCache != null) {
            try {
                removedCache.clear();
            } catch (Throwable t) {
                failures.add(t);
            }
        }
        ExceptionsHelper.maybeThrowRuntimeAndSuppress(failures);
    } finally {
        fieldLoadingLock.release(fieldName);
    }
}
/**
 * Invalidates loaded field data after a mapping change so that new loads pick
 * up the updated mapper, while keeping the per-field caches intact for reuse.
 */
public void onMappingUpdate() {
    // synchronize to make sure to not miss field data instances that are being loaded
    fieldLoadingLock.globalLock().lock();
    try {
        // important: do not clear fieldDataCaches: the cache may be reused
        loadedFieldData.clear();
    } finally {
        fieldLoadingLock.globalLock().unlock();
    }
}
/**
 * Returns (building and caching on first use) the field data for the given
 * mapper. Uses double-checked locking on the per-field lock: a fast lock-free
 * read of {@code loadedFieldData}, then a locked re-check and build.
 * Builder selection order: explicit format from settings (downgraded with a
 * warning when doc_values is requested but the field has none), then the
 * doc-values builder, then the default builder for the type.
 *
 * @throws IllegalArgumentException if the field has no fielddata type, no
 *         matching builder, or an unsupported cache type is configured
 */
@SuppressWarnings("unchecked")
public <IFD extends IndexFieldData<?>> IFD getForField(FieldMapper mapper) {
    final FieldMapper.Names fieldNames = mapper.names();
    final FieldDataType type = mapper.fieldDataType();
    if (type == null) {
        throw new IllegalArgumentException("found no fielddata type for field [" + fieldNames.fullName() + "]");
    }
    final boolean docValues = mapper.hasDocValues();
    final String key = fieldNames.indexName();
    IndexFieldData<?> fieldData = loadedFieldData.get(key);
    if (fieldData == null) {
        fieldLoadingLock.acquire(key);
        try {
            // re-check under the lock: another thread may have built it meanwhile
            fieldData = loadedFieldData.get(key);
            if (fieldData == null) {
                IndexFieldData.Builder builder = null;
                String format = type.getFormat(indexSettings);
                if (format != null && FieldDataType.DOC_VALUES_FORMAT_VALUE.equals(format) && !docValues) {
                    // doc_values format requested but the field has no doc values: fall back
                    logger.warn("field [" + fieldNames.fullName() + "] has no doc values, will use default field data format");
                    format = null;
                }
                if (format != null) {
                    builder = buildersByTypeAndFormat.get(Tuple.tuple(type.getType(), format));
                    if (builder == null) {
                        logger.warn("failed to find format [" + format + "] for field [" + fieldNames.fullName() + "], will use default");
                    }
                }
                if (builder == null && docValues) {
                    builder = docValuesBuildersByType.get(type.getType());
                }
                if (builder == null) {
                    builder = buildersByType.get(type.getType());
                }
                if (builder == null) {
                    throw new IllegalArgumentException("failed to find field data builder for field " + fieldNames.fullName() + ", and type " + type.getType());
                }
                IndexFieldDataCache cache = fieldDataCaches.get(fieldNames.indexName());
                if (cache == null) {
                    // we default to node level cache, which in turn defaults to be unbounded
                    // this means changing the node level settings is simple, just set the bounds there
                    String cacheType = type.getSettings().get("cache", indexSettings.get(FIELDDATA_CACHE_KEY, FIELDDATA_CACHE_VALUE_NODE));
                    if (FIELDDATA_CACHE_VALUE_NODE.equals(cacheType)) {
                        cache = indicesFieldDataCache.buildIndexFieldDataCache(indexService, index, fieldNames, type);
                    } else if ("none".equals(cacheType)){
                        cache = new IndexFieldDataCache.None();
                    } else {
                        throw new IllegalArgumentException("cache type not supported [" + cacheType + "] for field [" + fieldNames.fullName() + "]");
                    }
                    fieldDataCaches.put(fieldNames.indexName(), cache);
                }
                fieldData = builder.build(index, indexSettings, mapper, cache, circuitBreakerService, indexService.mapperService());
                loadedFieldData.put(fieldNames.indexName(), fieldData);
            }
        } finally {
            fieldLoadingLock.release(key);
        }
    }
    return (IFD) fieldData;
}
}
| |
/*
* $Id: LuaObject.java,v 1.6 2006/12/22 14:06:40 thiago Exp $
* Copyright (C) 2003-2007 Kepler Project.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.keplerproject.luajava;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Proxy;
import java.util.StringTokenizer;
/**
* This class represents a Lua object of any type. A LuaObject is constructed by a {@link LuaState} object using one of
* the four methods:
* <ul>
* <li>{@link LuaState#getLuaObject(String globalName)}</li>
* <li>{@link LuaState#getLuaObject(LuaObject parent, String name)}</li>
* <li>{@link LuaState#getLuaObject(LuaObject parent, Number name)}</li>
* <li>{@link LuaState#getLuaObject(LuaObject parent, LuaObject name)}</li>
* <li>{@link LuaState#getLuaObject(int index)}</li>
* </ul>
* The LuaObject will represent only the object itself, not a variable or a stack index, so when you change a string,
* remember that strings are immutable objects in Lua, and the LuaObject you have will represent the old one.
*
* <h2>Proxies</h2>
*
* LuaJava allows you to implement a class in Lua, like said before. If you want to create this proxy from Java, you
* should have a LuaObject representing the table that has the functions that implement the interface. From this
* LuaObject you can call the <code>createProxy(String implements)</code>. This method receives the string with the
* name of the interfaces implemented by the object separated by comma.
*
* @author Rizzato
* @author Thiago Ponte
*/
public class LuaObject
{
    // Key into the Lua registry where the referenced value is stored.
    protected Integer ref;

    // The Lua state that owns the referenced value; all operations synchronize on it.
    protected LuaState L;

    /**
     * Creates a reference to an object in the variable globalName
     *
     * @param L the Lua state that owns the value
     * @param globalName name of the global variable to reference
     */
    protected LuaObject(LuaState L, String globalName)
    {
        synchronized (L)
        {
            this.L = L;
            // push the global, register a registry reference to it, restore the stack
            L.getGlobal(globalName);
            registerValue(-1);
            L.pop(1);
        }
    }

    /**
     * Creates a reference to an object inside another object
     *
     * @param parent
     *            The Lua Table or Userdata that contains the Field.
     * @param name
     *            The name that index the field
     * @throws LuaException
     *             When the parent object isn't a Table or Userdata
     */
    protected LuaObject(LuaObject parent, String name) throws LuaException
    {
        synchronized (parent.getLuaState())
        {
            this.L = parent.getLuaState();
            if (!parent.isTable() && !parent.isUserdata())
            {
                throw new LuaException("Object parent should be a table or userdata .");
            }
            // stack discipline: push parent, index it by name, drop parent,
            // register the field value, then pop it
            parent.push();
            L.pushString(name);
            L.getTable(-2);
            L.remove(-2);
            registerValue(-1);
            L.pop(1);
        }
    }

    /**
     * This constructor creates a LuaObject from a table that is indexed by a number.
     *
     * @param parent
     *            The Lua Table or Userdata that contains the Field.
     * @param name
     *            The name (number) that index the field
     * @throws LuaException
     *             When the parent object isn't a Table or Userdata
     */
    protected LuaObject(LuaObject parent, Number name) throws LuaException
    {
        synchronized (parent.getLuaState())
        {
            this.L = parent.getLuaState();
            if (!parent.isTable() && !parent.isUserdata())
                throw new LuaException("Object parent should be a table or userdata .");
            parent.push();
            L.pushNumber(name.doubleValue());
            L.getTable(-2);
            L.remove(-2);
            registerValue(-1);
            L.pop(1);
        }
    }

    /**
     * This constructor creates a LuaObject from a table that is indexed by a LuaObject.
     *
     * @param parent
     *            The Lua Table or Userdata that contains the Field.
     * @param name
     *            The name (LuaObject) that index the field
     * @throws LuaException
     *             When the parent object isn't a Table or Userdata, or when the
     *             parent and the key belong to different Lua states
     */
    protected LuaObject(LuaObject parent, LuaObject name) throws LuaException
    {
        // both objects must live in the same native state for stack operations to be valid
        if (parent.getLuaState() != name.getLuaState())
            throw new LuaException("LuaStates must be the same!");
        synchronized (parent.getLuaState())
        {
            if (!parent.isTable() && !parent.isUserdata())
                throw new LuaException("Object parent should be a table or userdata .");
            this.L = parent.getLuaState();
            parent.push();
            name.push();
            L.getTable(-2);
            L.remove(-2);
            registerValue(-1);
            L.pop(1);
        }
    }

    /**
     * Creates a reference to an object in the given index of the stack
     *
     * @param L the Lua state that owns the value
     * @param index
     *            of the object on the lua stack
     */
    protected LuaObject(LuaState L, int index)
    {
        synchronized (L)
        {
            this.L = L;
            registerValue(index);
        }
    }

    /**
     * Gets the Object's State
     */
    public LuaState getLuaState()
    {
        return L;
    }

    /**
     * Creates the reference to the object in the registry table
     *
     * @param index
     *            of the object on the lua stack
     */
    private void registerValue(int index)
    {
        synchronized (L)
        {
            // copy the value to the top and anchor it in the registry; Lref pops the copy
            L.pushValue(index);
            int key = L.Lref(LuaState.LUA_REGISTRYINDEX.intValue());
            ref = new Integer(key);
        }
    }

    // Best-effort release of the registry reference on GC. Only touches the
    // native state if it is still alive (non-zero peer); errors are reported
    // to stderr rather than propagated out of the finalizer.
    protected void finalize()
    {
        try
        {
            synchronized (L)
            {
                if (L.getCPtrPeer() != 0)
                    L.LunRef(LuaState.LUA_REGISTRYINDEX.intValue(), ref.intValue());
            }
        }
        catch (Exception e)
        {
            System.err.println("Unable to release object " + ref);
        }
    }

    /**
     * Pushes the object represented by <code>this</code> into L's stack
     */
    public void push()
    {
        L.rawGetI(LuaState.LUA_REGISTRYINDEX.intValue(), ref.intValue());
    }

    // --- Type predicates. Each pushes the referenced value, queries its Lua
    // --- type, and pops it again, leaving the stack unchanged.

    public boolean isNil()
    {
        synchronized (L)
        {
            push();
            boolean bool = L.isNil(-1);
            L.pop(1);
            return bool;
        }
    }

    public boolean isBoolean()
    {
        synchronized (L)
        {
            push();
            boolean bool = L.isBoolean(-1);
            L.pop(1);
            return bool;
        }
    }

    public boolean isNumber()
    {
        synchronized (L)
        {
            push();
            boolean bool = L.isNumber(-1);
            L.pop(1);
            return bool;
        }
    }

    public boolean isString()
    {
        synchronized (L)
        {
            push();
            boolean bool = L.isString(-1);
            L.pop(1);
            return bool;
        }
    }

    public boolean isFunction()
    {
        synchronized (L)
        {
            push();
            boolean bool = L.isFunction(-1);
            L.pop(1);
            return bool;
        }
    }

    public boolean isJavaObject()
    {
        synchronized (L)
        {
            push();
            boolean bool = L.isObject(-1);
            L.pop(1);
            return bool;
        }
    }

    public boolean isJavaFunction()
    {
        synchronized (L)
        {
            push();
            boolean bool = L.isJavaFunction(-1);
            L.pop(1);
            return bool;
        }
    }

    public boolean isTable()
    {
        synchronized (L)
        {
            push();
            boolean bool = L.isTable(-1);
            L.pop(1);
            return bool;
        }
    }

    public boolean isUserdata()
    {
        synchronized (L)
        {
            push();
            boolean bool = L.isUserdata(-1);
            L.pop(1);
            return bool;
        }
    }

    /** Returns the raw Lua type constant of the referenced value. */
    public int type()
    {
        synchronized (L)
        {
            push();
            int type = L.type(-1);
            L.pop(1);
            return type;
        }
    }

    // --- Value accessors. Same push/query/pop pattern as the predicates.

    public boolean getBoolean()
    {
        synchronized (L)
        {
            push();
            boolean bool = L.toBoolean(-1);
            L.pop(1);
            return bool;
        }
    }

    public double getNumber()
    {
        synchronized (L)
        {
            push();
            double db = L.toNumber(-1);
            L.pop(1);
            return db;
        }
    }

    public String getString()
    {
        synchronized (L)
        {
            push();
            String str = L.toString(-1);
            L.pop(1);
            return str;
        }
    }

    public Object getObject() throws LuaException
    {
        synchronized (L)
        {
            push();
            Object obj = L.getObjectFromUserdata(-1);
            L.pop(1);
            return obj;
        }
    }

    /**
     * If <code>this</code> is a table or userdata, tries to get
     * a field value.
     */
    public LuaObject getField(String field) throws LuaException
    {
        return L.getLuaObject(this, field);
    }

    /**
     * Calls the object represented by <code>this</code> using Lua function pcall.
     *
     * @param args -
     *            Call arguments
     * @param nres -
     *            Number of objects returned
     * @return Object[] - Returned Objects
     * @throws LuaException when the value is not callable or pcall reports an error
     */
    public Object[] call(Object[] args, int nres) throws LuaException
    {
        synchronized (L)
        {
            if (!isFunction() && !isTable() && !isUserdata())
                throw new LuaException("Invalid object. Not a function, table or userdata .");
            int top = L.getTop();
            push();
            int nargs;
            if (args != null)
            {
                nargs = args.length;
                for (int i = 0; i < nargs; i++)
                {
                    Object obj = args[i];
                    L.pushObjectValue(obj);
                }
            }
            else
                nargs = 0;
            int err = L.pcall(nargs, nres, 0);
            if (err != 0)
            {
                // on error pcall leaves the message on the stack (if it is a string)
                String str;
                if (L.isString(-1))
                {
                    str = L.toString(-1);
                    L.pop(1);
                }
                else
                    str = "";
                // translate the numeric error code into a readable prefix
                if (err == LuaState.LUA_ERRRUN.intValue())
                {
                    str = "Runtime error. " + str;
                }
                else if (err == LuaState.LUA_ERRMEM.intValue())
                {
                    str = "Memory allocation error. " + str;
                }
                else if (err == LuaState.LUA_ERRERR.intValue())
                {
                    str = "Error while running the error handler function. " + str;
                }
                else
                {
                    str = "Lua Error code " + err + ". " + str;
                }
                throw new LuaException(str);
            }
            // LUA_MULTRET: take however many values the call actually returned
            if (nres == LuaState.LUA_MULTRET.intValue())
                nres = L.getTop() - top;
            if (L.getTop() - top < nres)
            {
                throw new LuaException("Invalid Number of Results .");
            }
            // pop results in reverse so the array ends up in call order
            Object[] res = new Object[nres];
            for (int i = nres; i > 0; i--)
            {
                res[i - 1] = L.toJavaObject(-1);
                L.pop(1);
            }
            return res;
        }
    }

    /**
     * Calls the object represented by <code>this</code> using Lua function pcall. Returns 1 object
     *
     * @param args -
     *            Call arguments
     * @return Object - Returned Object
     * @throws LuaException when the call fails or returns no value
     */
    public Object call(Object[] args) throws LuaException
    {
        return call(args, 1)[0];
    }

    // Human-readable rendering of the referenced value; returns null when the
    // type is unknown or conversion throws.
    public String toString()
    {
        synchronized (L)
        {
            try
            {
                if (isNil())
                    return "nil";
                else if (isBoolean())
                    return String.valueOf(getBoolean());
                else if (isNumber())
                    return String.valueOf(getNumber());
                else if (isString())
                    return getString();
                else if (isFunction())
                    return "Lua Function";
                else if (isJavaObject())
                    return getObject().toString();
                else if (isUserdata())
                    return "Userdata";
                else if (isTable())
                    return "Lua Table";
                else if (isJavaFunction())
                    return "Java Function";
                else
                    return null;
            }
            catch (LuaException e)
            {
                return null;
            }
        }
    }

    /**
     * Function that creates a java proxy to the object represented by <code>this</code>
     *
     * @param implem
     *            Interfaces that are implemented, separated by <code>,</code>
     * @throws ClassNotFoundException when one of the interface names cannot be loaded
     * @throws LuaException when the referenced value is not a table
     */
    public Object createProxy(String implem) throws ClassNotFoundException, LuaException
    {
        synchronized (L)
        {
            if (!isTable())
                throw new LuaException("Invalid Object. Must be Table.");
            StringTokenizer st = new StringTokenizer(implem, ",");
            Class[] interfaces = new Class[st.countTokens()];
            for (int i = 0; st.hasMoreTokens(); i++)
                interfaces[i] = Class.forName(st.nextToken());
            InvocationHandler handler = new LuaInvocationHandler(this);
            return Proxy.newProxyInstance(this.getClass().getClassLoader(), interfaces, handler);
        }
    }
}
| |
package com.mojang.minecraft.level;
import com.mojang.minecraft.Entity;
import com.mojang.minecraft.model.Vector;
import com.mojang.minecraft.phys.AABB;
import com.mojang.minecraft.render.TextureManager;
import com.mojang.minecraft.render.ClippingHelper;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
// Spatial hash of entities over 16x16x16 cells, used for neighborhood queries
// and rendering. Code appears decompiled (reused parameter variables, unrolled
// loops); kept byte-identical, comments added.
public class BlockMap implements Serializable {
    public static final long serialVersionUID = 0L;
    // Grid dimensions in cells (world units / 16, minimum 1).
    private int width;
    private int depth;
    private int height;
    // Two reusable cursors so lookups don't allocate per call.
    private Slot slot = new Slot();
    private Slot slot2 = new Slot();
    // Flat 3D array of per-cell entity lists, indexed (z * depth + y) * width + x.
    public List<Entity>[] entityGrid;
    public List<Entity> all = new ArrayList<Entity>();
    private List<Entity> tmp = new ArrayList<Entity>();

    @SuppressWarnings("unchecked")
    public BlockMap(int width, int height, int depth) {
        this.width = width / 16;
        // NOTE(review): 'depth' is derived from the height parameter and
        // 'height' from the depth parameter -- looks like decompiler name
        // swapping; confirm against callers before relying on the names.
        this.depth = height / 16;
        this.height = depth / 16;
        if (this.width == 0) {
            this.width = 1;
        }
        if (this.depth == 0) {
            this.depth = 1;
        }
        if (this.height == 0) {
            this.height = 1;
        }
        this.entityGrid = new ArrayList[this.width * this.depth * this.height];
        // parameter variables are reused as loop counters (decompiler artifact)
        for (width = 0; width < this.width; ++width) {
            for (height = 0; height < this.depth; ++height) {
                for (depth = 0; depth < this.height; ++depth) {
                    this.entityGrid[(depth * this.depth + height) * this.width + width] = new ArrayList<Entity>();
                }
            }
        }
    }

    // Adds an entity to the global list and its grid cell, and records its
    // current position as the "old" position used for later cell migration.
    public void insert(Entity entity) {
        this.all.add(entity);
        this.slot.init(this, entity.x, entity.y, entity.z).add(entity);
        entity.xOld = entity.x;
        entity.yOld = entity.y;
        entity.zOld = entity.z;
        entity.blockMap = this;
    }

    // Removes an entity from the cell of its last-recorded position and from
    // the global list.
    public void remove(Entity entity) {
        this.slot.init(this, entity.xOld, entity.yOld, entity.zOld).remove(entity);
        this.all.remove(entity);
    }

    // Migrates an entity between cells when its position has changed cells.
    public void moved(Entity entity) {
        Slot var2 = this.slot.init(this, entity.xOld, entity.yOld, entity.zOld);
        Slot var3 = this.slot2.init(this, entity.x, entity.y, entity.z);
        if (!var2.equals(var3)) {
            var2.remove(entity);
            var3.add(entity);
            entity.xOld = entity.x;
            entity.yOld = entity.y;
            entity.zOld = entity.z;
        }
    }

    // Query overload that reuses the shared scratch list (result is only valid
    // until the next call).
    public List<Entity> getEntities(Entity exclude, float x, float y, float z, float x2, float y2, float z2) {
        this.tmp.clear();
        return this.getEntities(exclude, x, y, z, x2, y2, z2, this.tmp);
    }

    // Collects entities intersecting the box (x,y,z)-(x2,y2,z2), scanning the
    // covered cells plus a one-cell border, skipping 'exclude'.
    public List<Entity> getEntities(Entity exclude, float x, float y, float z, float x2, float y2, float z2, List<Entity> result) {
        Slot slot = this.slot.init(this, x, y, z);
        Slot slot2 = this.slot2.init(this, x2, y2, z2);
        for (int var11 = slot.xSlot - 1; var11 <= slot2.xSlot + 1; ++var11) {
            for (int var12 = slot.ySlot - 1; var12 <= slot2.ySlot + 1; ++var12) {
                for (int var13 = slot.zSlot - 1; var13 <= slot2.zSlot + 1; ++var13) {
                    if (var11 >= 0 && var12 >= 0 && var13 >= 0 && var11 < this.width && var12 < this.depth && var13 < this.height) {
                        List<Entity> entities = this.entityGrid[(var13 * this.depth + var12) * this.width + var11];
                        for (Entity entity : entities) {
                            if (entity != exclude && entity.intersects(x, y, z, x2, y2, z2)) {
                                result.add(entity);
                            }
                        }
                    }
                }
            }
        }
        return result;
    }

    // Drops every entity not allowed in creative mode, both from the grid
    // cells and from the global list. Iterates over a snapshot ('cache') to
    // avoid mutating a list while iterating it.
    public void removeAllNonCreativeModeEntities() {
        List<Entity> cache = new ArrayList<Entity>();
        for (int x = 0; x < this.width; ++x) {
            for (int z = 0; z < this.depth; ++z) {
                for (int y = 0; y < this.height; ++y) {
                    List<Entity> entities = this.entityGrid[(y * this.depth + z) * this.width + x];
                    cache.addAll(entities);
                    for (Entity entity : cache) {
                        if (!entity.isCreativeModeAllowed()) {
                            entities.remove(entity);
                        }
                    }
                    cache.clear();
                }
            }
        }
        cache.addAll(this.all);
        for(Entity entity : cache) {
            if(!entity.isCreativeModeAllowed()) {
                this.all.remove(entity);
            }
        }
    }

    // Empties every grid cell (the global 'all' list is left untouched).
    public void clear() {
        for (int x = 0; x < this.width; ++x) {
            for (int var2 = 0; var2 < this.depth; ++var2) {
                for (int var3 = 0; var3 < this.height; ++var3) {
                    this.entityGrid[(var3 * this.depth + var2) * this.width + x].clear();
                }
            }
        }
    }

    // AABB convenience overloads of the box query above.
    public List<Entity> getEntities(Entity var1, AABB var2) {
        this.tmp.clear();
        return this.getEntities(var1, var2.x0, var2.y0, var2.z0, var2.x1, var2.y1, var2.z1, this.tmp);
    }

    public List<Entity> getEntities(Entity var1, AABB var2, List<Entity> var3) {
        return this.getEntities(var1, var2.x0, var2.y0, var2.z0, var2.x1, var2.y1, var2.z1, var3);
    }

    // Ticks every entity; removed entities are dropped from list and grid,
    // surviving entities are re-binned when they crossed a cell boundary.
    public void tickAll() {
        for (int var1 = 0; var1 < this.all.size(); ++var1) {
            Entity var2;
            (var2 = this.all.get(var1)).tick();
            if (var2.removed) {
                this.all.remove(var1--);
                this.slot.init(this, var2.xOld, var2.yOld, var2.zOld).remove(var2);
            } else {
                int var3 = (int) (var2.xOld / 16.0F);
                int var4 = (int) (var2.yOld / 16.0F);
                int var5 = (int) (var2.zOld / 16.0F);
                int var6 = (int) (var2.x / 16.0F);
                int var7 = (int) (var2.y / 16.0F);
                int var8 = (int) (var2.z / 16.0F);
                if (var3 != var6 || var4 != var7 || var5 != var8) {
                    this.moved(var2);
                }
            }
        }
    }

    // Renders all entities in cells whose (padded) bounding box intersects the
    // view frustum. The inner while loop is an unrolled test of all 8 cell
    // corners against the 6 frustum planes: var10000/var21 ends up true iff
    // the cell is FULLY inside the frustum, in which case per-entity culling
    // is skipped.
    public void render(Vector model, ClippingHelper var2, TextureManager textureManager, float var4) {
        for (int var5 = 0; var5 < this.width; ++var5) {
            // cell bounds in world units, padded by 2 on each side
            float var6 = ((var5 << 4) - 2);
            float var7 = ((var5 + 1 << 4) + 2);
            for (int var8 = 0; var8 < this.depth; ++var8) {
                float var9 = ((var8 << 4) - 2);
                float var10 = ((var8 + 1 << 4) + 2);
                for (int var11 = 0; var11 < this.height; ++var11) {
                    List<?> var12;
                    if ((var12 = this.entityGrid[(var11 * this.depth + var8) * this.width + var5]).size() != 0) {
                        float var13 = ((var11 << 4) - 2);
                        float var14 = ((var11 + 1 << 4) + 2);
                        if (var2.isBoxInFrustrum(var6, var9, var13, var7, var10, var14)) {
                            float var16 = var14;
                            float var17 = var10;
                            float var15 = var7;
                            var14 = var13;
                            var13 = var9;
                            float var18 = var6;
                            ClippingHelper var19 = var2;
                            int var20 = 0;
                            boolean var10000;
                            while (true) {
                                if (var20 >= 6) {
                                    var10000 = true;
                                    break;
                                }
                                if (var19.frustrum[var20][0] * var18 + var19.frustrum[var20][1] * var13 + var19.frustrum[var20][2] * var14 + var19.frustrum[var20][3] <= 0.0F) {
                                    var10000 = false;
                                    break;
                                }
                                if (var19.frustrum[var20][0] * var15 + var19.frustrum[var20][1] * var13 + var19.frustrum[var20][2] * var14 + var19.frustrum[var20][3] <= 0.0F) {
                                    var10000 = false;
                                    break;
                                }
                                if (var19.frustrum[var20][0] * var18 + var19.frustrum[var20][1] * var17 + var19.frustrum[var20][2] * var14 + var19.frustrum[var20][3] <= 0.0F) {
                                    var10000 = false;
                                    break;
                                }
                                if (var19.frustrum[var20][0] * var15 + var19.frustrum[var20][1] * var17 + var19.frustrum[var20][2] * var14 + var19.frustrum[var20][3] <= 0.0F) {
                                    var10000 = false;
                                    break;
                                }
                                if (var19.frustrum[var20][0] * var18 + var19.frustrum[var20][1] * var13 + var19.frustrum[var20][2] * var16 + var19.frustrum[var20][3] <= 0.0F) {
                                    var10000 = false;
                                    break;
                                }
                                if (var19.frustrum[var20][0] * var15 + var19.frustrum[var20][1] * var13 + var19.frustrum[var20][2] * var16 + var19.frustrum[var20][3] <= 0.0F) {
                                    var10000 = false;
                                    break;
                                }
                                if (var19.frustrum[var20][0] * var18 + var19.frustrum[var20][1] * var17 + var19.frustrum[var20][2] * var16 + var19.frustrum[var20][3] <= 0.0F) {
                                    var10000 = false;
                                    break;
                                }
                                if (var19.frustrum[var20][0] * var15 + var19.frustrum[var20][1] * var17 + var19.frustrum[var20][2] * var16 + var19.frustrum[var20][3] <= 0.0F) {
                                    var10000 = false;
                                    break;
                                }
                                ++var20;
                            }
                            boolean var21 = var10000;
                            for (int var23 = 0; var23 < var12.size(); ++var23) {
                                Entity var22;
                                if ((var22 = (Entity) var12.get(var23)).shouldRender(model)) {
                                    if (!var21) {
                                        // cell only partially visible: cull per entity
                                        AABB var24 = var22.bb;
                                        if (!var2.isBoxInFrustrum(var24.x0, var24.y0, var24.z0, var24.x1, var24.y1, var24.z1)) {
                                            continue;
                                        }
                                    }
                                    var22.render(textureManager, var4);
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    // Cursor mapping a world position to a clamped grid cell.
    public static class Slot implements Serializable {
        public static final long serialVersionUID = 0L;
        private BlockMap parent;
        private int xSlot;
        private int ySlot;
        private int zSlot;

        // Re-points this cursor at the cell containing (x, y, z), clamped to
        // the grid bounds. Returns this for chaining.
        public Slot init(BlockMap parent, float x, float y, float z) {
            this.parent = parent;
            this.xSlot = (int) (x / 16);
            this.ySlot = (int) (y / 16);
            this.zSlot = (int) (z / 16);
            if (this.xSlot < 0) {
                this.xSlot = 0;
            }
            if (this.ySlot < 0) {
                this.ySlot = 0;
            }
            if (this.zSlot < 0) {
                this.zSlot = 0;
            }
            if (this.xSlot >= parent.width) {
                this.xSlot = parent.width - 1;
            }
            if (this.ySlot >= parent.depth) {
                this.ySlot = parent.depth - 1;
            }
            if (this.zSlot >= parent.height) {
                this.zSlot = parent.height - 1;
            }
            return this;
        }

        public void add(Entity entity) {
            if (this.xSlot >= 0 && this.ySlot >= 0 && this.zSlot >= 0) {
                parent.entityGrid[(this.zSlot * parent.depth + this.ySlot) * parent.width + this.xSlot].add(entity);
            }
        }

        public void remove(Entity entity) {
            if (this.xSlot >= 0 && this.ySlot >= 0 && this.zSlot >= 0) {
                parent.entityGrid[(this.zSlot * parent.depth + this.ySlot) * parent.width + this.xSlot].remove(entity);
            }
        }
    }
}
| |
package com.rockhoppertech.music.fx.cmn.model;
/*
* #%L
* Rocky Music FX
* %%
* Copyright (C) 1991 - 2014 Rockhopper Technologies
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.util.HashMap;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.rockhoppertech.music.Pitch;
import com.rockhoppertech.music.PitchFactory;
import static com.rockhoppertech.music.Pitch.*;
/**
* A note drawn in a staff using the alto clef.
*
* @author <a href="http://genedelisa.com/">Gene De Lisa</a>
*
*/
public class AltoNote {

    final static Logger logger = LoggerFactory.getLogger(AltoNote.class);

    // Ledger-line counts keyed by pitch number, one map per accidental spelling.
    static Map<Integer, Integer> flatLedgersMap = new HashMap<Integer, Integer>();
    static Map<Integer, Integer> sharpLedgersMap = new HashMap<Integer, Integer>();

    /**
     * Prints the flat-spelling ledger count for every pitch in the lookup range.
     */
    public static void main(String[] args) {
        for (int i = 0; i < 121; i++)
            System.out.println("note " + PitchFactory.getPitch(i) + " ledgers "
                    + AltoNote.altoLedgersFlat[i]);
    }

    private static int pat[] = { 3, 4, 3, 4, 3 }; // sharps
    // NOTE(review): pat2 has 6 entries but is only ever indexed modulo 5, so
    // the final entry (3) is unreachable -- confirm intent before changing.
    private static int pat2[] = {2, 1, 4, 1, 2, 3};

    /** Precomputed ledger counts for flat spellings, pitches 0..120. */
    public static final int[] altoLedgersFlat = new int[121];
    /** Precomputed ledger counts for sharp spellings, pitches 0..120. */
    public static final int[] altoLedgersSharp = new int[121];

    static {
        // Populate the lookup maps first, then snapshot them into the arrays.
        initMaps();
        for (int i = 0; i < 121; i++)
            altoLedgersFlat[i] = altoledgers(i, false);
        for (int i = 0; i < 121; i++)
            altoLedgersSharp[i] = altoledgers(i, true);
    }

    /**
     * Returns the ledger-line count for a pitch on the alto staff.
     *
     * @param num    pitch number (valid range 0..126, the range initMaps fills)
     * @param sharps true to use the sharp-spelling table, false for flats
     * @return number of ledger lines needed
     */
    static int altoledgers(int num, boolean sharps) {
        if (!sharps) {
            return flatLedgersMap.get(num);
        }
        return sharpLedgersMap.get(num);
    }

    /**
     * Counts ledger lines needed above the alto staff by building the list of
     * "transition" pitches at which a new line is added, then counting the
     * transitions at or below {@code num}.
     * NOTE(review): superseded by the map lookups in altoledgers; kept for reference.
     */
    static int aboveAltoLedgers(int num, boolean sharps) {
        int j, k, n;
        // transition notes where a new line is added
        int[] trans = new int[70];
        int ledgers = 0;
        int gs = 10; // g sharp
        int g = 79; // g natural
        gs = Pitch.A5; // actually a5
        g = Pitch.AF5; // actually
        if (sharps) {
            trans[0] = gs;
            for (k = 1, j = gs; j < 127; j++, k++) {
                trans[k] = trans[k - 1] + pat[(k - 1) % 5];
            }
            for (j = gs; j < num; j++) {
                for (n = 0; n < k; n++)
                    if (j == trans[n]) {
                        ledgers++;
                    }
            }
        } else {
            trans[0] = g;
            for (k = 1, j = g; j < 127; j++, k++) {
                trans[k] = trans[k - 1] + pat2[(k - 1) % 5];
            }
            for (j = g; j < num; j++) {
                for (n = 0; n < k; n++)
                    if (j == trans[n]) {
                        ledgers++;
                    }
            }
        }
        return (ledgers);
    }

    /**
     * Counts ledger lines needed below the alto staff, walking downward from
     * the lowest ledger-free pitch and counting transition pitches above {@code num}.
     * NOTE(review): superseded by the map lookups in altoledgers; kept for reference.
     */
    static int belowAltoLedgers(int num, boolean sharps) {
        int j, k, n;
        int[] trans = new int[70];
        int ledgers = 0;
        int d = Pitch.E4; // lowest without a ledger
        if (sharps) {
            trans[0] = d;
            for (k = 1, j = d; j > num; j--, k++) {
                trans[k] = trans[k - 1] - pat[(k - 1) % 5];
            }
            for (j = d; j > num; j--) {
                for (n = 0; n < k; n++)
                    if (j == trans[n]) {
                        ledgers++;
                    }
            }
        } else {
            // the pattern is the number of ledgers in a row
            // e.g. e ef d df all have 1 ledger in the bass clef
            int[] pattern = { 4, // e ef d df
                    3, // c b bb
                    4, // a af g gf
                    3, // f
                    3 }; // flats
            trans[0] = Pitch.EF4;
            for (k = 1, j = Pitch.EF4; j > num; j--, k++) {
                trans[k] = trans[k - 1] - pattern[(k - 1) % 5];
            }
            for (j = Pitch.EF4; j > num; j--) {
                for (n = 0; n < k; n++)
                    if (j == trans[n]) {
                        ledgers++;
                    }
            }
            if (logger.isDebugEnabled()) {
                logger.debug(String.format("ledgers = %d", ledgers));
            }
        }
        return (ledgers);
    }

    /** Associates {@code ledgers} with each pitch in {@code pitches}. */
    private static void fill(Map<Integer, Integer> map, int ledgers, int... pitches) {
        for (int p : pitches) {
            map.put(p, ledgers);
        }
    }

    /**
     * Populates both lookup maps. Every pitch 0..126 defaults to zero ledgers,
     * then the nonzero regions are overridden. The original hand-written table
     * carried several "known wrong" markers; those caveats are preserved below.
     */
    static void initMaps () {
        for(int i = 0; i < 127 ; i++) {
            flatLedgersMap.put(i, 0);
            sharpLedgersMap.put(i, 0);
        }
        // flats, above the staff
        fill(flatLedgersMap, 1, BF5, B5, C6);
        fill(flatLedgersMap, 2, DF6, D6, EF6, E6);
        fill(flatLedgersMap, 3, F6, GF6, G6);
        fill(flatLedgersMap, 4, AF6, A6, BF6, B6);
        // original caveat: not filled in (correctly) for octaves 7 and 8
        fill(flatLedgersMap, 5, C7);
        fill(flatLedgersMap, 1, DF7);
        fill(flatLedgersMap, 2, D7, EF7, E7);
        fill(flatLedgersMap, 3, F7, GF7, G7);
        fill(flatLedgersMap, 4, AF7, A7, BF7, B7);
        fill(flatLedgersMap, 8, C8, DF8);
        fill(flatLedgersMap, 2, D8, EF8, E8);
        fill(flatLedgersMap, 3, F8, GF8, G8);
        fill(flatLedgersMap, 4, AF8, A8, BF8, B8);
        // flats, below the staff (E4..EF4 and upward need none)
        fill(flatLedgersMap, 1, D4, DF4, C4);
        fill(flatLedgersMap, 0, EF4);
        fill(flatLedgersMap, 5, C3);
        fill(flatLedgersMap, 4, DF3, D3, EF3, E3);
        fill(flatLedgersMap, 3, F3, GF3, G3);
        fill(flatLedgersMap, 2, AF3, A3, BF3, B3);
        // original caveat: values wrong after here (octaves 2, 1, 0)
        fill(flatLedgersMap, 1, C2, DF2);
        fill(flatLedgersMap, 2, D2, EF2, E2);
        fill(flatLedgersMap, 3, F2, GF2, G2);
        fill(flatLedgersMap, 4, AF2, A2, BF2, B2);
        fill(flatLedgersMap, 1, C1, DF1);
        fill(flatLedgersMap, 2, D1, EF1, E1);
        fill(flatLedgersMap, 3, F1, GF1, G1);
        fill(flatLedgersMap, 4, AF1, A1, BF1, B1);
        fill(flatLedgersMap, 0, C0, DF0);
        fill(flatLedgersMap, 2, D0, EF0, E0);
        fill(flatLedgersMap, 3, F0, GF0, G0);
        fill(flatLedgersMap, 4, AF0, A0, BF0, B0);
        // sharps, low octaves -- original caveat: octaves 0-2 wrong, need fixing
        fill(sharpLedgersMap, 0, C0, CS0);
        fill(sharpLedgersMap, 2, D0, DS0, E0);
        fill(sharpLedgersMap, 3, F0, FS0, G0);
        fill(sharpLedgersMap, 4, GS0, A0, AS0, B0);
        fill(sharpLedgersMap, 1, C1, CS1);
        fill(sharpLedgersMap, 2, D1, DS1, E1);
        fill(sharpLedgersMap, 3, F1, FS1, G1);
        fill(sharpLedgersMap, 4, GS1, A1, AS1, B1);
        fill(sharpLedgersMap, 2, C2, CS2, D2, DS2, E2);
        fill(sharpLedgersMap, 3, F2, FS2, G2);
        fill(sharpLedgersMap, 4, GS2, A2, AS2);
        fill(sharpLedgersMap, 6, B2);
        // sharps, below the staff
        fill(sharpLedgersMap, 5, C3, CS3);
        fill(sharpLedgersMap, 4, D3, DS3, E3);
        fill(sharpLedgersMap, 3, F3, FS3, G3, GS3);
        fill(sharpLedgersMap, 2, A3, AS3, B3);
        fill(sharpLedgersMap, 1, C4, CS4, D4, DS4);
        // E4 through AS5 sit on or inside the staff: zero ledgers
        fill(sharpLedgersMap, 0, E4, F4, FS4, G4, GS4, A4, AS4, B4);
        fill(sharpLedgersMap, 0, C5, CS5, D5, DS5, E5, F5, FS5, G5, GS5, A5, AS5);
        // sharps, above the staff
        fill(sharpLedgersMap, 1, B5, C6, CS6);
        fill(sharpLedgersMap, 2, D6, DS6);
        fill(sharpLedgersMap, 3, E6);
        fill(sharpLedgersMap, 4, F6);
        fill(sharpLedgersMap, 3, FS6, G6, GS6);
        fill(sharpLedgersMap, 4, A6, AS6, B6);
        fill(sharpLedgersMap, 5, C7, CS7);
        // original caveat: fix after here
        fill(sharpLedgersMap, 2, D7, DS7, E7);
        fill(sharpLedgersMap, 3, F7, FS7, G7);
        fill(sharpLedgersMap, 4, GS7, A7, AS7, B7);
        fill(sharpLedgersMap, 8, C8, CS8);
        fill(sharpLedgersMap, 2, D8, DS8, E8);
        fill(sharpLedgersMap, 3, F8, FS8, G8);
        fill(sharpLedgersMap, 4, GS8, A8, AS8, B8);
    }
}
| |
package org.metaborg.spoofax.eclipse.editor;
import java.util.MissingResourceException;
import java.util.ResourceBundle;
import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.jface.resource.JFaceResources;
import org.eclipse.jface.text.AbstractInformationControl;
import org.eclipse.jface.text.AbstractInformationControlManager;
import org.eclipse.jface.text.IInformationControl;
import org.eclipse.jface.text.IInformationControlCreator;
import org.eclipse.jface.text.IInformationControlExtension2;
import org.eclipse.jface.text.IInformationControlExtension3;
import org.eclipse.jface.text.IRegion;
import org.eclipse.jface.text.ITextHover;
import org.eclipse.jface.text.ITextHoverExtension;
import org.eclipse.jface.text.ITextHoverExtension2;
import org.eclipse.jface.text.ITextViewer;
import org.eclipse.jface.text.IWidgetTokenKeeper;
import org.eclipse.jface.text.IWidgetTokenKeeperExtension;
import org.eclipse.jface.text.IWidgetTokenOwner;
import org.eclipse.jface.text.IWidgetTokenOwnerExtension;
import org.eclipse.jface.text.JFaceTextUtil;
import org.eclipse.jface.text.Region;
import org.eclipse.jface.text.hyperlink.DefaultHyperlinkPresenter;
import org.eclipse.jface.text.hyperlink.IHyperlink;
import org.eclipse.jface.text.hyperlink.IHyperlinkPresenterExtension2;
import org.eclipse.jface.util.Geometry;
import org.eclipse.jface.util.Util;
import org.eclipse.jface.viewers.ColumnLabelProvider;
import org.eclipse.jface.viewers.IStructuredContentProvider;
import org.eclipse.jface.viewers.TableViewer;
import org.eclipse.jface.viewers.Viewer;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.KeyEvent;
import org.eclipse.swt.events.KeyListener;
import org.eclipse.swt.events.MouseAdapter;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.events.MouseListener;
import org.eclipse.swt.events.MouseMoveListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.ShellAdapter;
import org.eclipse.swt.events.ShellEvent;
import org.eclipse.swt.events.TraverseEvent;
import org.eclipse.swt.events.TraverseListener;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.graphics.RGB;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableItem;
/**
* This class is copied in its entirety from org.eclipse.jface.text.hyperlink.MultipleHyperlinkPresenter.
* We also had to copy another class which is not public: org.eclipse.jface.text.hyperlink.HyperlinkMessages.
* The reason for doing so is that we want essentially the functionality of that class, except that no
* hyperlink should be shown on the text if there are multiple hyperlinks. So in the method
* showHyperlinks(IHyperlink[], boolean) the super call to DefaultHyperlinkPresenter has been moved into an
* if statement, so that a hyperlink is shown if and only if there is exactly _one_ hyperlink, whereas a
* menu is shown when there are multiple.
*/
public class MetaborgMultipleHyperlinkPresenter extends DefaultHyperlinkPresenter implements IHyperlinkPresenterExtension2 {

    // True on Windows releases before Vista (os.version major < 6); used to pick popup margins.
    private static final boolean IS_OLD_WINDOWS;

    static {
        int majorVersion= Integer.MAX_VALUE;
        if (Util.isWin32()) {
            String osVersion= System.getProperty("os.version"); //$NON-NLS-1$
            if (osVersion != null) {
                int majorIndex = osVersion.indexOf('.');
                if (majorIndex != -1) {
                    osVersion = osVersion.substring(0, majorIndex);
                    try {
                        majorVersion= Integer.parseInt(osVersion);
                    } catch (NumberFormatException exception) {
                        // use default
                    }
                }
            }
        }
        IS_OLD_WINDOWS= majorVersion < 6; // before Vista (6.0)
    }

    private static final boolean IS_MAC= Util.isMac();
    private static final boolean IS_GTK= Util.isGtk();

    /**
     * Accessor for localized hyperlink messages (copy of the non-public
     * org.eclipse.jface.text.hyperlink.HyperlinkMessages).
     * NOTE(review): BUNDLE_NAME resolves to this nested class's fully qualified name;
     * confirm a matching .properties bundle exists for it on the classpath, otherwise
     * ResourceBundle.getBundle throws when this class is initialized.
     */
    private static class HyperlinkMessages {
        private static final String BUNDLE_NAME= HyperlinkMessages.class.getName();

        private static final ResourceBundle RESOURCE_BUNDLE= ResourceBundle.getBundle(BUNDLE_NAME);

        private HyperlinkMessages() {
        }

        /**
         * Gets a string from the resource bundle.
         *
         * @param key the string used to get the bundle value, must not be
         *            <code>null</code>
         * @return the string from the resource bundle
         */
        public static String getString(String key) {
            try {
                return RESOURCE_BUNDLE.getString(key);
            } catch (MissingResourceException e) {
                // fall back to a marked-up key so a missing entry is visible in the UI
                return '!' + key + '!';
            }
        }
    }

    /**
     * An information control capable of showing a list of hyperlinks. The hyperlinks can be opened.
     */
    private static class LinkListInformationControl extends AbstractInformationControl implements IInformationControlExtension2 {

        // Exposes the IHyperlink[] input directly as the table's elements.
        private static final class LinkContentProvider implements IStructuredContentProvider {

            @Override
            public Object[] getElements(Object inputElement) {
                return (Object[]) inputElement;
            }

            @Override
            public void dispose() {
            }

            @Override
            public void inputChanged(Viewer viewer, Object oldInput, Object newInput) {
            }
        }

        // Renders each hyperlink by its hyperlink text, falling back to a localized "unknown" label.
        private static final class LinkLabelProvider extends ColumnLabelProvider {

            @Override
            public String getText(Object element) {
                IHyperlink link= (IHyperlink)element;
                String text= link.getHyperlinkText();
                if (text != null)
                    return text;
                return HyperlinkMessages.getString("LinkListInformationControl.unknownLink"); //$NON-NLS-1$
            }
        }

        private final MultipleHyperlinkHoverManager fManager;

        // Hyperlinks to show; supplied via setInput before the table is built.
        private IHyperlink[] fInput;
        private Composite fParent;
        private Table fTable;

        private final Color fForegroundColor;
        private final Color fBackgroundColor;

        /**
         * Creates a link list information control with the given shell as parent.
         *
         * @param parentShell the parent shell
         * @param manager the hover manager
         * @param foregroundColor the foreground color, must not be disposed
         * @param backgroundColor the background color, must not be disposed
         */
        public LinkListInformationControl(Shell parentShell, MultipleHyperlinkHoverManager manager, Color foregroundColor, Color backgroundColor) {
            super(parentShell, false);
            fManager= manager;
            fForegroundColor= foregroundColor;
            fBackgroundColor= backgroundColor;
            create();
        }

        @Override
        public void setInformation(String information) {
            //replaced by IInformationControlExtension2#setInput(java.lang.Object)
        }

        @Override
        public void setInput(Object input) {
            fInput= (IHyperlink[]) input;
            deferredCreateContent(fParent);
        }

        @Override
        protected void createContent(Composite parent) {
            fParent= parent;
            // Margins are tuned per platform so the popup looks native.
            GridLayout layout= new GridLayout();
            if (IS_OLD_WINDOWS) {
                layout.marginWidth= 0;
                layout.marginHeight= 4;
                layout.marginRight= 4;
            } else if (IS_MAC) {
                layout.marginWidth= 4;
                layout.marginHeight= 0;
                layout.marginTop= 4;
                layout.marginBottom= 4 - 1;
            } else if (IS_GTK) {
                layout.marginWidth= 4;
                layout.marginHeight= 0;
                layout.marginTop= 4;
                layout.marginBottom= 4 - 2;
            } else {
                layout.marginWidth= 4;
                layout.marginHeight= 4;
            }
            fParent.setLayout(layout);
            fParent.setForeground(fForegroundColor);
            fParent.setBackground(fBackgroundColor);
        }

        @Override
        public Point computeSizeHint() {
            Point preferedSize= getShell().computeSize(SWT.DEFAULT, SWT.DEFAULT, true);

            Point constraints= getSizeConstraints();
            if (constraints == null)
                return preferedSize;

            if (fTable.getVerticalBar() == null || fTable.getHorizontalBar() == null)
                return Geometry.min(constraints, preferedSize);

            int scrollBarWidth= fTable.getVerticalBar().getSize().x;
            int scrollBarHeight= fTable.getHorizontalBar().getSize().y;
            if (IS_MAC && fTable.getScrollbarsMode() == SWT.SCROLLBAR_OVERLAY) {
                // workaround for https://bugs.eclipse.org/387732 : [10.8] Table scrollbar width is 16 (not 15) on Mountain Lion
                scrollBarWidth--;
                scrollBarHeight--;
            }

            // If the content fits within the constraints without one of the scroll
            // bars, subtract that bar's thickness from the hint and hide the bar.
            int width;
            if (preferedSize.y - scrollBarHeight <= constraints.y) {
                width= preferedSize.x - scrollBarWidth;
                fTable.getVerticalBar().setVisible(false);
            } else {
                width= Math.min(preferedSize.x, constraints.x);
            }

            int height;
            if (preferedSize.x - scrollBarWidth <= constraints.x) {
                height= preferedSize.y - scrollBarHeight;
                fTable.getHorizontalBar().setVisible(false);
            } else {
                height= Math.min(preferedSize.y, constraints.y);
            }

            return new Point(width, height);
        }

        // Creates and populates the table lazily, once the input hyperlinks are known (see setInput).
        private void deferredCreateContent(Composite parent) {
            fTable= new Table(parent, SWT.SINGLE | SWT.FULL_SELECTION);
            fTable.setLinesVisible(false);
            fTable.setHeaderVisible(false);
            fTable.setForeground(fForegroundColor);
            fTable.setBackground(fBackgroundColor);
            fTable.setFont(JFaceResources.getDialogFont());

            GridData data= new GridData(SWT.BEGINNING, SWT.BEGINNING, true, true);
            fTable.setLayoutData(data);

            final TableViewer viewer= new TableViewer(fTable);
            viewer.setContentProvider(new LinkContentProvider());
            viewer.setLabelProvider(new LinkLabelProvider());
            viewer.setInput(fInput);
            fTable.setSelection(0);

            registerTableListeners();

            // Ensure a row is selected and the table is focused whenever the shell activates.
            getShell().addShellListener(new ShellAdapter() {

                @Override
                public void shellActivated(ShellEvent e) {
                    if (viewer.getTable().getSelectionCount() == 0) {
                        viewer.getTable().setSelection(0);
                    }

                    viewer.getTable().setFocus();
                }
            });
        }

        private void registerTableListeners() {

            // Let the selection follow the mouse; near the top/bottom edge, step the
            // selection to the adjacent row so hovering scrolls through the list.
            fTable.addMouseMoveListener(new MouseMoveListener() {
                TableItem fLastItem= null;

                @Override
                public void mouseMove(MouseEvent e) {
                    if (fTable.equals(e.getSource())) {
                        Object o= fTable.getItem(new Point(e.x, e.y));
                        // toggle the hand cursor exactly when entering/leaving a row
                        if (fLastItem == null ^ o == null) {
                            fTable.setCursor(o == null ? null : fTable.getDisplay().getSystemCursor(SWT.CURSOR_HAND));
                        }
                        if (o instanceof TableItem) {
                            TableItem item= (TableItem) o;
                            if (!o.equals(fLastItem)) {
                                fLastItem= (TableItem) o;
                                fTable.setSelection(new TableItem[] { fLastItem });
                            } else if (e.y < fTable.getItemHeight() / 4) {
                                // Scroll up
                                int index= fTable.indexOf(item);
                                if (index > 0) {
                                    fLastItem= fTable.getItem(index - 1);
                                    fTable.setSelection(new TableItem[] { fLastItem });
                                }
                            } else if (e.y > fTable.getBounds().height - fTable.getItemHeight() / 4) {
                                // Scroll down
                                int index= fTable.indexOf(item);
                                if (index < fTable.getItemCount() - 1) {
                                    fLastItem= fTable.getItem(index + 1);
                                    fTable.setSelection(new TableItem[] { fLastItem });
                                }
                            }
                        } else if (o == null) {
                            fLastItem= null;
                        }
                    }
                }
            });

            // Open the link on Enter / default selection.
            fTable.addSelectionListener(new SelectionAdapter() {

                @Override
                public void widgetDefaultSelected(SelectionEvent e) {
                    openSelectedLink();
                }
            });

            // Open the link on left-click, but only if the click lands on the selected row.
            fTable.addMouseListener(new MouseAdapter() {

                @Override
                public void mouseUp(MouseEvent e) {
                    if (fTable.getSelectionCount() < 1)
                        return;

                    if (e.button != 1)
                        return;

                    if (fTable.equals(e.getSource())) {
                        Object o= fTable.getItem(new Point(e.x, e.y));
                        TableItem selection= fTable.getSelection()[0];
                        if (selection.equals(o))
                            openSelectedLink();
                    }
                }
            });

            // Close the popup on ESC.
            fTable.addTraverseListener(new TraverseListener() {

                @Override
                public void keyTraversed(TraverseEvent e) {
                    if (e.keyCode == SWT.ESC) {
                        fManager.hideInformationControl();
                    }
                }
            });
        }

        @Override
        public boolean hasContents() {
            return true;
        }

        /**
         * Opens the currently selected link.
         */
        private void openSelectedLink() {
            if (fTable.getSelectionCount() < 1)
                return;

            TableItem selection= fTable.getSelection()[0];
            IHyperlink link= (IHyperlink)selection.getData();
            // hide the popup and restore the caret before opening, so the target
            // editor does not end up behind the hover
            fManager.hideInformationControl();
            fManager.setCaret();
            link.open();
        }
    }

    // Hover that supplies the active hyperlinks as hover info and the popup control creator.
    private class MultipleHyperlinkHover implements ITextHover, ITextHoverExtension, ITextHoverExtension2 {

        /**
         * @see org.eclipse.jface.text.ITextHover#getHoverInfo(org.eclipse.jface.text.ITextViewer, org.eclipse.jface.text.IRegion)
         * @deprecated As of 3.4, replaced by
         *             {@link ITextHoverExtension2#getHoverInfo2(ITextViewer, IRegion)}
         */
        @Deprecated
        @Override
        public String getHoverInfo(ITextViewer textViewer, IRegion hoverRegion) {
            return null;
        }

        @Override
        public IRegion getHoverRegion(ITextViewer textViewer, int offset) {
            return fSubjectRegion;
        }

        @Override
        public Object getHoverInfo2(ITextViewer textViewer, IRegion hoverRegion) {
            return fHyperlinks;
        }

        @Override
        public IInformationControlCreator getHoverControlCreator() {
            return new IInformationControlCreator() {

                @Override
                public IInformationControl createInformationControl(Shell parent) {
                    // mirror the text widget's colors so the popup blends with the editor
                    Color foregroundColor= fTextViewer.getTextWidget().getForeground();
                    Color backgroundColor= fTextViewer.getTextWidget().getBackground();
                    return new LinkListInformationControl(parent, fManager, foregroundColor, backgroundColor);
                }
            };
        }
    }

    private static class MultipleHyperlinkHoverManager extends AbstractInformationControlManager implements IWidgetTokenKeeper, IWidgetTokenKeeperExtension {

        // Hides/disposes the popup on focus loss, mouse movement outside the
        // keep-up zone, key presses and mouse clicks in the text widget.
        private class Closer implements IInformationControlCloser, Listener, KeyListener, MouseListener {

            private Control fSubjectControl;
            private Display fDisplay;
            private IInformationControl fControl;
            private Rectangle fSubjectArea;

            @Override
            public void setInformationControl(IInformationControl control) {
                fControl= control;
            }

            @Override
            public void setSubjectControl(Control subject) {
                fSubjectControl= subject;
            }

            @Override
            public void start(Rectangle subjectArea) {
                fSubjectArea= subjectArea;

                fDisplay= fSubjectControl.getDisplay();
                if (!fDisplay.isDisposed()) {
                    // display-wide filters so focus/mouse events are seen regardless of target widget
                    fDisplay.addFilter(SWT.FocusOut, this);
                    fDisplay.addFilter(SWT.MouseMove, this);
                    fTextViewer.getTextWidget().addKeyListener(this);
                    fTextViewer.getTextWidget().addMouseListener(this);
                }
            }

            @Override
            public void stop() {
                if (fDisplay != null && !fDisplay.isDisposed()) {
                    fDisplay.removeFilter(SWT.FocusOut, this);
                    fDisplay.removeFilter(SWT.MouseMove, this);
                    fTextViewer.getTextWidget().removeKeyListener(this);
                    fTextViewer.getTextWidget().removeMouseListener(this);
                }

                fSubjectArea= null;
            }

            @Override
            public void handleEvent(Event event) {
                switch (event.type) {
                    case SWT.FocusOut:
                        if (!fControl.isFocusControl())
                            disposeInformationControl();
                        break;
                    case SWT.MouseMove:
                        handleMouseMove(event);
                        break;
                }
            }

            /**
             * Handle mouse movement events.
             *
             * @param event the event
             */
            private void handleMouseMove(Event event) {
                if (!(event.widget instanceof Control))
                    return;

                if (fControl.isFocusControl())
                    return;

                Control eventControl= (Control) event.widget;

                //transform coordinates to subject control:
                Point mouseLoc= event.display.map(eventControl, fSubjectControl, event.x, event.y);

                if (fSubjectArea.contains(mouseLoc))
                    return;

                if (inKeepUpZone(mouseLoc.x, mouseLoc.y, ((IInformationControlExtension3) fControl).getBounds()))
                    return;

                if (!isTakingFocusWhenVisible())
                    hideInformationControl();
            }

            /**
             * Tests whether a given mouse location is within the keep-up zone.
             * The hover should not be hidden as long as the mouse stays inside this zone.
             *
             * @param x the x coordinate, relative to the <em>subject control</em>
             * @param y the y coordinate, relative to the <em>subject control</em>
             * @param controlBounds the bounds of the current control
             *
             * @return <code>true</code> iff the mouse event occurred in the keep-up zone
             */
            private boolean inKeepUpZone(int x, int y, Rectangle controlBounds) {
                // +-----------+
                // |subjectArea|
                // +-----------+
                // |also keepUp|
                // ++-----------+-------+
                // | totalBounds |
                // +--------------------+
                if (fSubjectArea.contains(x, y))
                    return true;

                Rectangle iControlBounds= fSubjectControl.getDisplay().map(null, fSubjectControl, controlBounds);
                Rectangle totalBounds= Geometry.copy(iControlBounds);
                if (totalBounds.contains(x, y))
                    return true;

                // the strip between the subject area and the top of the popup also keeps it up
                int keepUpY= fSubjectArea.y + fSubjectArea.height;
                Rectangle alsoKeepUp= new Rectangle(fSubjectArea.x, keepUpY, fSubjectArea.width, totalBounds.y - keepUpY);
                return alsoKeepUp.contains(x, y);
            }

            @Override
            public void keyPressed(KeyEvent e) {
                hideInformationControl();
            }

            @Override
            public void keyReleased(KeyEvent e) {
                if (!isTakingFocusWhenVisible())
                    hideInformationControl();
            }

            @Override
            public void mouseDoubleClick(MouseEvent e) {
            }

            @Override
            public void mouseDown(MouseEvent e) {
            }

            @Override
            public void mouseUp(MouseEvent e) {
                hideInformationControl();
            }
        }

        /**
         * Priority of the hover managed by this manager.
         * Default value: One higher then for the hovers
         * managed by TextViewerHoverManager.
         */
        private static final int WIDGET_TOKEN_PRIORITY= 1;

        private final MultipleHyperlinkHover fHover;
        private final ITextViewer fTextViewer;
        private final MetaborgMultipleHyperlinkPresenter fHyperlinkPresenter;
        private final Closer fCloser;
        // Tracks popup visibility so the presenter can refuse to hide hyperlinks while it is open.
        private boolean fIsControlVisible;

        /**
         * Create a new MultipleHyperlinkHoverManager. The MHHM can show and hide
         * the given MultipleHyperlinkHover inside the given ITextViewer.
         *
         * @param hover the hover to manage
         * @param viewer the viewer to show the hover in
         * @param metaborgMultipleHyperlinkPresenter the hyperlink presenter using this manager to present hyperlinks
         */
        public MultipleHyperlinkHoverManager(MultipleHyperlinkHover hover, ITextViewer viewer, MetaborgMultipleHyperlinkPresenter metaborgMultipleHyperlinkPresenter) {
            super(hover.getHoverControlCreator());

            fHover= hover;
            fTextViewer= viewer;
            fHyperlinkPresenter= metaborgMultipleHyperlinkPresenter;

            fCloser= new Closer();
            setCloser(fCloser);
            fIsControlVisible= false;
        }

        @Override
        protected void computeInformation() {
            IRegion region= fHover.getHoverRegion(fTextViewer, -1);
            if (region == null) {
                setInformation(null, null);
                return;
            }

            Rectangle area= JFaceTextUtil.computeArea(region, fTextViewer);
            if (area == null || area.isEmpty()) {
                setInformation(null, null);
                return;
            }

            Object information= fHover.getHoverInfo2(fTextViewer, region);
            setCustomInformationControlCreator(fHover.getHoverControlCreator());
            setInformation(information, area);
        }

        @Override
        protected Point computeInformationControlLocation(Rectangle subjectArea, Point controlSize) {
            Point result= super.computeInformationControlLocation(subjectArea, controlSize);

            // Shift the popup left when the cursor would otherwise end up past its right edge.
            Point cursorLocation= fTextViewer.getTextWidget().getDisplay().getCursorLocation();
            if (isTakingFocusWhenVisible() || cursorLocation.x <= result.x + controlSize.x)
                return result;

            result.x= cursorLocation.x + 20 - controlSize.x;
            return result;
        }

        @Override
        protected void showInformationControl(Rectangle subjectArea) {
            // Acquire the widget token first so we do not fight other popups for the viewer.
            if (fTextViewer instanceof IWidgetTokenOwnerExtension) {
                if (((IWidgetTokenOwnerExtension)fTextViewer).requestWidgetToken(this, WIDGET_TOKEN_PRIORITY)) {
                    super.showInformationControl(subjectArea);
                    fIsControlVisible= true;
                }
            } else if (fTextViewer instanceof IWidgetTokenOwner) {
                if (((IWidgetTokenOwner)fTextViewer).requestWidgetToken(this)) {
                    super.showInformationControl(subjectArea);
                    fIsControlVisible= true;
                }
            } else {
                super.showInformationControl(subjectArea);
                fIsControlVisible= true;
            }
        }

        /**
         * Sets the caret where hyperlinking got initiated.
         *
         * @since 3.5
         */
        private void setCaret() {
            fHyperlinkPresenter.setCaret();
        }

        @Override
        protected void hideInformationControl() {
            super.hideInformationControl();

            if (fTextViewer instanceof IWidgetTokenOwner) {
                ((IWidgetTokenOwner) fTextViewer).releaseWidgetToken(this);
            }

            fIsControlVisible= false;
            fHyperlinkPresenter.hideHyperlinks();
        }

        @Override
        public void disposeInformationControl() {
            super.disposeInformationControl();

            if (fTextViewer instanceof IWidgetTokenOwner) {
                ((IWidgetTokenOwner) fTextViewer).releaseWidgetToken(this);
            }

            fIsControlVisible= false;
            fHyperlinkPresenter.hideHyperlinks();
        }

        @Override
        public boolean requestWidgetToken(IWidgetTokenOwner owner) {
            hideInformationControl();
            return true;
        }

        @Override
        public boolean requestWidgetToken(IWidgetTokenOwner owner, int priority) {
            if (priority < WIDGET_TOKEN_PRIORITY)
                return false;

            hideInformationControl();
            return true;
        }

        @Override
        public boolean setFocus(IWidgetTokenOwner owner) {
            return isTakingFocusWhenVisible();
        }

        /**
         * Returns <code>true</code> if the information control managed by
         * this manager is visible, <code>false</code> otherwise.
         *
         * @return <code>true</code> if information control is visible
         */
        public boolean isInformationControlVisible() {
            return fIsControlVisible;
        }
    }

    private ITextViewer fTextViewer;

    // The hyperlinks currently being presented; handed to the popup as hover info.
    private IHyperlink[] fHyperlinks;
    // Smallest region spanning all active hyperlink regions; the hover's subject region.
    private Region fSubjectRegion;
    private MultipleHyperlinkHoverManager fManager;

    /**
     * The offset in the text viewer where hyperlinking got initiated.
     * @since 3.5
     */
    private int fCursorOffset;

    /**
     * Creates a new multiple hyperlink presenter which uses {@link #HYPERLINK_COLOR} to read the
     * color from the given preference store.
     *
     * @param store the preference store
     */
    public MetaborgMultipleHyperlinkPresenter(IPreferenceStore store) {
        super(store);
    }

    /**
     * Creates a new multiple hyperlink presenter.
     *
     * @param color the hyperlink color, to be disposed by the caller
     */
    public MetaborgMultipleHyperlinkPresenter(RGB color) {
        super(color);
    }

    @Override
    public void install(ITextViewer viewer) {
        super.install(viewer);
        fTextViewer= viewer;

        fManager= new MultipleHyperlinkHoverManager(new MultipleHyperlinkHover(), fTextViewer, this);
        fManager.install(viewer.getTextWidget());
        fManager.setSizeConstraints(100, 12, false, true);
    }

    @Override
    public void uninstall() {
        super.uninstall();

        if (fTextViewer != null) {
            fManager.dispose();

            fTextViewer= null;
        }
    }

    @Override
    public boolean canShowMultipleHyperlinks() {
        return true;
    }

    @Override
    public boolean canHideHyperlinks() {
        // Keep the hyperlinks alive while the list popup is open.
        return !fManager.isInformationControlVisible();
    }

    @Override
    public void hideHyperlinks() {
        super.hideHyperlinks();

        fHyperlinks= null;
    }

    @Override
    public void showHyperlinks(IHyperlink[] hyperlinks) {
        showHyperlinks(hyperlinks, false);
    }

    /**
     * {@inheritDoc}
     *
     * @since 3.7
     */
    @Override
    public void showHyperlinks(IHyperlink[] activeHyperlinks, boolean takesFocusWhenVisible) {
        fManager.takesFocusWhenVisible(takesFocusWhenVisible);

        fSubjectRegion= null;
        fHyperlinks= activeHyperlinks;

        // Deviation from the Eclipse original: delegate to the superclass (underlined
        // hyperlink on the text) only for a single link; for multiple links only the
        // list popup below is shown.
        if (activeHyperlinks.length == 1) {
            super.showHyperlinks(new IHyperlink[] { activeHyperlinks[0] });
            return;
        }

        // Subject region = smallest region spanning all hyperlink regions.
        int start= activeHyperlinks[0].getHyperlinkRegion().getOffset();
        int end= start + activeHyperlinks[0].getHyperlinkRegion().getLength();

        for (int i= 1; i < activeHyperlinks.length; i++) {
            int hstart= activeHyperlinks[i].getHyperlinkRegion().getOffset();
            int hend= hstart + activeHyperlinks[i].getHyperlinkRegion().getLength();

            start= Math.min(start, hstart);
            end= Math.max(end, hend);
        }

        fSubjectRegion= new Region(start, end - start);
        fCursorOffset= JFaceTextUtil.getOffsetForCursorLocation(fTextViewer);

        fManager.showInformation();
    }

    /**
     * Sets the caret where hyperlinking got initiated.
     *
     * @since 3.5
     */
    private void setCaret() {
        Point selectedRange= fTextViewer.getSelectedRange();
        // Restore the caret only if the current selection does not already lie inside the subject region.
        if (fCursorOffset != -1 && !(fSubjectRegion.getOffset() <= selectedRange.x && selectedRange.x + selectedRange.y <= fSubjectRegion.getOffset() + fSubjectRegion.getLength()))
            fTextViewer.setSelectedRange(fCursorOffset, 0);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.deltaspike.test.jsf.impl.config.view.navigation.syntax.uc011;
import org.apache.deltaspike.core.api.config.view.metadata.ConfigDescriptor;
import org.apache.deltaspike.core.api.config.view.metadata.ViewConfigDescriptor;
import org.apache.deltaspike.core.api.config.view.metadata.ViewConfigResolver;
import org.apache.deltaspike.core.spi.config.view.ViewConfigNode;
import org.apache.deltaspike.jsf.api.config.view.Folder;
import org.apache.deltaspike.jsf.api.config.view.View;
import org.apache.deltaspike.jsf.impl.config.view.ViewConfigExtension;
import org.apache.deltaspike.jsf.impl.config.view.ViewConfigResolverProducer;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class ViewConfigTest
{
private ViewConfigExtension viewConfigExtension;
private ViewConfigResolverProducer viewConfigResolverProducer;
/**
 * Creates a fresh view-config extension and a resolver producer backed by it
 * before each test, so tests do not share view-config state.
 */
@Before
public void before()
{
    ViewConfigExtension extension = new ViewConfigExtension();
    viewConfigExtension = extension;
    viewConfigResolverProducer = new ViewConfigResolverProducer(extension);
}
/**
 * Clears the cached view-config state after each test run.
 */
@After
public void after()
{
    viewConfigExtension.freeViewConfigCache(null);
}
/**
 * Builds the config tree for {@link Pages} and verifies, for every node, its
 * position in the tree (length of the ancestor chain up to the root), its
 * children, its own meta-data and that no meta-data is stored as inherited
 * at tree level. The original inline assertions (which also contained
 * duplicated assertion pairs) are factored into the helpers below.
 */
@Test
public void testMetaDataInheritanceInTree()
{
    this.viewConfigExtension.addPageDefinition(Pages.class);

    // Pages: top-level folder, directly below the root node
    ViewConfigNode node = this.viewConfigExtension.findNode(Pages.class);
    assertNodeState(node, 1, 3, 3);

    // Pages.Index: leaf page without own meta-data
    node = this.viewConfigExtension.findNode(Pages.Index.class);
    assertNodeState(node, 2, 0, 0);

    // Pages.Admin: folder node without own meta-data
    node = this.viewConfigExtension.findNode(Pages.Admin.class);
    assertNodeState(node, 2, 1, 0);

    // Pages.Admin.Index: leaf page whose @View meta-data carries default values
    node = this.viewConfigExtension.findNode(Pages.Admin.Index.class);
    assertNodeState(node, 3, 0, 1);
    assertDefaultViewMetaData(node);

    // Pages.Public: folder node carrying a @Folder annotation
    node = this.viewConfigExtension.findNode(Pages.Public.class);
    assertNodeState(node, 2, 1, 1);
    Assert.assertEquals(Folder.class, node.getMetaData().iterator().next().annotationType());

    // Pages.Public.Index: leaf page whose @View meta-data carries default values
    node = this.viewConfigExtension.findNode(Pages.Public.Index.class);
    assertNodeState(node, 3, 0, 1);
    assertDefaultViewMetaData(node);
}

/**
 * Asserts the structural state of a node: a non-null ancestor chain of exactly
 * {@code ancestorCount} nodes ending at the root, the expected number of
 * children and own meta-data entries, and an empty inherited-meta-data set.
 */
private void assertNodeState(ViewConfigNode node, int ancestorCount, int childCount, int metaDataCount)
{
    Assert.assertNotNull(node);

    // walk up: exactly 'ancestorCount' non-null parents, then null above the root
    ViewConfigNode ancestor = node.getParent();
    for (int i = 0; i < ancestorCount; i++)
    {
        Assert.assertNotNull(ancestor);
        ancestor = ancestor.getParent();
    }
    Assert.assertNull(ancestor);

    Assert.assertNotNull(node.getChildren());
    Assert.assertEquals(childCount, node.getChildren().size());

    Assert.assertNotNull(node.getMetaData());
    Assert.assertEquals(metaDataCount, node.getMetaData().size());

    Assert.assertNotNull(node.getInheritedMetaData());
    Assert.assertEquals(0, node.getInheritedMetaData().size());
}

/**
 * Asserts that the node's single meta-data entry is a {@link View} annotation
 * with all attributes at their default values.
 */
private void assertDefaultViewMetaData(ViewConfigNode node)
{
    View view = (View) node.getMetaData().iterator().next();
    Assert.assertEquals(View.NavigationMode.DEFAULT, view.navigation());
    Assert.assertEquals(View.ViewParameterMode.DEFAULT, view.viewParams());
    Assert.assertEquals("", view.name());
    Assert.assertEquals(View.Extension.DEFAULT, view.extension());
}
@Test
public void testMetaDataInheritanceInViewConfig()
{
    this.viewConfigExtension.addPageDefinition(Pages.class);

    ViewConfigResolver resolver = this.viewConfigResolverProducer.createViewConfigResolver();

    // Root folder: the @Folder meta-data resolved for the Pages config class itself.
    ConfigDescriptor<?> rootDescriptor = resolver.getConfigDescriptor(Pages.class);
    Assert.assertNotNull(rootDescriptor);
    Assert.assertNotNull(rootDescriptor.getConfigClass());
    Assert.assertEquals(Pages.class, rootDescriptor.getConfigClass());
    Assert.assertNotNull(rootDescriptor.getMetaData());
    Assert.assertEquals(1, rootDescriptor.getMetaData().size());
    Assert.assertEquals(1, rootDescriptor.getMetaData(Folder.class).size());
    Assert.assertEquals("/", rootDescriptor.getMetaData(Folder.class).iterator().next().name());

    // /index.xhtml: the merged @View meta-data for Pages.Index.
    ViewConfigDescriptor indexDescriptor = resolver.getViewConfigDescriptor(Pages.Index.class);
    Assert.assertNotNull(indexDescriptor);
    Assert.assertEquals("/index.xhtml", indexDescriptor.getViewId());
    Assert.assertEquals(Pages.Index.class, indexDescriptor.getConfigClass());
    Assert.assertNotNull(indexDescriptor.getMetaData());
    Assert.assertEquals(1, indexDescriptor.getMetaData().size());
    View indexView = indexDescriptor.getMetaData(View.class).iterator().next();
    Assert.assertEquals(View.NavigationMode.FORWARD, indexView.navigation());
    Assert.assertEquals(View.ViewParameterMode.EXCLUDE, indexView.viewParams());
    Assert.assertEquals("index", indexView.name());
    Assert.assertEquals("xhtml", indexView.extension());
    Assert.assertEquals("/", indexView.basePath());

    // /admin/ folder: @Folder meta-data for the nested Pages.Admin config class.
    ConfigDescriptor<?> adminDescriptor = resolver.getConfigDescriptor(Pages.Admin.class);
    Assert.assertNotNull(adminDescriptor);
    Assert.assertNotNull(adminDescriptor.getConfigClass());
    Assert.assertEquals(Pages.Admin.class, adminDescriptor.getConfigClass());
    Assert.assertNotNull(adminDescriptor.getMetaData());
    Assert.assertEquals(1, adminDescriptor.getMetaData().size());
    Assert.assertEquals(1, adminDescriptor.getMetaData(Folder.class).size());
    Assert.assertEquals("/admin/", adminDescriptor.getMetaData(Folder.class).iterator().next().name());

    // /admin/index.xhtml: inherits the same @View settings, but with the admin base path.
    ViewConfigDescriptor adminIndexDescriptor = resolver.getViewConfigDescriptor(Pages.Admin.Index.class);
    Assert.assertNotNull(adminIndexDescriptor);
    Assert.assertEquals("/admin/index.xhtml", adminIndexDescriptor.getViewId());
    Assert.assertEquals(Pages.Admin.Index.class, adminIndexDescriptor.getConfigClass());
    Assert.assertNotNull(adminIndexDescriptor.getMetaData());
    Assert.assertEquals(1, adminIndexDescriptor.getMetaData().size());
    View adminIndexView = adminIndexDescriptor.getMetaData(View.class).iterator().next();
    Assert.assertEquals(View.NavigationMode.FORWARD, adminIndexView.navigation());
    Assert.assertEquals(View.ViewParameterMode.EXCLUDE, adminIndexView.viewParams());
    Assert.assertEquals("index", adminIndexView.name());
    Assert.assertEquals("xhtml", adminIndexView.extension());
    Assert.assertEquals("/admin/", adminIndexView.basePath());

    // /public/ folder: @Folder meta-data for the nested Pages.Public config class.
    ConfigDescriptor<?> publicDescriptor = resolver.getConfigDescriptor(Pages.Public.class);
    Assert.assertNotNull(publicDescriptor);
    Assert.assertNotNull(publicDescriptor.getConfigClass());
    Assert.assertEquals(Pages.Public.class, publicDescriptor.getConfigClass());
    Assert.assertNotNull(publicDescriptor.getMetaData());
    Assert.assertEquals(1, publicDescriptor.getMetaData().size());
    Assert.assertEquals(1, publicDescriptor.getMetaData(Folder.class).size());
    Assert.assertEquals("/public/", publicDescriptor.getMetaData(Folder.class).iterator().next().name());

    // /public/index.xhtml: inherits the same @View settings, but with the public base path.
    ViewConfigDescriptor publicIndexDescriptor = resolver.getViewConfigDescriptor(Pages.Public.Index.class);
    Assert.assertNotNull(publicIndexDescriptor);
    Assert.assertEquals("/public/index.xhtml", publicIndexDescriptor.getViewId());
    Assert.assertEquals(Pages.Public.Index.class, publicIndexDescriptor.getConfigClass());
    Assert.assertNotNull(publicIndexDescriptor.getMetaData());
    Assert.assertEquals(1, publicIndexDescriptor.getMetaData().size());
    View publicIndexView = publicIndexDescriptor.getMetaData(View.class).iterator().next();
    Assert.assertEquals(View.NavigationMode.FORWARD, publicIndexView.navigation());
    Assert.assertEquals(View.ViewParameterMode.EXCLUDE, publicIndexView.viewParams());
    Assert.assertEquals("index", publicIndexView.name());
    Assert.assertEquals("xhtml", publicIndexView.extension());
    Assert.assertEquals("/public/", publicIndexView.basePath());
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hive.common.util;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.hive.common.Pool;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.annotations.VisibleForTesting;
/** Simple object pool of limited size. Implemented as a lock-free ring buffer;
 * may fail to produce items if there are too many concurrent users. */
public class FixedSizedObjectPool<T> implements Pool<T> {
  public static final Logger LOG = LoggerFactory.getLogger(FixedSizedObjectPool.class);

  /**
   * Ring buffer has two "markers" - where objects are present ('objects' list), and where they are
   * removed ('empty' list). This class contains bit shifts and masks for one marker's components
   * within a long, and provides utility methods to get/set the components.
   * Marker consists of (examples here for 'objects' list; same for 'empty' list):
   *  - the marker itself. Set to NO_MARKER if list is empty (e.g. no objects to take from pool),
   *    otherwise contains the array index of the first element of the list.
   *  - the 'delta'. Number of elements from the marker that is being modified. Each concurrent
   *    modification (e.g. take call) increments this to claim an array index. Delta elements
   *    from the marker cannot be touched by other threads. Delta can never overshoot the other
   *    marker (or own marker if other is empty), or overflow MAX_DELTA. If delta is set to
   *    NO_DELTA, it means the marker has been modified during 'take' operation and list cannot
   *    be touched (see below). In any of these cases, take returns null.
   *  - the 'refcount'/'rc'. Number of operations occurring on the marker. Each e.g. take incs
   *    this; when the last of the overlapping operations decreases the refcount, it 'commits'
   *    the modifications by moving the marker according to delta and resetting delta to 0.
   * If the other list does not exist, it's also created (i.e. first 'offer' to a new pool with
   * empty 'objects' list will create the 'objects' list); if the list is being exhausted to empty
   * by other op (e.g. pool has 2 objects, 2 takes are in progress when offer commits), the
   * marker of the other list is still reset to new location, and delta is set to NO_DELTA,
   * preventing operations on the lists until the exhausting ops commit and set delta to 0.
   */
  private static final class Marker {
    // Currently the long must fit 2 markers. Setting these bit sizes determines the balance
    // between max pool size allowed and max concurrency allowed. This balance here is not what we
    // want (up to 254 of each op while only 65535 objects limit), but it uses whole bytes and is
    // good for now. Delta and RC take the same number of bits; usually it doesn't make sense to
    // have more delta.
    private static final long MARKER_MASK = 0xffffL, DELTA_MASK = 0xffL, RC_MASK = 0xffL;

    public Marker(int markerShift, int deltaShift, int rcShift) {
      this.markerShift = markerShift;
      this.deltaShift = deltaShift;
      this.rcShift = rcShift;
    }

    // Bit offsets of this marker's three components within the packed 64-bit state long.
    int markerShift, deltaShift, rcShift;

    /** Returns {@code dest} with this marker's position field replaced by {@code val}. */
    public final long setMarker(long dest, long val) {
      return setValue(dest, val, markerShift, MARKER_MASK);
    }

    /** Returns {@code dest} with this marker's delta field replaced by {@code val}. */
    public final long setDelta(long dest, long val) {
      return setValue(dest, val, deltaShift, DELTA_MASK);
    }

    /** Returns {@code dest} with this marker's refcount field replaced by {@code val}. */
    public final long setRc(long dest, long val) {
      return setValue(dest, val, rcShift, RC_MASK);
    }

    /** Extracts this marker's position field from the packed state {@code src}. */
    public final long getMarker(long src) {
      return getValue(src, markerShift, MARKER_MASK);
    }

    /** Extracts this marker's delta field from the packed state {@code src}. */
    public final long getDelta(long src) {
      return getValue(src, deltaShift, DELTA_MASK);
    }

    /** Extracts this marker's refcount field from the packed state {@code src}. */
    public final long getRc(long src) {
      return getValue(src, rcShift, RC_MASK);
    }

    // Clears the field at 'offset' and adds the new value in; assumes 'val' fits within 'mask'.
    private final long setValue(long dest, long val, int offset, long mask) {
      return (dest & (~(mask << offset))) + (val << offset);
    }

    private final long getValue(long src, int offset, long mask) {
      return (src >>> offset) & mask;
    }

    /** Debug helper: renders this marker's {position, delta, rc} triple from a state value. */
    public String toString(long markers) {
      return "{" + getMarker(markers) + ", " + getDelta(markers) + ", " + getRc(markers) + "}";
    }
  }

  // Sentinel values: all-ones marker/delta are reserved, hence the "- 1" maxima below.
  private static final long NO_MARKER = Marker.MARKER_MASK, NO_DELTA = Marker.DELTA_MASK,
      MAX_DELTA = NO_DELTA - 1, MAX_SIZE = NO_MARKER - 1;
  private static final long NO_INDEX = 0; // The array index can't be reserved.

  // See Marker class comment. OBJECTS occupies the high half of the state long, EMPTY the low.
  private static final Marker OBJECTS = new Marker(48, 40, 32);
  private static final Marker EMPTY = new Marker(16, 8, 0);

  // Both markers packed into a single long so they can be updated with one CAS.
  private final AtomicLong state;
  private final PoolObjectHelper<T> helper;
  private final T[] pool;

  public FixedSizedObjectPool(int size, PoolObjectHelper<T> helper) {
    this(size, helper, LOG.isTraceEnabled());
  }

  @VisibleForTesting
  public FixedSizedObjectPool(int size, PoolObjectHelper<T> helper, boolean doTraceLog) {
    if (size > MAX_SIZE) {
      throw new AssertionError("Size must be <= " + MAX_SIZE);
    }
    this.helper = helper;
    @SuppressWarnings("unchecked")
    T[] poolTmp = (T[])new Object[size];
    pool = poolTmp;
    // Initially, all deltas and rcs are 0; empty list starts at 0; there are no objects to take.
    state = new AtomicLong(OBJECTS.setMarker(0, NO_MARKER));
    casLog = doTraceLog ? new CasLog() : null;
  }

  /**
   * Takes an object from the pool. Falls back to {@code helper.create()} when the pool is
   * empty, zero-sized, or a lock-free reservation spuriously fails; never returns null
   * (assuming the helper doesn't).
   */
  @Override
  public T take() {
    T result = pool.length > 0 ? takeImpl() : null;
    return (result == null) ? helper.create() : result;
  }

  /** Returns an object to the pool on a best-effort basis; drops it silently on failure. */
  @Override
  public void offer(T t) {
    tryOffer(t);
  }

  /** Returns the fixed capacity of the pool (not the current number of pooled objects). */
  @Override
  public int size() {
    return pool.length;
  }

  /**
   * Attempts to return an object to the pool.
   * @return true if the object was stored; false for a null object, a zero-sized pool,
   *         or when the ring is full/too contended to accept it.
   */
  @VisibleForTesting
  public boolean tryOffer(T t) {
    if (t == null || pool.length == 0) return false; // 0 size means no-pooling case - passthru.
    helper.resetBeforeOffer(t);
    return offerImpl(t);
  }

  // Reserves a slot on the 'objects' list, removes the object from it, then commits the
  // reservation (moving the marker and potentially creating/resetting the 'empty' list).
  private T takeImpl() {
    long oldState = reserveArrayIndex(OBJECTS, EMPTY);
    if (oldState == NO_INDEX) return null; // For whatever reason, reserve failed.
    long originalMarker = OBJECTS.getMarker(oldState), delta = OBJECTS.getDelta(oldState);
    int arrayIndex = (int)getArrayIndex(originalMarker, delta);
    T result = pool[arrayIndex];
    if (result == null) {
      throwError(oldState, arrayIndex, "null");
    }
    pool[arrayIndex] = null;
    commitArrayIndex(OBJECTS, EMPTY, originalMarker);
    return result;
  }

  // Mirror image of takeImpl: reserves a slot on the 'empty' list and stores the object.
  private boolean offerImpl(T t) {
    long oldState = reserveArrayIndex(EMPTY, OBJECTS);
    if (oldState == NO_INDEX) return false; // For whatever reason, reserve failed.
    long originalMarker = EMPTY.getMarker(oldState), delta = EMPTY.getDelta(oldState);
    int arrayIndex = (int)getArrayIndex(originalMarker, delta);
    if (pool[arrayIndex] != null) {
      throwError(oldState, arrayIndex, "non-null");
    }
    pool[arrayIndex] = t;
    commitArrayIndex(EMPTY, OBJECTS, originalMarker);
    return true;
  }

  // Invariant violation: a reserved slot had unexpected contents. Dumps the CAS trace (if
  // enabled), logs, and throws - this indicates a bug in the ring-buffer logic itself.
  private void throwError(long oldState, int arrayIndex, String type) {
    long newState = state.get();
    if (casLog != null) {
      casLog.dumpLog(true);
    }
    String msg = "Unexpected " + type + " at " + arrayIndex + "; state was "
        + toString(oldState) + ", now " + toString(newState);
    LOG.info(msg);
    throw new AssertionError(msg);
  }

  /**
   * Claims one array index on the 'from' list by incrementing its delta and refcount via CAS.
   * @return the pre-CAS state (from which the caller derives its claimed index), or
   *         NO_INDEX if the claim failed (empty list, contention, or concurrent re-creation).
   */
  private long reserveArrayIndex(Marker from, Marker to) {
    while (true) {
      long oldVal = state.get(), marker = from.getMarker(oldVal), delta = from.getDelta(oldVal),
          rc = from.getRc(oldVal), toMarker = to.getMarker(oldVal), toDelta = to.getDelta(oldVal);
      if (marker == NO_MARKER) return NO_INDEX; // The list is empty.
      if (delta == MAX_DELTA) return NO_INDEX; // Too many concurrent operations; spurious failure.
      if (delta == NO_DELTA) return NO_INDEX; // List is drained and recreated concurrently.
      if (toDelta == NO_DELTA) { // Same for the OTHER list; spurious.
        // TODO: the fact that concurrent re-creation of other list necessitates full stop is not
        //       ideal... the reason is that the list NOT being re-created still uses the list
        //       being re-created for boundary check; it needs the old value of the other marker.
        //       However, NO_DELTA means the other marker was already set to a new value. For now,
        //       assume concurrent re-creation is rare and the gap before commit is tiny.
        return NO_INDEX;
      }
      assert rc <= delta; // There can never be more concurrent takers than uncommitted ones.
      long newDelta = incDeltaValue(marker, toMarker, delta); // Increase target list pos.
      if (newDelta == NO_DELTA) return NO_INDEX; // Target list is being drained.
      long newVal = from.setRc(from.setDelta(oldVal, newDelta), rc + 1); // Set delta and refcount.
      if (setState(oldVal, newVal)) return oldVal;
    }
  }

  /**
   * Releases a reservation made by reserveArrayIndex. The last overlapping operation to
   * commit (rc hits 0) moves the 'from' marker by its delta, and creates or resets the
   * 'to' list as needed (see the Marker class comment).
   */
  private void commitArrayIndex(Marker from, Marker to, long originalMarker) {
    while (true) {
      long oldVal = state.get(), rc = from.getRc(oldVal);
      long newVal = from.setRc(oldVal, rc - 1); // Decrease refcount.
      assert rc > 0;
      if (rc == 1) {
        // We are the last of the concurrent operations to finish. Commit.
        long marker = from.getMarker(oldVal), delta = from.getDelta(oldVal),
            otherMarker = to.getMarker(oldVal), otherDelta = to.getDelta(oldVal);
        assert rc <= delta;
        // Move marker according to delta, change delta to 0.
        long newMarker = applyDeltaToMarker(marker, otherMarker, delta);
        newVal = from.setDelta(from.setMarker(newVal, newMarker), 0);
        if (otherMarker == NO_MARKER) {
          // The other list doesn't exist, create it at the first index of our op.
          assert otherDelta == 0;
          newVal = to.setMarker(newVal, originalMarker);
        } else if (otherDelta > 0 && otherDelta != NO_DELTA
            && applyDeltaToMarker(otherMarker, marker, otherDelta) == NO_MARKER) {
          // The other list will be exhausted when it commits. Create new one pending that commit.
          newVal = to.setDelta(to.setMarker(newVal, originalMarker), NO_DELTA);
        }
      }
      if (setState(oldVal, newVal)) return;
    }
  }

  // Single CAS point for all state transitions; records the transition when tracing is on.
  private boolean setState(long oldVal, long newVal) {
    boolean result = state.compareAndSet(oldVal, newVal);
    if (result && casLog != null) {
      casLog.log(oldVal, newVal);
    }
    return result;
  }

  // Returns delta + 1, or NO_DELTA if growing the claim would collide with the other
  // list's marker (i.e. the list is being drained to empty).
  private long incDeltaValue(long markerFrom, long otherMarker, long delta) {
    if (delta == pool.length) return NO_DELTA; // The (pool-sized) list is being fully drained.
    long result = delta + 1;
    if (getArrayIndex(markerFrom, result) == getArrayIndex(otherMarker, 1)) {
      return NO_DELTA; // The list is being drained, cannot increase the delta anymore.
    }
    return result;
  }

  // Computes the new marker position after 'delta' committed operations; returns NO_MARKER
  // when the move exhausts the list.
  private long applyDeltaToMarker(long marker, long markerLimit, long delta) {
    if (delta == NO_DELTA) return marker; // List was recreated while we were exhausting it.
    if (delta == pool.length) {
      assert markerLimit == NO_MARKER; // If we had the entire pool, other list couldn't exist.
      return NO_MARKER; // We exhausted the entire-pool-sized list.
    }
    marker = getArrayIndex(marker, delta); // Just move the marker according to delta.
    if (marker == markerLimit) return NO_MARKER; // We hit the limit - the list was exhausted.
    return marker;
  }

  // Ring-buffer index arithmetic: marker + delta, wrapped modulo pool capacity.
  private long getArrayIndex(long marker, long delta) {
    marker += delta;
    if (marker >= pool.length) {
      marker -= pool.length; // Wrap around at the end of buffer.
    }
    return marker;
  }

  /** Debug helper: renders both packed markers as "{m, d, rc}, {m, d, rc}". */
  static String toString(long markers) {
    return OBJECTS.toString(markers) + ", " + EMPTY.toString(markers);
  }

  // TODO: Temporary for debugging. Doesn't interfere with MTT failures (unlike LOG.debug).
  private final static class CasLog {
    private final int size;
    private final long[] log; // flat pairs: [oldVal, newVal, oldVal, newVal, ...]
    private final AtomicLong offset = new AtomicLong(-1);

    public CasLog() {
      size = 1 << 14 /* 256Kb in longs */;
      log = new long[size];
    }

    // Appends one (old, new) state pair; wraps around via the power-of-two mask.
    public void log(long oldVal, long newVal) {
      int ix = (int)((offset.incrementAndGet() << 1) & (size - 1));
      log[ix] = oldVal;
      log[ix + 1] = newVal;
    }

    // Dumps the recorded transitions; optionally sleeps first so in-flight ops can log theirs.
    // NOTE(review): if more than size/2 transitions were ever logged, 'i << 1' below can run
    // past the array (offset is not clamped to the ring size) - debug-only code, but confirm
    // before relying on it under heavy contention.
    public synchronized void dumpLog(boolean doSleep) {
      if (doSleep) {
        try {
          Thread.sleep(100);
        } catch (InterruptedException e) {
        }
      }
      int logSize = (int)offset.get();
      // TODO: dump the end if wrapping around?
      for (int i = 0; i < logSize; ++i) {
        LOG.info("CAS history dump: " + FixedSizedObjectPool.toString(log[i << 1]) + " => "
            + FixedSizedObjectPool.toString(log[(i << 1) + 1]));
      }
      offset.set(0);
    }
  }
  // Null unless trace logging was enabled at construction time.
  private final CasLog casLog;
}
| |
// Copyright (c) 2003-2004 Brian Wellington (bwelling@xbill.org)
// Parts of this are derived from lib/dns/xfrin.c from BIND 9; its copyright
// notice follows.
/*
* Copyright (C) 1999-2001 Internet Software Consortium.
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SOFTWARE CONSORTIUM
* DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
* INTERNET SOFTWARE CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
* INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
* FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
* NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
* WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
package org.xbill.DNS;
import java.io.*;
import java.net.*;
import java.util.*;
/**
* An incoming DNS Zone Transfer. To use this class, first initialize an
* object, then call the run() method. If run() doesn't throw an exception
* the result will either be an IXFR-style response, an AXFR-style response,
* or an indication that the zone is up to date.
*
* @author Brian Wellington
*/
public class ZoneTransferIn {
private static final int INITIALSOA = 0;
private static final int FIRSTDATA = 1;
private static final int IXFR_DELSOA = 2;
private static final int IXFR_DEL = 3;
private static final int IXFR_ADDSOA = 4;
private static final int IXFR_ADD = 5;
private static final int AXFR = 6;
private static final int END = 7;
private Name zname;
private int qtype;
private int dclass;
private long ixfr_serial;
private boolean want_fallback;
private ZoneTransferHandler handler;
private SocketAddress localAddress;
private SocketAddress address;
private TCPClient client;
private TSIG tsig;
private TSIG.StreamVerifier verifier;
private long timeout = 900 * 1000;
private int state;
private long end_serial;
private long current_serial;
private Record initialsoa;
private int rtype;
public static class Delta {
/**
* All changes between two versions of a zone in an IXFR response.
*/
/** The starting serial number of this delta. */
public long start;
/** The ending serial number of this delta. */
public long end;
/** A list of records added between the start and end versions */
public List adds;
/** A list of records deleted between the start and end versions */
public List deletes;
private
Delta() {
adds = new ArrayList();
deletes = new ArrayList();
}
}
public static interface ZoneTransferHandler {
/**
* Handles a Zone Transfer.
*/
/**
* Called when an AXFR transfer begins.
*/
public void startAXFR() throws ZoneTransferException;
/**
* Called when an IXFR transfer begins.
*/
public void startIXFR() throws ZoneTransferException;
/**
* Called when a series of IXFR deletions begins.
* @param soa The starting SOA.
*/
public void startIXFRDeletes(Record soa) throws ZoneTransferException;
/**
* Called when a series of IXFR adds begins.
* @param soa The starting SOA.
*/
public void startIXFRAdds(Record soa) throws ZoneTransferException;
/**
* Called for each content record in an AXFR.
* @param r The DNS record.
*/
public void handleRecord(Record r) throws ZoneTransferException;
};
private static class BasicHandler implements ZoneTransferHandler {
private List axfr;
private List ixfr;
public void startAXFR() {
axfr = new ArrayList();
}
public void startIXFR() {
ixfr = new ArrayList();
}
public void startIXFRDeletes(Record soa) {
Delta delta = new Delta();
delta.deletes.add(soa);
delta.start = getSOASerial(soa);
ixfr.add(delta);
}
public void startIXFRAdds(Record soa) {
Delta delta = (Delta) ixfr.get(ixfr.size() - 1);
delta.adds.add(soa);
delta.end = getSOASerial(soa);
}
public void handleRecord(Record r) {
List list;
if (ixfr != null) {
Delta delta = (Delta) ixfr.get(ixfr.size() - 1);
if (delta.adds.size() > 0)
list = delta.adds;
else
list = delta.deletes;
} else
list = axfr;
list.add(r);
}
};
private
ZoneTransferIn() {}
private
ZoneTransferIn(Name zone, int xfrtype, long serial, boolean fallback,
SocketAddress address, TSIG key)
{
this.address = address;
this.tsig = key;
if (zone.isAbsolute())
zname = zone;
else {
try {
zname = Name.concatenate(zone, Name.root);
}
catch (NameTooLongException e) {
throw new IllegalArgumentException("ZoneTransferIn: " +
"name too long");
}
}
qtype = xfrtype;
dclass = DClass.IN;
ixfr_serial = serial;
want_fallback = fallback;
state = INITIALSOA;
}
/**
* Instantiates a ZoneTransferIn object to do an AXFR (full zone transfer).
* @param zone The zone to transfer.
* @param address The host/port from which to transfer the zone.
* @param key The TSIG key used to authenticate the transfer, or null.
* @return The ZoneTransferIn object.
* @throws UnknownHostException The host does not exist.
*/
public static ZoneTransferIn
newAXFR(Name zone, SocketAddress address, TSIG key) {
return new ZoneTransferIn(zone, Type.AXFR, 0, false, address, key);
}
/**
* Instantiates a ZoneTransferIn object to do an AXFR (full zone transfer).
* @param zone The zone to transfer.
* @param host The host from which to transfer the zone.
* @param port The port to connect to on the server, or 0 for the default.
* @param key The TSIG key used to authenticate the transfer, or null.
* @return The ZoneTransferIn object.
* @throws UnknownHostException The host does not exist.
*/
public static ZoneTransferIn
newAXFR(Name zone, String host, int port, TSIG key)
throws UnknownHostException
{
if (port == 0)
port = SimpleResolver.DEFAULT_PORT;
return newAXFR(zone, new InetSocketAddress(host, port), key);
}
/**
* Instantiates a ZoneTransferIn object to do an AXFR (full zone transfer).
* @param zone The zone to transfer.
* @param host The host from which to transfer the zone.
* @param key The TSIG key used to authenticate the transfer, or null.
* @return The ZoneTransferIn object.
* @throws UnknownHostException The host does not exist.
*/
public static ZoneTransferIn
newAXFR(Name zone, String host, TSIG key)
throws UnknownHostException
{
return newAXFR(zone, host, 0, key);
}
/**
* Instantiates a ZoneTransferIn object to do an IXFR (incremental zone
* transfer).
* @param zone The zone to transfer.
* @param serial The existing serial number.
* @param fallback If true, fall back to AXFR if IXFR is not supported.
* @param address The host/port from which to transfer the zone.
* @param key The TSIG key used to authenticate the transfer, or null.
* @return The ZoneTransferIn object.
* @throws UnknownHostException The host does not exist.
*/
public static ZoneTransferIn
newIXFR(Name zone, long serial, boolean fallback, SocketAddress address,
TSIG key)
{
return new ZoneTransferIn(zone, Type.IXFR, serial, fallback, address,
key);
}
/**
* Instantiates a ZoneTransferIn object to do an IXFR (incremental zone
* transfer).
* @param zone The zone to transfer.
* @param serial The existing serial number.
* @param fallback If true, fall back to AXFR if IXFR is not supported.
* @param host The host from which to transfer the zone.
* @param port The port to connect to on the server, or 0 for the default.
* @param key The TSIG key used to authenticate the transfer, or null.
* @return The ZoneTransferIn object.
* @throws UnknownHostException The host does not exist.
*/
public static ZoneTransferIn
newIXFR(Name zone, long serial, boolean fallback, String host, int port,
TSIG key)
throws UnknownHostException
{
if (port == 0)
port = SimpleResolver.DEFAULT_PORT;
return newIXFR(zone, serial, fallback,
new InetSocketAddress(host, port), key);
}
/**
* Instantiates a ZoneTransferIn object to do an IXFR (incremental zone
* transfer).
* @param zone The zone to transfer.
* @param serial The existing serial number.
* @param fallback If true, fall back to AXFR if IXFR is not supported.
* @param host The host from which to transfer the zone.
* @param key The TSIG key used to authenticate the transfer, or null.
* @return The ZoneTransferIn object.
* @throws UnknownHostException The host does not exist.
*/
public static ZoneTransferIn
newIXFR(Name zone, long serial, boolean fallback, String host, TSIG key)
throws UnknownHostException
{
return newIXFR(zone, serial, fallback, host, 0, key);
}
/**
* Gets the name of the zone being transferred.
*/
public Name
getName() {
return zname;
}
/**
* Gets the type of zone transfer (either AXFR or IXFR).
*/
public int
getType() {
return qtype;
}
/**
* Sets a timeout on this zone transfer. The default is 900 seconds (15
* minutes).
* @param secs The maximum amount of time that this zone transfer can take.
*/
public void
setTimeout(int secs) {
if (secs < 0)
throw new IllegalArgumentException("timeout cannot be " +
"negative");
timeout = 1000L * secs;
}
/**
* Sets an alternate DNS class for this zone transfer.
* @param dclass The class to use instead of class IN.
*/
public void
setDClass(int dclass) {
DClass.check(dclass);
this.dclass = dclass;
}
/**
* Sets the local address to bind to when sending messages.
* @param addr The local address to send messages from.
*/
public void
setLocalAddress(SocketAddress addr) {
this.localAddress = addr;
}
private void
openConnection() throws IOException {
long endTime = System.currentTimeMillis() + timeout;
client = new TCPClient(endTime);
if (localAddress != null)
client.bind(localAddress);
client.connect(address);
}
private void
sendQuery() throws IOException {
Record question = Record.newRecord(zname, qtype, dclass);
Message query = new Message();
query.getHeader().setOpcode(Opcode.QUERY);
query.addRecord(question, Section.QUESTION);
if (qtype == Type.IXFR) {
Record soa = new SOARecord(zname, dclass, 0, Name.root,
Name.root, ixfr_serial,
0, 0, 0, 0);
query.addRecord(soa, Section.AUTHORITY);
}
if (tsig != null) {
tsig.apply(query, null);
verifier = new TSIG.StreamVerifier(tsig, query.getTSIG());
}
byte [] out = query.toWire(Message.MAXLENGTH);
client.send(out);
}
private static long
getSOASerial(Record rec) {
SOARecord soa = (SOARecord) rec;
return soa.getSerial();
}
private void
logxfr(String s) {
if (Options.check("verbose"))
System.out.println(zname + ": " + s);
}
private void
fail(String s) throws ZoneTransferException {
throw new ZoneTransferException(s);
}
private void
fallback() throws ZoneTransferException {
if (!want_fallback)
fail("server doesn't support IXFR");
logxfr("falling back to AXFR");
qtype = Type.AXFR;
state = INITIALSOA;
}
private void
parseRR(Record rec) throws ZoneTransferException {
int type = rec.getType();
Delta delta;
switch (state) {
case INITIALSOA:
if (type != Type.SOA)
fail("missing initial SOA");
initialsoa = rec;
// Remember the serial number in the initial SOA; we need it
// to recognize the end of an IXFR.
end_serial = getSOASerial(rec);
if (qtype == Type.IXFR &&
Serial.compare(end_serial, ixfr_serial) <= 0)
{
logxfr("up to date");
state = END;
break;
}
state = FIRSTDATA;
break;
case FIRSTDATA:
// If the transfer begins with 1 SOA, it's an AXFR.
// If it begins with 2 SOAs, it's an IXFR.
if (qtype == Type.IXFR && type == Type.SOA &&
getSOASerial(rec) == ixfr_serial)
{
rtype = Type.IXFR;
handler.startIXFR();
logxfr("got incremental response");
state = IXFR_DELSOA;
} else {
rtype = Type.IXFR;
handler.startAXFR();
handler.handleRecord(initialsoa);
logxfr("got nonincremental response");
state = AXFR;
}
parseRR(rec); // Restart...
return;
case IXFR_DELSOA:
handler.startIXFRDeletes(rec);
state = IXFR_DEL;
break;
case IXFR_DEL:
if (type == Type.SOA) {
current_serial = getSOASerial(rec);
state = IXFR_ADDSOA;
parseRR(rec); // Restart...
return;
}
handler.handleRecord(rec);
break;
case IXFR_ADDSOA:
handler.startIXFRAdds(rec);
state = IXFR_ADD;
break;
case IXFR_ADD:
if (type == Type.SOA) {
long soa_serial = getSOASerial(rec);
if (soa_serial == end_serial) {
state = END;
break;
} else if (soa_serial != current_serial) {
fail("IXFR out of sync: expected serial " +
current_serial + " , got " + soa_serial);
} else {
state = IXFR_DELSOA;
parseRR(rec); // Restart...
return;
}
}
handler.handleRecord(rec);
break;
case AXFR:
// Old BINDs sent cross class A records for non IN classes.
if (type == Type.A && rec.getDClass() != dclass)
break;
handler.handleRecord(rec);
if (type == Type.SOA) {
state = END;
}
break;
case END:
fail("extra data");
break;
default:
fail("invalid state");
break;
}
}
private void
closeConnection() {
try {
if (client != null)
client.cleanup();
}
catch (IOException e) {
}
}
private Message
parseMessage(byte [] b) throws WireParseException {
try {
return new Message(b);
}
catch (IOException e) {
if (e instanceof WireParseException)
throw (WireParseException) e;
throw new WireParseException("Error parsing message");
}
}
private void
doxfr() throws IOException, ZoneTransferException {
sendQuery();
while (state != END) {
byte [] in = client.recv();
Message response = parseMessage(in);
if (response.getHeader().getRcode() == Rcode.NOERROR &&
verifier != null)
{
TSIGRecord tsigrec = response.getTSIG();
int error = verifier.verify(response, in);
if (error != Rcode.NOERROR)
fail("TSIG failure");
}
Record [] answers = response.getSectionArray(Section.ANSWER);
if (state == INITIALSOA) {
int rcode = response.getRcode();
if (rcode != Rcode.NOERROR) {
if (qtype == Type.IXFR &&
rcode == Rcode.NOTIMP)
{
fallback();
doxfr();
return;
}
fail(Rcode.string(rcode));
}
Record question = response.getQuestion();
if (question != null && question.getType() != qtype) {
fail("invalid question section");
}
if (answers.length == 0 && qtype == Type.IXFR) {
fallback();
doxfr();
return;
}
}
for (int i = 0; i < answers.length; i++) {
parseRR(answers[i]);
}
if (state == END && verifier != null &&
!response.isVerified())
fail("last message must be signed");
}
}
/**
* Does the zone transfer.
* @param handler The callback object that handles the zone transfer data.
* @throws IOException The zone transfer failed to due an IO problem.
* @throws ZoneTransferException The zone transfer failed to due a problem
* with the zone transfer itself.
*/
public void
run(ZoneTransferHandler handler) throws IOException, ZoneTransferException {
this.handler = handler;
try {
openConnection();
doxfr();
}
finally {
closeConnection();
}
}
/**
* Does the zone transfer.
* @return A list, which is either an AXFR-style response (List of Records),
* and IXFR-style response (List of Deltas), or null, which indicates that
* an IXFR was performed and the zone is up to date.
* @throws IOException The zone transfer failed to due an IO problem.
* @throws ZoneTransferException The zone transfer failed to due a problem
* with the zone transfer itself.
*/
public List
run() throws IOException, ZoneTransferException {
BasicHandler handler = new BasicHandler();
run(handler);
if (handler.axfr != null)
return handler.axfr;
return handler.ixfr;
}
// Returns the stored BasicHandler; rejects transfers that were run through a
// custom callback handler, since those store no response to hand back.
private BasicHandler
getBasicHandler() throws IllegalArgumentException {
	if (!(handler instanceof BasicHandler))
		throw new IllegalArgumentException("ZoneTransferIn used callback " +
						   "interface");
	return (BasicHandler) handler;
}
/**
 * Returns true if the response is an AXFR-style response (List of Records).
 * This will be true if either an AXFR was performed, an IXFR was performed
 * and the server provided a full zone transfer, or an IXFR failed and
 * fallback to AXFR occurred.
 */
public boolean
isAXFR() {
	return (rtype == Type.AXFR);
}
/**
 * Gets the AXFR-style response (the full list of Records).
 * @throws IllegalArgumentException The transfer used the callback interface,
 * so the response was not stored.
 */
public List
getAXFR() {
	return getBasicHandler().axfr;
}
/**
 * Returns true if the response is an IXFR-style response (List of Deltas).
 * This holds only when an IXFR was performed and the server answered with an
 * incremental zone transfer.
 */
public boolean
isIXFR() {
	return rtype == Type.IXFR;
}
/**
 * Gets the IXFR-style response (the list of Deltas).
 * @throws IllegalArgumentException The transfer used the callback interface,
 * so the response was not stored.
 */
public List
getIXFR() {
	return getBasicHandler().ixfr;
}
/**
 * Returns true if the response indicates that the zone is up to date,
 * i.e. neither an AXFR-style nor an IXFR-style result was stored.
 * This can only happen after an IXFR.
 * @throws IllegalArgumentException The transfer used the callback interface,
 * so the response was not stored.
 */
public boolean
isCurrent() {
	final BasicHandler stored = getBasicHandler();
	return stored.axfr == null && stored.ixfr == null;
}
}
| |
package com.planet_ink.coffee_mud.Abilities.Thief;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2020-2022 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * Thief skill: after a successful wind-up, the next damaging hit the invoker
 * lands with a leather flailed (whip-like) weapon strips a worn item off the
 * victim, forcing it to be removed and dropped.
 */
public class Thief_WhipStrip extends ThiefSkill
{
	@Override
	public String ID()
	{
		return "Thief_WhipStrip";
	}

	// Localized display name, resolved once at class-load time.
	private final static String localizedName = CMLib.lang().L("Whip Strip");

	@Override
	public String name()
	{
		return localizedName;
	}

	// This ability can affect and target MOBs only.
	@Override
	protected int canAffectCode()
	{
		return CAN_MOBS;
	}

	@Override
	protected int canTargetCode()
	{
		return CAN_MOBS;
	}

	@Override
	public int abstractQuality()
	{
		return Ability.QUALITY_MALICIOUS;
	}

	@Override
	public int classificationCode()
	{
		return Ability.ACODE_THIEF_SKILL|Ability.DOMAIN_STEALING;
	}

	// Command word(s) players type to invoke this skill.
	private static final String[] triggerStrings =I(new String[] {"WHIPSTRIP"});

	@Override
	public String[] triggerStrings()
	{
		return triggerStrings;
	}

	@Override
	public int usageType()
	{
		return USAGE_MOVEMENT;
	}

	// Tunable ability code; added to the invoker's effective level in invoke().
	public int code=0;

	@Override
	public int abilityCode()
	{
		return code;
	}

	@Override
	public void setAbilityCode(final int newCode)
	{
		code=newCode;
	}

	// State carried from invoke() to okMessage(): the item selected at
	// wind-up time and the intended victim.
	protected Item stolenI=null;
	protected MOB target=null;

	/**
	 * Watches messages while this effect sits on the invoker. When the
	 * invoker damages the chosen target with their wielded weapon (and the
	 * weapon prerequisite still holds), the worn item is forcibly removed
	 * and dropped, and the effect removes itself.
	 */
	@Override
	public boolean okMessage(final Environmental myHost, final CMMsg msg)
	{
		// Only act while attached to a MOB with a chosen target.
		if((affected==null)||(!(affected instanceof MOB))||(target==null))
			return super.okMessage(myHost,msg);
		final MOB mob=(MOB)affected;
		MOB target=this.target;
		if(target==null)
			target=mob.getVictim();
		if(msg.amISource(mob)
		&&(msg.amITarget(target))
		&&(target instanceof MOB)
		&&(msg.targetMinor()==CMMsg.TYP_DAMAGE)
		&&(msg.tool()==mob.fetchWieldedItem())
		&&(msg.tool()!=null)
		&&(msg.value()>0)
		&&(this.prereqs(msg.source(),true)))
		{
			final Room R=target.location();
			if(R==null)
				return super.okMessage(myHost, msg);
			// Fall back to any properly-worn item if the pre-selected one is
			// gone or no longer worn.
			Item stolenI=this.stolenI;
			if((stolenI==null)||(!stolenI.amBeingWornProperly()))
				stolenI=target.fetchItem(null,Wearable.FILTER_WORNONLY,"all");
			if((stolenI==null)||(!stolenI.amBeingWornProperly()))
				return super.okMessage(myHost, msg);
			final String str=L("<S-NAME> whip strip(s) @x1 off <T-NAMESELF>.",stolenI.name());
			final String hisStr=str;
			// Only flag the act as malicious when the victim may legally be fought.
			final int hisCode=CMMsg.MSG_THIEF_ACT | ((target.mayIFight(mob))?CMMsg.MASK_MALICIOUS:0);
			CMMsg msg2=CMClass.getMsg(mob,target,this,CMMsg.MSG_THIEF_ACT,str,hisCode,hisStr,CMMsg.NO_EFFECT,null);
			if(R.okMessage(mob,msg2))
			{
				R.send(mob,msg2);
				// Make the victim remove, then drop, the stripped item.
				msg2=CMClass.getMsg(target,stolenI,null,CMMsg.MSG_REMOVE,CMMsg.MSG_REMOVE,CMMsg.MSG_NOISE,null);
				if(R.okMessage(target,msg2))
				{
					R.send(mob,msg2);
					msg2=CMClass.getMsg(target,stolenI,null,CMMsg.MSG_DROP,CMMsg.MSG_DROP,CMMsg.MSG_NOISE,null);
					if(R.okMessage(target,msg2))
						R.send(mob,msg2);
				}
			}
			// One-shot: remove the effect whether or not the strip landed.
			unInvoke();
		}
		return super.okMessage(myHost,msg);
	}

	@Override
	public int castingQuality(final MOB mob, final Physical target)
	{
		if(mob!=null)
		{
			// Pointless against non-MOBs, corpses, unseen or unfightable
			// targets, or without the required weapon.
			if(!(target instanceof MOB))
				return Ability.QUALITY_INDIFFERENT;
			if(((MOB)target).amDead()||(!CMLib.flags().canBeSeenBy(target,mob)))
				return Ability.QUALITY_INDIFFERENT;
			if(!((MOB)target).mayIFight(mob))
				return Ability.QUALITY_INDIFFERENT;
			if(!prereqs(mob,true))
				return Ability.QUALITY_INDIFFERENT;
		}
		return super.castingQuality(mob,target);
	}

	/**
	 * Checks that the mob wields a leather flailed weapon (i.e. a whip).
	 * @param mob the prospective invoker
	 * @param quiet true to suppress the failure message
	 * @return true if the weapon requirement is met
	 */
	protected boolean prereqs(final MOB mob, final boolean quiet)
	{
		final Item w=mob.fetchWieldedItem();
		if((w==null)
		||(!(w instanceof Weapon))
		||(((Weapon)w).weaponClassification()!=Weapon.CLASS_FLAILED)
		||((((Weapon)w).material()&RawMaterial.MATERIAL_MASK)!=RawMaterial.MATERIAL_LEATHER))
		{
			if(!quiet)
				mob.tell(L("You need a leather flailed weapon to perform a whipstrip!"));
			return false;
		}
		return true;
	}

	@Override
	public boolean invoke(final MOB mob, final List<String> commands, final Physical givenTarget, final boolean auto, final int asLevel)
	{
		String itemToSteal="all";
		if(!auto)
		{
			// Expected form: WHIPSTRIP <item> <target>
			if(commands.size()<2)
			{
				mob.tell(L("Whip Strip what off of whom?"));
				return false;
			}
			itemToSteal=commands.get(0);
		}
		MOB target=null;
		if((givenTarget!=null)&&(givenTarget instanceof MOB))
			target=(MOB)givenTarget;
		else
			target=mob.location().fetchInhabitant(CMParms.combine(commands,1));
		if((target==null)||(target.amDead())||(!CMLib.flags().canBeSeenBy(target,mob)))
		{
			mob.tell(L("You don't see '@x1' here.",CMParms.combine(commands,1)));
			return false;
		}
		// Level gap: the ability code and expertise (x2) raise the invoker's
		// effective level; a gap over 15 is not attemptable.
		int levelDiff=target.phyStats().level()-(mob.phyStats().level()+abilityCode()+(getXLEVELLevel(mob)*2));
		if((!target.mayIFight(mob))||(levelDiff>15))
		{
			mob.tell(L("You cannot strip anything off of @x1.",target.charStats().himher()));
			return false;
		}
		if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
			return false;
		final Item stolenI=target.fetchItem(null,Wearable.FILTER_WORNONLY,itemToSteal);
		if((stolenI==null)||(!CMLib.flags().canBeSeenBy(stolenI,mob)))
		{
			mob.tell(L("@x1 doesn't seem to be wearing '@x2'.",target.name(mob),itemToSteal));
			return false;
		}
		// Convert the level gap into a proficiency adjustment; being unseen
		// by the target softens the penalty / boosts the bonus.
		if(levelDiff>0)
			levelDiff=-(levelDiff*((!CMLib.flags().canBeSeenBy(mob,target))?5:15));
		else
			levelDiff=-(levelDiff*((CMLib.flags().canBeSeenBy(mob,target))?1:2));
		final boolean success=proficiencyCheck(mob,levelDiff,auto);
		if(success)
		{
			final CMMsg msg=CMClass.getMsg(mob,target,this,(auto?CMMsg.MASK_ALWAYS:0)|CMMsg.MASK_MALICIOUS|CMMsg.MSG_NOISYMOVEMENT,
				L("^F^<FIGHT^><S-NAME> wind(s) up to whipstrip <T-NAME>!^</FIGHT^>^?"));
			CMLib.color().fixSourceFightColor(msg);
			if(mob.location().okMessage(mob,msg))
			{
				mob.location().send(mob,msg);
				invoker=mob;
				// Attach this effect to the invoker for 2 ticks; okMessage()
				// completes the strip on the next qualifying damage message.
				// FIX: beneficialAffect() may return null (e.g. if the affect
				// message is rejected) — guard before dereferencing.
				final Thief_WhipStrip A=(Thief_WhipStrip)beneficialAffect(mob,mob,asLevel,2);
				if(A!=null)
				{
					A.target=target;
					A.stolenI=stolenI;
				}
				mob.recoverPhyStats();
			}
		}
		else
			return maliciousFizzle(mob,target,L("<S-NAME> wind(s) up to whipstrip <T-NAME>, but fail(s)."));
		return success;
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.integration;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import javax.cache.Cache;
import javax.cache.integration.CacheLoaderException;
import javax.cache.integration.CacheWriterException;
import javax.cache.processor.EntryProcessor;
import javax.cache.processor.MutableEntry;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.cache.store.CacheStore;
import org.apache.ignite.cache.store.CacheStoreSession;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.processors.cache.IgniteCacheAbstractTest;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.lang.IgniteBiInClosure;
import org.apache.ignite.resources.CacheStoreSessionResource;
import org.apache.ignite.resources.IgniteInstanceResource;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.junit.Test;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
/**
 * Verifies that the {@link CacheStoreSession} injected into a shared
 * {@link CacheStore} reports the expected method sequence, transaction
 * presence, session properties and cache name for both configured caches.
 */
public abstract class IgniteCacheStoreSessionAbstractTest extends IgniteCacheAbstractTest {
    /** Expected store invocations; the store consumes entries from the head as it is called. */
    protected static volatile List<ExpectedData> expData;

    /** Name of the second cache that shares the same store instance. */
    protected static final String CACHE_NAME1 = "cache1";

    /** {@inheritDoc} */
    @SuppressWarnings("unchecked")
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);

        TestStore store = new TestStore(); // Use the same store instance for both caches.

        assert cfg.getCacheConfiguration().length == 1;

        CacheConfiguration ccfg0 = cfg.getCacheConfiguration()[0];

        // Both caches are read-through and write-through so every cache
        // operation reaches the store.
        ccfg0.setReadThrough(true);
        ccfg0.setWriteThrough(true);

        ccfg0.setCacheStoreFactory(singletonFactory(store));

        CacheConfiguration ccfg1 = cacheConfiguration(igniteInstanceName);

        ccfg1.setReadThrough(true);
        ccfg1.setWriteThrough(true);

        ccfg1.setName(CACHE_NAME1);

        ccfg1.setCacheStoreFactory(singletonFactory(store));

        cfg.setCacheConfiguration(ccfg0, ccfg1);

        return cfg;
    }

    /** {@inheritDoc} */
    @Override protected void afterTestsStopped() throws Exception {
        super.afterTestsStopped();

        expData = null;
    }

    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        // Synchronized list: store callbacks may run on system threads.
        expData = Collections.synchronizedList(new ArrayList<ExpectedData>());

        super.beforeTestsStarted();
    }

    /**
     * @param cache Cache.
     * @param cnt Keys count.
     * @return Keys.
     * @throws Exception If failed.
     */
    protected List<Integer> testKeys(IgniteCache cache, int cnt) throws Exception {
        return primaryKeys(cache, cnt, 0);
    }

    /**
     * Runs the session checks against both configured caches.
     * @throws Exception If failed.
     */
    @Test
    public void testStoreSession() throws Exception {
        assertEquals(DEFAULT_CACHE_NAME, jcache(0).getName());
        assertEquals(CACHE_NAME1, ignite(0).cache(CACHE_NAME1).getName());

        testStoreSession(jcache(0));
        testStoreSession(ignite(0).cache(CACHE_NAME1));
    }

    /**
     * Exercises each store operation on the given cache and asserts that the
     * store consumed its expected-invocation entry (expData must be empty
     * after every step).
     * @param cache Cache.
     * @throws Exception If failed.
     */
    private void testStoreSession(IgniteCache<Object, Object> cache) throws Exception {
        // Three primary keys; 100_000 is the third primaryKeys argument
        // (presumably a start offset — confirm against primaryKeys contract).
        Set<Integer> keys = new HashSet<>(primaryKeys(cache, 3, 100_000));

        Integer key = keys.iterator().next();

        boolean tx = atomicityMode() == TRANSACTIONAL;

        // get() -> load.
        expData.add(new ExpectedData(false, "load", new TreeMap<>(), cache.getName()));
        assertEquals(key, cache.get(key));
        assertTrue(expData.isEmpty());

        // getAll() -> loadAll.
        expData.add(new ExpectedData(false, "loadAll", new TreeMap<>(), cache.getName()));
        assertEquals(3, cache.getAll(keys).size());
        assertTrue(expData.isEmpty());

        // put() -> write (+ sessionEnd when transactional).
        expectedData(tx, "write", cache.getName());
        cache.put(key, key);
        assertTrue(expData.isEmpty());

        // invoke() also results in a write.
        expectedData(tx, "write", cache.getName());
        cache.invoke(key, new EntryProcessor<Object, Object, Object>() {
            @Override public Object process(MutableEntry<Object, Object> e, Object... args) {
                e.setValue("val1");

                return null;
            }
        });
        assertTrue(expData.isEmpty());

        // remove() -> delete.
        expectedData(tx, "delete", cache.getName());
        cache.remove(key);
        assertTrue(expData.isEmpty());

        Map<Object, Object> vals = new TreeMap<>();

        for (Object key0 : keys)
            vals.put(key0, key0);

        // putAll() -> writeAll.
        expectedData(tx, "writeAll", cache.getName());
        cache.putAll(vals);
        assertTrue(expData.isEmpty());

        // removeAll() -> deleteAll.
        expectedData(tx, "deleteAll", cache.getName());
        cache.removeAll(keys);
        assertTrue(expData.isEmpty());

        // localLoadCache() -> loadCache (never transactional).
        expectedData(false, "loadCache", cache.getName());
        cache.localLoadCache(null);
        assertTrue(expData.isEmpty());
    }

    /**
     * Queues the expectation for one store call and, when transactional, the
     * matching sessionEnd call (whose properties should contain the method
     * name recorded by checkSession).
     * @param tx {@code True} is transaction is expected.
     * @param expMtd Expected method.
     * @param expCacheName Expected cache name.
     */
    private void expectedData(boolean tx, String expMtd, String expCacheName) {
        expData.add(new ExpectedData(tx, expMtd, new TreeMap<>(), expCacheName));

        if (tx)
            expData.add(new ExpectedData(true, "sessionEnd", F.<Object, Object>asMap(0, expMtd), expCacheName));
    }

    /**
     * One expected store invocation.
     */
    static class ExpectedData {
        /** Whether the call is expected to run inside a transaction. */
        private final boolean tx;

        /** Expected store method name. */
        private final String expMtd;

        /** Expected session properties at call time. */
        private final Map<Object, Object> expProps;

        /** Expected cache name reported by the session. */
        private final String expCacheName;

        /**
         * @param tx {@code True} if transaction is enabled.
         * @param expMtd Expected method.
         * @param expProps Expected properties.
         * @param expCacheName Expected cache name.
         */
        ExpectedData(boolean tx, String expMtd, Map<Object, Object> expProps, @NotNull String expCacheName) {
            this.tx = tx;
            this.expMtd = expMtd;
            this.expProps = expProps;
            this.expCacheName = expCacheName;
        }
    }

    /**
     * Base class proving that session injection also works on inherited fields.
     */
    private static class AbstractStore {
        /** Session injected into the parent class; must be the same instance as in the subclass. */
        @CacheStoreSessionResource
        protected CacheStoreSession sesInParent;
    }

    /**
     * Store shared by both caches; verifies the injected session on every call.
     * NOTE(review): non-static inner class, so it holds a reference to the
     * enclosing test instance — presumably intentional since it is passed via
     * singletonFactory; confirm no serialization of the store is required.
     */
    private class TestStore extends AbstractStore implements CacheStore<Object, Object> {
        /** Auto-injected store session. */
        @CacheStoreSessionResource
        private CacheStoreSession ses;

        /** Auto-injected Ignite instance; checked non-null on every call. */
        @IgniteInstanceResource
        protected Ignite ignite;

        /** {@inheritDoc} */
        @Override public void loadCache(IgniteBiInClosure<Object, Object> clo, @Nullable Object... args) {
            log.info("Load cache [tx=" + session().transaction() + ']');

            checkSession("loadCache");
        }

        /** {@inheritDoc} */
        @Override public void sessionEnd(boolean commit) throws CacheWriterException {
            if (session().isWithinTransaction()) {
                log.info("Tx end [commit=" + commit + ", tx=" + session().transaction() + ']');

                checkSession("sessionEnd");
            }
        }

        /** {@inheritDoc} */
        @Override public Object load(Object key) throws CacheLoaderException {
            log.info("Load [key=" + key + ", tx=" + session().transaction() + ']');

            checkSession("load");

            // Echo the key back as the loaded value.
            return key;
        }

        /** {@inheritDoc} */
        @Override public Map<Object, Object> loadAll(Iterable<?> keys) throws CacheLoaderException {
            log.info("LoadAll [keys=" + keys + ", tx=" + session().transaction() + ']');

            checkSession("loadAll");

            Map<Object, Object> loaded = new TreeMap<>();

            for (Object key : keys)
                loaded.put(key, key);

            return loaded;
        }

        /** {@inheritDoc} */
        @Override public void write(Cache.Entry<?, ?> entry) throws CacheWriterException {
            log.info("Write [write=" + entry + ", tx=" + session().transaction() + ']');

            checkSession("write");
        }

        /** {@inheritDoc} */
        @Override public void writeAll(Collection<Cache.Entry<?, ?>> entries) throws CacheWriterException {
            log.info("WriteAll: [writeAll=" + entries + ", tx=" + session().transaction() + ']');

            checkSession("writeAll");
        }

        /** {@inheritDoc} */
        @Override public void delete(Object key) throws CacheWriterException {
            log.info("Delete [key=" + key + ", tx=" + session().transaction() + ']');

            checkSession("delete");
        }

        /** {@inheritDoc} */
        @Override public void deleteAll(Collection<?> keys) throws CacheWriterException {
            log.info("DeleteAll [keys=" + keys + ", tx=" + session().transaction() + ']');

            checkSession("deleteAll");
        }

        /**
         * @return Store session.
         */
        private CacheStoreSession session() {
            return ses;
        }

        /**
         * Pops the next expected invocation and verifies the method name,
         * transaction presence, accumulated session properties and cache
         * name; also records this call in the session properties so a later
         * sessionEnd can observe it.
         * @param mtd Called stored method.
         */
        private void checkSession(String mtd) {
            assertNotNull(ignite);

            assertFalse(expData.isEmpty());

            ExpectedData exp = expData.remove(0);

            assertEquals(exp.expMtd, mtd);

            CacheStoreSession ses = session();

            assertNotNull(ses);

            // Field inherited from AbstractStore must hold the same session.
            assertSame(ses, sesInParent);

            if (exp.tx)
                assertNotNull(ses.transaction());
            else
                assertNull(ses.transaction());

            Map<Object, Object> props = ses.properties();

            assertNotNull(props);

            assertEquals(exp.expProps, props);

            // Record this call under the next integer index for later checks.
            props.put(props.size(), mtd);

            assertEquals(exp.expCacheName, ses.cacheName());
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.compression;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.fail;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import org.junit.Test;
import org.apache.geode.cache.CacheListener;
import org.apache.geode.cache.CacheWriter;
import org.apache.geode.cache.DataPolicy;
import org.apache.geode.cache.EntryEvent;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.util.CacheListenerAdapter;
import org.apache.geode.cache.util.CacheWriterAdapter;
import org.apache.geode.compression.Compressor;
import org.apache.geode.compression.SnappyCompressor;
import org.apache.geode.internal.cache.EntryEventImpl;
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.SerializableCallable;
import org.apache.geode.test.dunit.SerializableRunnable;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.cache.internal.JUnit4CacheTestCase;
/**
 * Asserts that values received in EntryEvents for CacheWriters and CacheListeners are not
 * compressed, even though the region they fire on has a compressor configured.
 *
 */
public class CompressionCacheListenerDUnitTest extends JUnit4CacheTestCase {
  /**
   * The name of our test region.
   */
  public static final String REGION_NAME = "compressedRegion";

  /**
   * Test virtual machine number.
   */
  public static final int TEST_VM = 0;

  /**
   * A key.
   */
  public static final String KEY_1 = "key1";

  /**
   * Another key.
   */
  public static final String KEY_2 = "key2";

  /**
   * Yet another key.
   */
  public static final String KEY_3 = "key3";

  /**
   * A value.
   */
  public static final String VALUE_1 =
      "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aliquam auctor bibendum tempus. Suspendisse potenti. Ut enim neque, mattis et mattis ac, vulputate quis leo. Cras a metus metus, eget cursus ipsum. Aliquam sagittis condimentum massa aliquet rhoncus. Aliquam sed luctus neque. In hac habitasse platea dictumst.";

  /**
   * Another value.
   */
  private static final String VALUE_2 =
      "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Praesent sit amet lorem consequat est commodo lacinia. Duis tortor sem, facilisis quis tempus in, luctus lacinia metus. Vivamus augue justo, porttitor in vulputate accumsan, adipiscing sit amet sem. Quisque faucibus porta ipsum in pellentesque. Donec malesuada ultrices sapien sit amet tempus. Sed fringilla ipsum at tellus condimentum et hendrerit arcu pretium. Nulla non leo ligula. Etiam commodo tempor ligula non placerat. Vivamus vestibulum varius arcu a varius. Duis sit amet erat imperdiet dui mattis auctor et id orci. Suspendisse non elit augue. Quisque ac orci turpis, nec sollicitudin justo. Sed bibendum justo ut lacus aliquet lacinia et et neque. Proin hendrerit varius mauris vel lacinia. Proin pellentesque lacus vitae nisl euismod bibendum.";

  /**
   * Yet another value.
   */
  private static final String VALUE_3 =
      "In ut nisi nisi, eu malesuada mauris. Vestibulum nec tellus felis. Pellentesque mauris ligula, pretium nec consequat ut, adipiscing non lorem. Vivamus pulvinar viverra nisl, sit amet vestibulum tellus lobortis in. Pellentesque blandit ipsum sed neque rhoncus eu tristique risus porttitor. Vivamus molestie dapibus mi in lacinia. Suspendisse bibendum, purus at gravida accumsan, libero turpis elementum leo, eget posuere purus nibh ac dolor.";

  /**
   * Queues events received by the CacheListener. Bounded to a single event.
   */
  public static final BlockingQueue<EntryEvent> LISTENER_QUEUE =
      new LinkedBlockingQueue<EntryEvent>(1);

  /**
   * A CacheListener that simply stores received events in a queue for evaluating.
   * Each event is copied (with any off-heap value moved on-heap) before queueing.
   */
  private static final CacheListener<String, String> CACHE_LISTENER =
      new CacheListenerAdapter<String, String>() {
        @Override
        public void afterCreate(EntryEvent<String, String> event) {
          EntryEventImpl copy = new EntryEventImpl((EntryEventImpl) event);
          copy.copyOffHeapToHeap();
          LISTENER_QUEUE.add(copy);
        }

        @Override
        public void afterDestroy(EntryEvent<String, String> event) {
          EntryEventImpl copy = new EntryEventImpl((EntryEventImpl) event);
          copy.copyOffHeapToHeap();
          LISTENER_QUEUE.add(copy);
        }

        @Override
        public void afterInvalidate(EntryEvent<String, String> event) {
          EntryEventImpl copy = new EntryEventImpl((EntryEventImpl) event);
          copy.copyOffHeapToHeap();
          LISTENER_QUEUE.add(copy);
        }

        @Override
        public void afterUpdate(EntryEvent<String, String> event) {
          EntryEventImpl copy = new EntryEventImpl((EntryEventImpl) event);
          copy.copyOffHeapToHeap();
          LISTENER_QUEUE.add(copy);
        }
      };

  /**
   * A queue for storing events received by a CacheWriter. Bounded to a single event.
   */
  public static final BlockingQueue<EntryEvent> WRITER_QUEUE =
      new LinkedBlockingQueue<EntryEvent>(1);

  /**
   * A CacheWriter that simply stores received events in a queue for evaluation.
   * Each event is copied (with any off-heap value moved on-heap) before queueing.
   */
  private static final CacheWriter<String, String> CACHE_WRITER =
      new CacheWriterAdapter<String, String>() {
        @Override
        public void beforeCreate(EntryEvent<String, String> event) {
          EntryEventImpl copy = new EntryEventImpl((EntryEventImpl) event);
          copy.copyOffHeapToHeap();
          WRITER_QUEUE.add(copy);
        }

        @Override
        public void beforeDestroy(EntryEvent<String, String> event) {
          EntryEventImpl copy = new EntryEventImpl((EntryEventImpl) event);
          copy.copyOffHeapToHeap();
          WRITER_QUEUE.add(copy);
        }

        @Override
        public void beforeUpdate(EntryEvent<String, String> event) {
          EntryEventImpl copy = new EntryEventImpl((EntryEventImpl) event);
          copy.copyOffHeapToHeap();
          WRITER_QUEUE.add(copy);
        }
      };

  /**
   * Creates a new CompressionCacheListenerDUnitTest.
   */
  public CompressionCacheListenerDUnitTest() {
    super();
  }

  @Override
  public final void postSetUp() throws Exception {
    disconnectAllFromDS();
    createRegion();
  }

  // Creates the compressed test region on the test VM; subclasses may override.
  protected void createRegion() {
    createCompressedRegionOnVm(getVM(TEST_VM), REGION_NAME, new SnappyCompressor());
  }

  @Override
  public final void preTearDownCacheTestCase() throws Exception {
    preTearDownCompressionCacheListenerDUnitTest();
    cleanup(getVM(TEST_VM));
  }

  // Hook for subclasses; intentionally empty here.
  protected void preTearDownCompressionCacheListenerDUnitTest() throws Exception {}

  /**
   * Returns the VM for a given identifier.
   *
   * @param vm a virtual machine identifier.
   * @return the matching VM on host 0.
   */
  protected VM getVM(int vm) {
    return Host.getHost(0).getVM(vm);
  }

  /**
   * Removes created regions from a VM.
   *
   * @param vm the virtual machine to cleanup.
   */
  private void cleanup(final VM vm) {
    vm.invoke(new SerializableRunnable() {
      @Override
      public void run() {
        getCache().getRegion(REGION_NAME).destroyRegion();
      }
    });
  }

  /**
   * Tests CacheWriter and CacheListener events on the test vm.
   */
  @Test
  public void testCacheListenerAndWriter() {
    testCacheListenerAndWriterWithVM(getVM(TEST_VM));
  }

  /**
   * Tests that received values in EntryEvents are not compressed for the following methods:
   *
   * <ul>
   * <li>{@link CacheWriter#beforeCreate(EntryEvent)}</li>
   * <li>{@link CacheListener#afterCreate(EntryEvent)}</li>
   * <li>{@link CacheWriter#beforeUpdate(EntryEvent)}</li>
   * <li>{@link CacheListener#afterUpdate(EntryEvent)}</li>
   * <li>{@link CacheListener#afterInvalidate(EntryEvent)}</li>
   * <li>{@link CacheWriter#beforeDestroy(EntryEvent)}</li>
   * <li>{@link CacheListener#afterDestroy(EntryEvent)}</li>
   * </ul>
   *
   * @param vm a virtual machine to perform the test on.
   */
  private void testCacheListenerAndWriterWithVM(final VM vm) {
    vm.invoke(new SerializableRunnable() {
      @Override
      public void run() {
        try {
          Region<String, String> region = getCache().getRegion(REGION_NAME);
          assertNotNull(region);
          assertNull(region.get(KEY_1));

          // beforeCreate
          String oldValue = region.put(KEY_1, VALUE_1);
          EntryEvent<String, String> event = WRITER_QUEUE.poll(5, TimeUnit.SECONDS);
          assertNotNull(event);
          assertNull(oldValue);
          assertNull(event.getOldValue());
          assertEquals(VALUE_1, event.getNewValue());
          assertEquals(KEY_1, event.getKey());

          // afterCreate
          event = LISTENER_QUEUE.poll(5, TimeUnit.SECONDS);
          assertNotNull(event);
          assertNull(event.getOldValue());
          assertEquals(VALUE_1, event.getNewValue());
          assertEquals(KEY_1, event.getKey());

          // beforeUpdate
          oldValue = region.put(KEY_1, VALUE_2);
          event = WRITER_QUEUE.poll(5, TimeUnit.SECONDS);
          assertNotNull(event);
          assertNotNull(oldValue);
          assertEquals(VALUE_1, oldValue);
          assertEquals(VALUE_1, event.getOldValue());
          assertEquals(VALUE_2, event.getNewValue());
          assertEquals(KEY_1, event.getKey());

          // afterUpdate
          event = LISTENER_QUEUE.poll(5, TimeUnit.SECONDS);
          assertNotNull(event);
          assertEquals(VALUE_1, event.getOldValue());
          assertEquals(VALUE_2, event.getNewValue());
          assertEquals(KEY_1, event.getKey());

          // afterInvalidate
          region.invalidate(KEY_1);
          event = LISTENER_QUEUE.poll(5, TimeUnit.SECONDS);
          assertNotNull(event);
          assertEquals(VALUE_2, event.getOldValue());
          assertNull(event.getNewValue());
          assertEquals(KEY_1, event.getKey());

          // beforeDestroy
          oldValue = region.destroy(KEY_1);
          event = WRITER_QUEUE.poll(5, TimeUnit.SECONDS);
          assertNull(oldValue);
          assertNotNull(event);
          assertNull(event.getOldValue());
          assertNull(event.getNewValue());
          assertEquals(KEY_1, event.getKey());

          // afterDestroy
          event = LISTENER_QUEUE.poll(5, TimeUnit.SECONDS);
          assertNotNull(event);
          assertNull(event.getOldValue());
          assertNull(event.getNewValue());
          assertEquals(KEY_1, event.getKey());
        } catch (InterruptedException e) {
          // NOTE(review): swallows InterruptedException without restoring the
          // interrupt flag or a failure message — consider
          // Thread.currentThread().interrupt() and fail(e.toString()).
          fail();
        }
      }
    });
  }

  /**
   * Creates a region and assigns a compressor.
   *
   * @param vm a virtual machine to create the region on.
   * @param name a region name.
   * @param compressor a compressor.
   */
  private void createCompressedRegionOnVm(final VM vm, final String name,
      final Compressor compressor) {
    createCompressedRegionOnVm(vm, name, compressor, false);
  }

  // Same as above but lets subclasses request off-heap storage.
  protected void createCompressedRegionOnVm(final VM vm, final String name,
      final Compressor compressor, final boolean offHeap) {
    vm.invoke(new SerializableCallable() {
      @Override
      public Object call() throws Exception {
        createRegion(name, compressor, offHeap);
        return Boolean.TRUE;
      }
    });
  }

  /**
   * Creates a region and assigns a compressor.
   *
   * @param name a region name.
   * @param compressor a compressor.
   * @param offHeap whether the region stores values off-heap.
   * @return the created replicated region.
   */
  private Region createRegion(String name, Compressor compressor, boolean offHeap) {
    return getCache().<String, String>createRegionFactory().addCacheListener(CACHE_LISTENER)
        .setCacheWriter(CACHE_WRITER).setDataPolicy(DataPolicy.REPLICATE).setCloningEnabled(true)
        .setCompressor(compressor).setOffHeap(offHeap).create(name);
  }
}
| |
package org.sagebionetworks.repo.web.service.table;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import org.sagebionetworks.manager.util.Validate;
import org.sagebionetworks.repo.manager.EntityManager;
import org.sagebionetworks.repo.manager.UserManager;
import org.sagebionetworks.repo.manager.file.FileHandleManager;
import org.sagebionetworks.repo.manager.table.ColumnModelManager;
import org.sagebionetworks.repo.manager.table.TableEntityManager;
import org.sagebionetworks.repo.manager.table.TableIndexConnectionFactory;
import org.sagebionetworks.repo.manager.table.TableManagerSupport;
import org.sagebionetworks.repo.manager.table.TableQueryManager;
import org.sagebionetworks.repo.model.DatastoreException;
import org.sagebionetworks.repo.model.UserInfo;
import org.sagebionetworks.repo.model.file.FileHandle;
import org.sagebionetworks.repo.model.file.FileHandleResults;
import org.sagebionetworks.repo.model.table.ColumnModel;
import org.sagebionetworks.repo.model.table.ColumnModelPage;
import org.sagebionetworks.repo.model.table.ColumnType;
import org.sagebionetworks.repo.model.table.PaginatedColumnModels;
import org.sagebionetworks.repo.model.table.Row;
import org.sagebionetworks.repo.model.table.RowReference;
import org.sagebionetworks.repo.model.table.RowReferenceSet;
import org.sagebionetworks.repo.model.table.RowSelection;
import org.sagebionetworks.repo.model.table.RowSet;
import org.sagebionetworks.repo.model.table.TableFileHandleResults;
import org.sagebionetworks.repo.model.table.ViewType;
import org.sagebionetworks.repo.web.NotFoundException;
import org.springframework.beans.factory.annotation.Autowired;
import com.google.common.collect.Lists;
/**
* Basic implementation of the TableServices.
*
*/
public class TableServicesImpl implements TableServices {
	// Injected collaborators (package-private so Spring can set them and tests can stub them).
	@Autowired
	UserManager userManager; // resolves numeric user ids to UserInfo
	@Autowired
	ColumnModelManager columnModelManager; // column model CRUD and listing
	@Autowired
	EntityManager entityManager;
	@Autowired
	TableEntityManager tableEntityManager; // row-level table operations
	@Autowired
	FileHandleManager fileHandleManager; // batch file handle lookup
	@Autowired
	TableQueryManager tableQueryManager;
	@Autowired
	TableManagerSupport tableManagerSupport;
	@Autowired
	TableIndexConnectionFactory connectionFactory;
@Override
public ColumnModel createColumnModel(Long userId, ColumnModel columnModel) throws DatastoreException, NotFoundException {
UserInfo user = userManager.getUserInfo(userId);
return columnModelManager.createColumnModel(user, columnModel);
}
@Override
public List<ColumnModel> createColumnModels(Long userId, List<ColumnModel> columnModels) throws DatastoreException, NotFoundException {
UserInfo user = userManager.getUserInfo(userId);
return columnModelManager.createColumnModels(user, columnModels);
}
@Override
public ColumnModel getColumnModel(Long userId, String columnId) throws DatastoreException, NotFoundException {
UserInfo user = userManager.getUserInfo(userId);
return columnModelManager.getColumnModel(user, columnId);
}
@Override
public PaginatedColumnModels getColumnModelsForTableEntity(Long userId, String entityId) throws DatastoreException, NotFoundException {
UserInfo user = userManager.getUserInfo(userId);
List<ColumnModel> models = columnModelManager.getColumnModelsForTable(user, entityId);
PaginatedColumnModels pcm = new PaginatedColumnModels();
pcm.setResults(models);
pcm.setTotalNumberOfResults((long) models.size());
return pcm;
}
@Override
public PaginatedColumnModels listColumnModels(Long userId, String prefix, Long limit, Long offset) throws DatastoreException, NotFoundException {
UserInfo user = userManager.getUserInfo(userId);
if(limit == null){
limit = new Long(10);
}
if(offset == null){
offset = new Long(0);
}
return columnModelManager.listColumnModels(user, prefix, limit, offset);
}
@Override
public RowReferenceSet deleteRows(Long userId, RowSelection rowsToDelete) throws DatastoreException, NotFoundException, IOException {
Validate.required(rowsToDelete, "rowsToDelete");
Validate.required(rowsToDelete.getTableId(), "rowsToDelete.tableId");
UserInfo user = userManager.getUserInfo(userId);
return tableEntityManager.deleteRows(user, rowsToDelete.getTableId(), rowsToDelete);
}
@Override
public TableFileHandleResults getFileHandles(Long userId, RowReferenceSet fileHandlesToFind) throws IOException, NotFoundException {
Validate.required(fileHandlesToFind, "fileHandlesToFind");
Validate.required(fileHandlesToFind.getTableId(), "fileHandlesToFind.tableId");
UserInfo userInfo = userManager.getUserInfo(userId);
List<ColumnModel> columns = columnModelManager.getCurrentColumns(userInfo, fileHandlesToFind.getTableId(), fileHandlesToFind.getHeaders());
for (ColumnModel cm : columns) {
if (cm != null
&& cm.getColumnType() != ColumnType.FILEHANDLEID) {
throw new IllegalArgumentException("Column " + cm.getId() + " is not of type FILEHANDLEID");
}
}
RowSet rowSet = tableEntityManager.getCellValues(userInfo, fileHandlesToFind.getTableId(), fileHandlesToFind.getRows(), columns);
// we expect there to be null entries, but the file handle manager does not
List<String> idsList = Lists.newArrayListWithCapacity(columns.size() * rowSet.getRows().size());
for (Row row : rowSet.getRows()) {
if(row.getValues() != null){
for (String id : row.getValues()) {
if (id != null) {
idsList.add(id);
}
}
}
}
Map<String, FileHandle> fileHandles = fileHandleManager.getAllFileHandlesBatch(idsList);
TableFileHandleResults results = new TableFileHandleResults();
results.setTableId(fileHandlesToFind.getTableId());
results.setHeaders(fileHandlesToFind.getHeaders());
results.setRows(Lists.<FileHandleResults> newArrayListWithCapacity(rowSet.getRows().size()));
// insert the file handles in order. Null ids will give null file handles
for (Row row : rowSet.getRows()) {
if(row.getValues() != null){
FileHandleResults rowHandles = new FileHandleResults();
rowHandles.setList(Lists.<FileHandle> newArrayListWithCapacity(columns.size()));
for (String id : row.getValues()) {
FileHandle fh;
if (id != null) {
fh = fileHandles.get(id);
} else {
fh = null;
}
rowHandles.getList().add(fh);
}
results.getRows().add(rowHandles);
}
}
return results;
}
@Override
public String getFileRedirectURL(Long userId, String tableId, RowReference rowRef, String columnId) throws IOException, NotFoundException {
Validate.required(columnId, "columnId");
Validate.required(userId, "userId");
String fileHandleId = getFileHandleId(userId, tableId, rowRef, columnId);
// Use the FileHandle ID to get the URL
return fileHandleManager.getRedirectURLForFileHandle(fileHandleId);
}
@Override
public String getFilePreviewRedirectURL(Long userId, String tableId, RowReference rowRef, String columnId) throws IOException,
NotFoundException {
Validate.required(columnId, "columnId");
Validate.required(userId, "userId");
String fileHandleId = getFileHandleId(userId, tableId, rowRef, columnId);
// Use the FileHandle ID to get the URL
String previewFileHandleId = fileHandleManager.getPreviewFileHandleId(fileHandleId);
return fileHandleManager.getRedirectURLForFileHandle(previewFileHandleId);
}
/**
* Get the file handle ID for a given table cell.
*
* @param userId
* @param tableId
* @param rowRef
* @param columnId
* @return
* @throws IOException
*/
public String getFileHandleId(Long userId, String tableId,
RowReference rowRef, String columnId) throws IOException {
// Get the file handles
UserInfo userInfo = userManager.getUserInfo(userId);
ColumnModel model = columnModelManager.getColumnModel(userInfo, columnId);
if (model.getColumnType() != ColumnType.FILEHANDLEID) {
throw new IllegalArgumentException("Column " + columnId + " is not of type FILEHANDLEID");
}
Row row = tableEntityManager.getCellValue(userInfo, tableId, rowRef, model);
if(row == null || row.getValues() == null || row.getValues().size() != 1){
throw new NotFoundException("Row: "+rowRef.getRowId());
}
return row.getValues().get(0);
}
@Override
public Long getMaxRowsPerPage(List<ColumnModel> models) {
return tableQueryManager.getMaxRowsPerPage(models);
}
@Override
public List<ColumnModel> getDefaultViewColumnsForType(ViewType viewType) {
return tableManagerSupport.getDefaultTableViewColumns(viewType);
}
@Override
public ColumnModelPage getPossibleColumnModelsForView(String viewId, String nextPageToken){
return connectionFactory.connectToFirstIndex().getPossibleColumnModelsForView(viewId, nextPageToken);
}
@Override
public ColumnModelPage getPossibleColumnModelsForScopeIds(List<String> scopeIds, ViewType type, String nextPageToken){
return connectionFactory.connectToFirstIndex().getPossibleColumnModelsForScope(scopeIds, type, nextPageToken);
}
}
| |
// Copyright (c) 2011, Chute Corporation. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// * Neither the name of the Chute Corporation nor the names
// of its contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
// IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
// OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE.
//
package com.chute.sdk.v2.api.asset;
import com.chute.sdk.v2.api.base.PageRequest;
import com.chute.sdk.v2.api.parsers.ListResponseParser;
import com.chute.sdk.v2.api.upload.UploadProgressListener;
import com.chute.sdk.v2.model.AlbumModel;
import com.chute.sdk.v2.model.AssetModel;
import com.chute.sdk.v2.model.PaginationModel;
import com.chute.sdk.v2.model.response.ListResponseModel;
import com.chute.sdk.v2.model.response.ResponseModel;
import com.dg.libs.rest.HttpRequest;
import com.dg.libs.rest.callbacks.HttpCallback;
import com.dg.libs.rest.client.RequestMethod;
import java.util.HashMap;
/**
* The {@link GCAssets} class is a helper class that consists static methods
* used for managing {@link AssetModel}.
* <p>
* The API enables users and developers to access the following methods:
* <ul>
* <li>Get asset from a specific album
* <li>Get list of assets from a specific album
* <li>Update asset's caption
* <li>Get asset's exif information
* <li>Delete an asset
* <li>Upload an asset
* </ul>
*/
public class GCAssets {

	@SuppressWarnings("unused")
	private static final String TAG = GCAssets.class.getSimpleName();

	/**
	 * Private no-args default constructor. {@link GCAssets} is a static
	 * factory holder and must never be instantiated.
	 */
	private GCAssets() {
		super();
	}

	/**
	 * Gets exif info for an asset.
	 * <p>
	 * Empty if there are no available exif parameters.
	 *
	 * @param asset
	 *          Asset containing exif data to be retrieved.
	 * @param callback
	 *          Instance of {@link HttpCallback} interface. If successful, the
	 *          callback returns {@link ResponseModel} holding a map of exif
	 *          parameter names to values.
	 * @return {@link AssetsExifRequest}.
	 */
	public static HttpRequest exif(
			final AssetModel asset,
			final HttpCallback<ResponseModel<HashMap<String, String>>> callback) {
		return new AssetsExifRequest(asset, callback);
	}

	/**
	 * Deletes an asset using its ID.
	 *
	 * @param album
	 *          The album containing the asset to be removed.
	 * @param asset
	 *          The asset to be removed.
	 * @param callback
	 *          Instance of {@link HttpCallback} interface. If successful, the
	 *          callback returns {@link ResponseModel<AssetModel>}.
	 * @return {@link AssetsDeleteRequest}.
	 */
	public static HttpRequest delete(
			final AlbumModel album, final AssetModel asset,
			final HttpCallback<ResponseModel<AssetModel>> callback) {
		return new AssetsDeleteRequest(album, asset, callback);
	}

	/**
	 * Updates the caption (description text) on an asset.
	 *
	 * @param album
	 *          The album containing the asset to be updated.
	 * @param asset
	 *          The asset to be updated.
	 * @param callback
	 *          Instance of {@link HttpCallback} interface. If successful, the
	 *          callback returns {@link ResponseModel<AssetModel>}.
	 * @return {@link AssetsUpdateRequest}.
	 */
	public static HttpRequest update(
			final AlbumModel album, final AssetModel asset,
			final HttpCallback<ResponseModel<AssetModel>> callback) {
		return new AssetsUpdateRequest(album, asset, callback);
	}

	/**
	 * Gets a specific asset from a given album.
	 *
	 * @param album
	 *          Album whose asset is demanded.
	 * @param asset
	 *          The requested asset.
	 * @param callback
	 *          Instance of {@link HttpCallback} interface. If successful, the
	 *          callback returns {@link ResponseModel<AssetModel>}.
	 * @return {@link AlbumsGetAssetRequest}.
	 */
	public static HttpRequest get(
			final AlbumModel album, final AssetModel asset,
			final HttpCallback<ResponseModel<AssetModel>> callback) {
		return new AlbumsGetAssetRequest(album, asset, callback);
	}

	/**
	 * Gets a list of assets from a specific album.
	 *
	 * @param album
	 *          The album whose assets are being retrieved.
	 * @param pagination
	 *          Instance of {@link PaginationModel} indicating number of assets
	 *          per page.
	 * @param callback
	 *          Instance of {@link HttpCallback} interface. If successful, the
	 *          callback returns {@link ListResponseModel<AssetModel>}.
	 * @return {@link AlbumsGetAssetListRequest}.
	 */
	public static HttpRequest list(
			final AlbumModel album, PaginationModel pagination,
			final HttpCallback<ListResponseModel<AssetModel>> callback) {
		return new AlbumsGetAssetListRequest(album, pagination,
				callback);
	}

	/**
	 * Fetches the next page of assets using the pagination state of a previous
	 * list response.
	 *
	 * @param model
	 *          Pagination state; its next-page URL is requested via GET.
	 * @param callback
	 *          Instance of {@link HttpCallback} interface. If successful, the
	 *          callback returns {@link ListResponseModel<AssetModel>}.
	 * @return {@link PageRequest} for the next page of assets.
	 */
	public static HttpRequest getNextPageOfAssets(
			PaginationModel model,
			final HttpCallback<ListResponseModel<AssetModel>> callback) {
		return new PageRequest<ListResponseModel<AssetModel>>(
				RequestMethod.GET, model.getNextPage(),
				new ListResponseParser<AssetModel>(AssetModel.class), callback);
	}

	/**
	 * Overloaded method: lists assets using default (first-page) pagination.
	 *
	 * @see #list(AlbumModel, PaginationModel, HttpCallback)
	 *
	 * @param album
	 *          The album whose assets are being retrieved.
	 * @param callback
	 *          Instance of {@link HttpCallback} interface. If successful, the
	 *          callback returns {@link ListResponseModel<AssetModel>}.
	 * @return {@link AlbumsGetAssetListRequest}.
	 */
	public static HttpRequest list(
			final AlbumModel album,
			final HttpCallback<ListResponseModel<AssetModel>> callback) {
		return new AlbumsGetAssetListRequest(album,
				new PaginationModel(), callback);
	}

	/**
	 * Uploads an asset.
	 * <p>
	 * The upload creates a file out of asset's path and returns the uploaded
	 * {@link AssetModel}.
	 *
	 * @param uploadListener
	 *          Instance of {@link UploadProgressListener} used for tracking the
	 *          upload progress
	 * @param album
	 *          Album holding the uploaded asset.
	 * @param filePath
	 *          Asset's file path.
	 * @param callback
	 *          Instance of {@link HttpCallback} interface. If successful, the
	 *          callback returns {@link ListResponseModel<AssetModel>}.
	 * @return {@link AssetsFileUploadRequest}.
	 */
	public static HttpRequest upload(
			final UploadProgressListener uploadListener,
			final AlbumModel album, final String filePath,
			final HttpCallback<ListResponseModel<AssetModel>> callback) {
		return new AssetsFileUploadRequest(filePath, album,
				uploadListener, callback);
	}

	/**
	 * Moves the specified asset from one album to another.
	 *
	 * @param album
	 *          Album holding the asset to be moved.
	 * @param asset
	 *          Asset you wish to move to another album.
	 * @param newAlbum
	 *          Album that is going to store the moved asset.
	 * @param callback
	 *          Instance of {@link HttpCallback} interface. If successful, the
	 *          callback returns {@link ResponseModel<AssetModel>}.
	 * @return {@link AssetsMoveRequest}
	 */
	public static HttpRequest move(
			final AlbumModel album, final AssetModel asset,
			final AlbumModel newAlbum,
			final HttpCallback<ResponseModel<AssetModel>> callback) {
		return new AssetsMoveRequest(album, asset, newAlbum, callback);
	}

	/**
	 * Copies the specified asset from one album to another.
	 *
	 * @param album
	 *          Album holding the asset to be copied.
	 * @param asset
	 *          Asset you wish to copy to another album
	 * @param newAlbum
	 *          Album that is going to store the copied asset.
	 * @param callback
	 *          Instance of {@link HttpCallback} interface. If successful, the
	 *          callback returns {@link ResponseModel<AssetModel>}.
	 * @return {@link AssetsCopyRequest}
	 */
	public static HttpRequest copy(
			final AlbumModel album, final AssetModel asset,
			final AlbumModel newAlbum,
			final HttpCallback<ResponseModel<AssetModel>> callback) {
		return new AssetsCopyRequest(album, asset, newAlbum, callback);
	}
}
| |
package eu.chargetime.ocpp.model.test;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import eu.chargetime.ocpp.PropertyConstraintException;
import eu.chargetime.ocpp.model.core.BootNotificationRequest;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
/*
ChargeTime.eu - Java-OCA-OCPP
Copyright (C) 2015-2016 Thomas Volden <tv@chargetime.eu>
MIT License
Copyright (C) 2016-2018 Thomas Volden
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
/**
 * Unit tests for {@code BootNotificationRequest} string-length validation.
 *
 * Each setter is exercised at the maximum allowed length (no exception) and
 * one character past it (expects {@code PropertyConstraintException}).
 */
public class BootNotificationRequestTest {

	// Message produced by the request's length validation; placeholders are
	// the maximum allowed length and the offending value's length.
	private static final String EXCEPTION_MESSAGE_TEMPLATE =
			"Validation failed: [Exceeded limit of %s chars]. Current Value: [%s]";

	// NOTE(review): ExpectedException is deprecated in later JUnit 4 releases
	// in favor of Assert.assertThrows — consider migrating if JUnit >= 4.13.
	@Rule public ExpectedException thrownException = ExpectedException.none();

	private BootNotificationRequest request;

	// Boundary fixtures: exactly at (20/25) and one past (21/26) the limits.
	private String stringLength20 = "12345678901234567890";
	private String stringLength21 = "123456789012345678901";
	private String stringLength25 = "1234567890123456789012345";
	private String stringLength26 = "12345678901234567890123456";

	@Before
	public void setUp() {
		request = new BootNotificationRequest();
	}

	@Deprecated
	@Test
	public void setChargeBoxSerialNumber_stringLength25_noExceptions() {
		// When
		request.setChargeBoxSerialNumber(stringLength25);
		// Then
		assertThat(request.getChargeBoxSerialNumber(), equalTo(stringLength25));
	}

	@Deprecated
	@Test
	public void setChargeBoxSerialNumber_stringLength26_throwsPropertyConstraintException() {
		setExpectedPropertyConstraintException(createExpectedExceptionMessage(25, 26));
		request.setChargeBoxSerialNumber(stringLength26);
	}

	// Arms the ExpectedException rule; must be called BEFORE the throwing call.
	private void setExpectedPropertyConstraintException(String expectedExceptionMessage) {
		thrownException.expect(instanceOf(PropertyConstraintException.class));
		thrownException.expectMessage(equalTo(expectedExceptionMessage));
	}

	@Test
	public void setChargePointModel_stringLength20_noExceptions() {
		// When
		request.setChargePointModel(stringLength20);
		// Then
		assertThat(request.getChargePointModel(), equalTo(stringLength20));
	}

	@Test
	public void setChargePointModel_stringLength21_throwsPropertyConstraintException() {
		setExpectedPropertyConstraintException(createExpectedExceptionMessage(20, 21));
		request.setChargePointModel(stringLength21);
	}

	@Test
	public void setChargePointSerialNumber_stringLength25_noExceptions() {
		// When
		request.setChargePointSerialNumber(stringLength25);
		// Then
		assertThat(request.getChargePointSerialNumber(), equalTo(stringLength25));
	}

	@Test
	public void setChargePointSerialNumber_stringLength26_throwsPropertyConstraintException() {
		setExpectedPropertyConstraintException(createExpectedExceptionMessage(25, 26));
		request.setChargePointSerialNumber(stringLength26);
	}

	@Test
	public void setChargePointVendor_stringLength20_noExceptions() {
		// When
		request.setChargePointVendor(stringLength20);
		// Then
		assertThat(request.getChargePointVendor(), equalTo(stringLength20));
	}

	@Test
	public void setChargePointVendor_stringLength21_throwsPropertyConstraintException() {
		setExpectedPropertyConstraintException(createExpectedExceptionMessage(20, 21));
		request.setChargePointVendor(stringLength21);
	}

	@Test
	public void setFirmwareVersion_stringLength50_noException() {
		// Given
		String stringLength50 = "12345678901234567890123456789012345678901234567890";
		// When
		request.setFirmwareVersion(stringLength50);
		// Then
		assertThat(request.getFirmwareVersion(), equalTo(stringLength50));
	}

	@Test
	public void setFirmwareVersion_stringLength51_throwsPropertyConstraintException() {
		setExpectedPropertyConstraintException(createExpectedExceptionMessage(50, 51));
		String stringLength51 = "123456789012345678901234567890123456789012345678901";
		request.setFirmwareVersion(stringLength51);
	}

	@Test
	public void setIccid_stringLength20_noExceptions() {
		// When
		request.setIccid(stringLength20);
		// Then
		assertThat(request.getIccid(), equalTo(stringLength20));
	}

	@Test
	public void setIccid_stringLength21_throwsPropertyConstraintException() {
		setExpectedPropertyConstraintException(createExpectedExceptionMessage(20, 21));
		request.setIccid(stringLength21);
	}

	@Test
	public void setImsi_stringLength20_noExceptions() {
		// When
		request.setImsi(stringLength20);
		// Then
		assertThat(request.getImsi(), equalTo(stringLength20));
	}

	@Test
	public void setImsi_stringLength21_throwsPropertyConstraintException() {
		setExpectedPropertyConstraintException(createExpectedExceptionMessage(20, 21));
		request.setImsi(stringLength21);
	}

	@Test
	public void setMeterSerialNumber_stringLength25_noExceptions() {
		// When
		request.setMeterSerialNumber(stringLength25);
		// Then
		assertThat(request.getMeterSerialNumber(), equalTo(stringLength25));
	}

	@Test
	public void setMeterSerialNumber_stringLength26_throwsPropertyConstraintException() {
		setExpectedPropertyConstraintException(createExpectedExceptionMessage(25, 26));
		request.setMeterSerialNumber(stringLength26);
	}

	@Test
	public void setMeterType_stringLength25_noExceptions() {
		// When
		request.setMeterType(stringLength25);
		// Then
		assertThat(request.getMeterType(), equalTo(stringLength25));
	}

	@Test
	public void setMeterType_stringLength26_throwsPropertyConstraintException() {
		setExpectedPropertyConstraintException(createExpectedExceptionMessage(25, 26));
		request.setMeterType(stringLength26);
	}

	// Vendor and model are the only required fields for a valid request.
	@Test
	public void validate_chargePointVendorAndChargePointModelIsSet_returnsTrue() {
		// Given
		request.setChargePointModel(stringLength20);
		request.setChargePointVendor(stringLength20);
		// When
		boolean isValid = request.validate();
		// Then
		assertThat(isValid, is(true));
	}

	@Test
	public void validate_nothingIsSet_returnsFalse() {
		// When
		boolean isValid = request.validate();
		// Then
		assertThat(isValid, is(false));
	}

	@Test
	public void isTransactionRelated_returnsFalse() {
		// When
		boolean isTransactionRelated = request.transactionRelated();
		// Then
		assertThat(isTransactionRelated, is(false));
	}

	// Builds the expected validation message for a max length / actual length pair.
	private static String createExpectedExceptionMessage(int maxAllowedLength, int currentLength) {
		return String.format(EXCEPTION_MESSAGE_TEMPLATE, maxAllowedLength, currentLength);
	}
}
| |
/*
* RgbVal.java
*
* Created on September 9, 2006, 10:42 AM
*
* To change this template, choose Tools | Template Manager
* and open the template in the editor.
*
* Copyright 2006 by Jon A. Webb
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the Lesser GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package jjil.core;
/**
* Helper class for manipulating RGB values. All functions are static.
*
* @author webb
*/
public class RgbVal {
// for translating unsigned int values to signed byte values with the
// same bit field. I can't think of a simpler way to do this.
public final static byte[] unsignedIntToSignedByte = { (byte) 0, (byte) 1, (byte) 2,
(byte) 3, (byte) 4, (byte) 5, (byte) 6, (byte) 7, (byte) 8,
(byte) 9, (byte) 10, (byte) 11, (byte) 12, (byte) 13, (byte) 14,
(byte) 15, (byte) 16, (byte) 17, (byte) 18, (byte) 19, (byte) 20,
(byte) 21, (byte) 22, (byte) 23, (byte) 24, (byte) 25, (byte) 26,
(byte) 27, (byte) 28, (byte) 29, (byte) 30, (byte) 31, (byte) 32,
(byte) 33, (byte) 34, (byte) 35, (byte) 36, (byte) 37, (byte) 38,
(byte) 39, (byte) 40, (byte) 41, (byte) 42, (byte) 43, (byte) 44,
(byte) 45, (byte) 46, (byte) 47, (byte) 48, (byte) 49, (byte) 50,
(byte) 51, (byte) 52, (byte) 53, (byte) 54, (byte) 55, (byte) 56,
(byte) 57, (byte) 58, (byte) 59, (byte) 60, (byte) 61, (byte) 62,
(byte) 63, (byte) 64, (byte) 65, (byte) 66, (byte) 67, (byte) 68,
(byte) 69, (byte) 70, (byte) 71, (byte) 72, (byte) 73, (byte) 74,
(byte) 75, (byte) 76, (byte) 77, (byte) 78, (byte) 79, (byte) 80,
(byte) 81, (byte) 82, (byte) 83, (byte) 84, (byte) 85, (byte) 86,
(byte) 87, (byte) 88, (byte) 89, (byte) 90, (byte) 91, (byte) 92,
(byte) 93, (byte) 94, (byte) 95, (byte) 96, (byte) 97, (byte) 98,
(byte) 99, (byte) 100, (byte) 101, (byte) 102, (byte) 103,
(byte) 104, (byte) 105, (byte) 106, (byte) 107, (byte) 108,
(byte) 109, (byte) 110, (byte) 111, (byte) 112, (byte) 113,
(byte) 114, (byte) 115, (byte) 116, (byte) 117, (byte) 118,
(byte) 119, (byte) 120, (byte) 121, (byte) 122, (byte) 123,
(byte) 124, (byte) 125, (byte) 126, (byte) 127, (byte) -255,
(byte) -254, (byte) -253, (byte) -252, (byte) -251, (byte) -250,
(byte) -249, (byte) -248, (byte) -247, (byte) -246, (byte) -245,
(byte) -244, (byte) -243, (byte) -242, (byte) -241, (byte) -240,
(byte) -239, (byte) -238, (byte) -237, (byte) -236, (byte) -235,
(byte) -234, (byte) -233, (byte) -232, (byte) -231, (byte) -230,
(byte) -229, (byte) -228, (byte) -227, (byte) -226, (byte) -225,
(byte) -224, (byte) -223, (byte) -222, (byte) -221, (byte) -220,
(byte) -219, (byte) -218, (byte) -217, (byte) -216, (byte) -215,
(byte) -214, (byte) -213, (byte) -212, (byte) -211, (byte) -210,
(byte) -209, (byte) -208, (byte) -207, (byte) -206, (byte) -205,
(byte) -204, (byte) -203, (byte) -202, (byte) -201, (byte) -200,
(byte) -199, (byte) -198, (byte) -197, (byte) -196, (byte) -195,
(byte) -194, (byte) -193, (byte) -192, (byte) -191, (byte) -190,
(byte) -189, (byte) -188, (byte) -187, (byte) -186, (byte) -185,
(byte) -184, (byte) -183, (byte) -182, (byte) -181, (byte) -180,
(byte) -179, (byte) -178, (byte) -177, (byte) -176, (byte) -175,
(byte) -174, (byte) -173, (byte) -172, (byte) -171, (byte) -170,
(byte) -169, (byte) -168, (byte) -167, (byte) -166, (byte) -165,
(byte) -164, (byte) -163, (byte) -162, (byte) -161, (byte) -160,
(byte) -159, (byte) -158, (byte) -157, (byte) -156, (byte) -155,
(byte) -154, (byte) -153, (byte) -152, (byte) -151, (byte) -150,
(byte) -149, (byte) -148, (byte) -147, (byte) -146, (byte) -145,
(byte) -144, (byte) -143, (byte) -142, (byte) -141, (byte) -140,
(byte) -139, (byte) -138, (byte) -137, (byte) -136, (byte) -135,
(byte) -134, (byte) -133, (byte) -132, (byte) -131, (byte) -130,
(byte) -129, (byte) -128, (byte) -127, (byte) -126, (byte) -125,
(byte) -124, (byte) -123, (byte) -122, (byte) -121, (byte) -120,
(byte) -119, (byte) -118, (byte) -117, (byte) -116, (byte) -115,
(byte) -114, (byte) -113, (byte) -112, (byte) -111, (byte) -110,
(byte) -109, (byte) -108, (byte) -107, (byte) -106, (byte) -105,
(byte) -104, (byte) -103, (byte) -102, (byte) -101, (byte) -100,
(byte) -99, (byte) -98, (byte) -97, (byte) -96, (byte) -95,
(byte) -94, (byte) -93, (byte) -92, (byte) -91, (byte) -90,
(byte) -89, (byte) -88, (byte) -87, (byte) -86, (byte) -85,
(byte) -84, (byte) -83, (byte) -82, (byte) -81, (byte) -80,
(byte) -79, (byte) -78, (byte) -77, (byte) -76, (byte) -75,
(byte) -74, (byte) -73, (byte) -72, (byte) -71, (byte) -70,
(byte) -69, (byte) -68, (byte) -67, (byte) -66, (byte) -65,
(byte) -64, (byte) -63, (byte) -62, (byte) -61, (byte) -60,
(byte) -59, (byte) -58, (byte) -57, (byte) -56, (byte) -55,
(byte) -54, (byte) -53, (byte) -52, (byte) -51, (byte) -50,
(byte) -49, (byte) -48, (byte) -47, (byte) -46, (byte) -45,
(byte) -44, (byte) -43, (byte) -42, (byte) -41, (byte) -40,
(byte) -39, (byte) -38, (byte) -37, (byte) -36, (byte) -35,
(byte) -34, (byte) -33, (byte) -32, (byte) -31, (byte) -30,
(byte) -29, (byte) -28, (byte) -27, (byte) -26, (byte) -25,
(byte) -24, (byte) -23, (byte) -22, (byte) -21, (byte) -20,
(byte) -19, (byte) -18, (byte) -17, (byte) -16, (byte) -15,
(byte) -14, (byte) -13, (byte) -12, (byte) -11, (byte) -10,
(byte) -9, (byte) -8, (byte) -7, (byte) -6, (byte) -5, (byte) -4,
(byte) -3, (byte) -2, (byte) -1 };
/**
* Converts byte R, G, and B values to an ARGB word. byte is a signed data
* type but the ARGB word has unsigned bit fields. In other words the
* minimum byte value is Byte.MIN_VALUE but the color black in the ARGB word
* is represented as 0x00. So we must subtract Byte.MIN_VALUE to get an
* unsigned byte value before shifting and combining the bit fields.
*
* @param R
* input signed red byte
* @param G
* input signed green byte
* @param B
* input signed blue byte
* @return the color ARGB word.
*/
public static int toRgb(byte R, byte G, byte B) {
return 0xFF000000 | (toUnsignedInt(R) << 16) | (toUnsignedInt(G) << 8)
| toUnsignedInt(B);
}
/**
* Compare two RgbVals in absolute value.
*
* @return sum of absolute differences between pixel values
*/
public static int getAbsDiff(int rgb1, int rgb2) {
return Math.abs(RgbVal.getR(rgb1) - RgbVal.getR(rgb2))
+ Math.abs(RgbVal.getG(rgb1) - RgbVal.getG(rgb2))
+ Math.abs(RgbVal.getB(rgb1) - RgbVal.getB(rgb2));
}
/**
* Compare two RgbVals in maximum difference in any band.
*
* @return maximum difference between pixel values in any band
*/
/**
* Computes maximum difference (largest difference in color, R, G, or B) of
* two color values.
*
* @param ARGB1
* first color
* @param ARGB2
* second color
* @return largest difference. Will always be >= 0, <= 256.
*/
public static int getMaxDiff(int ARGB1, int ARGB2) {
int nR1 = RgbVal.getR(ARGB1);
int nG1 = RgbVal.getG(ARGB1);
int nB1 = RgbVal.getB(ARGB1);
int nR2 = RgbVal.getR(ARGB2);
int nG2 = RgbVal.getG(ARGB2);
int nB2 = RgbVal.getB(ARGB2);
return Math.max(Math.abs(nR1 - nR2),
Math.max(Math.abs(nG1 - nG2), Math.abs(nB1 - nB2)));
}
public static int getProportionateDiff(int ARGB1, int ARGB2) {
int nR1 = RgbVal.getR(ARGB1) - Byte.MIN_VALUE;
int nG1 = RgbVal.getG(ARGB1) - Byte.MIN_VALUE;
int nB1 = RgbVal.getB(ARGB1) - Byte.MIN_VALUE;
int nR2 = RgbVal.getR(ARGB2) - Byte.MIN_VALUE;
int nG2 = RgbVal.getG(ARGB2) - Byte.MIN_VALUE;
int nB2 = RgbVal.getB(ARGB2) - Byte.MIN_VALUE;
// We're solving the equation
// min/r ((r*nR1 - nR2) + (r*nG1 - nG2) + (r*nB1 - nB2))**2
// which gives 2*((r*nR1 - nR2)*nR1 + (r*nG1 - nG2)*nG1 + (r*nB1 -
// nB2)*nB1) = 0
// or r = (nR1*nR2 + nG1*nG2 + nB1*nB2) / (nR1*nR1 + nG1*nG1 + nB1*nB1)
// we divide r into nNum / nDenom to avoid floating point
int nNum = (nR1 * nR2 + nG1 * nG2 + nB1 * nB2);
int nDenom = (nR1 * nR1 + nG1 * nG1 + nB1 * nB1);
if (nDenom == 0) {
return 3 * Byte.MAX_VALUE;
}
// the error is then ((r*nR1 - nR2) + (r*nG1 - nG2) + (r*nB1 - nB2))**2
// or (r*(nR1 + nG1 + nB1) - (nR2 + nB2 + nG2))**2
// or ((nNum*(nR1 + nG1 + nB1) - nDenom*(nR1 + nG2 + nB2)) / nDenom)**2
// or ((nNum*(nR1 + nG1 + nB1) - nDenom*(nR1 + nG2 + nB2))**2 /
// nDenom**2
return MathPlus.square(8 * (nNum * (nR1 + nG1 + nB1) - nDenom
* (nR1 + nG2 + nB2)))
/ MathPlus.square(nDenom);
}
/**
* Compare two RgbVals in sum of squares difference.
*
* @return sum of squares differences between pixel values
*/
public static int getSqrDiff(int rgb1, int rgb2) {
return MathPlus.square(RgbVal.getR(rgb1) - RgbVal.getR(rgb2))
+ MathPlus.square(RgbVal.getG(rgb1) - RgbVal.getG(rgb2))
+ MathPlus.square(RgbVal.getB(rgb1) - RgbVal.getB(rgb2));
}
/**
* Extracts blue byte from input ARGB word. The bit fields in ARGB word are
* unsigned, ranging from 0x00 to 0xff. To convert these to the returned
* signed byte value we must add Byte.MIN_VALUE.
*
* @return the blue byte value, converted to a signed byte
* @param ARGB
* the input color ARGB word.
*/
public static byte getB(int ARGB) {
return toSignedByte((byte) (ARGB & 0xff));
}
/**
* Extracts green byte from input ARGB word. The bit fields in ARGB word are
* unsigned, ranging from 0x00 to 0xff. To convert these to the returned
* signed byte value we must add Byte.MIN_VALUE.
*
* @param ARGB
* the input color ARGB word.
* @return the green byte value, converted to a signed byte
*/
public static byte getG(int ARGB) {
return toSignedByte((byte) ((ARGB >> 8) & 0xff));
}
/**
* Extracts red byte from input ARGB word. The bit fields in ARGB word are
* unsigned, ranging from 0x00 to 0xff. To convert these to the returned
* signed byte value we must add Byte.MIN_VALUE.
*
* @param ARGB
* the input color ARGB word.
* @return the red byte value, converted to a signed byte
*/
public static byte getR(int ARGB) {
return toSignedByte((byte) ((ARGB >> 16) & 0xff));
}
/**
* Return "vector" difference of Rgb values. Treating each Rgb value as a
* 3-element vector form the value (ARGB-ARGBTarg) . ARGBVec where . is dot
* product. Useful for determining whether an Rgb value is near another
* weighted the different channels differently.
*
* @param ARGB
* tested Rgb value
* @param ARGBTarg
* target Rgb value
* @param ARGBVec
* weighting
* @return (ARGB-ARGBTarg) . ARGBVec where . is dot product and the Rgb
* values are treated as 3-vectors.
*/
public static int getVecDiff(int ARGB, int ARGBTarg, int ARGBVec) {
int nR1 = RgbVal.getR(ARGB);
int nG1 = RgbVal.getG(ARGB);
int nB1 = RgbVal.getB(ARGB);
int nR2 = RgbVal.getR(ARGBTarg);
int nG2 = RgbVal.getG(ARGBTarg);
int nB2 = RgbVal.getB(ARGBTarg);
int nR3 = RgbVal.getR(ARGBVec);
int nG3 = RgbVal.getG(ARGBVec);
int nB3 = RgbVal.getB(ARGBVec);
return (nR1 - nR2) * nR3 + (nG1 - nG2) * nG3 + (nB1 - nB2) * nB3;
}
/**
* Converts from an unsigned bit field (as stored in an ARGB word to a
* signed byte value (that we can do computation on).
*
* @return the signed byte value
* @param b
* the unsigned byte value.
*/
public static byte toSignedByte(byte b) {
return (byte) (b + Byte.MIN_VALUE);
}
/**
 * Converts a signed byte value (which we do computation on) to an unsigned bit
 * field (as stored in an ARGB word). The result is returned as an int because
 * the unsigned 8-bit value cannot be represented as a byte.
 *
 * @param b the signed byte value.
 * @return the unsigned bit field, in [0, 255]
 */
public static int toUnsignedInt(byte b) {
    // Subtracting Byte.MIN_VALUE is the inverse of toSignedByte: it maps
    // [-128,127] back onto [0,255]. (Note: NOT the same as b & 0xff.)
    int offset = -Byte.MIN_VALUE; // 128
    return b + offset;
}
/**
 * Provides a way to turn color values into strings.
 *
 * <p>Bug fix: the original printed the red channel twice ("[R,R,B]") because
 * the middle component called {@code getR} instead of {@code getG}; it now
 * correctly prints "[R,G,B]". Also drops the deprecated {@code new Integer(..)}
 * boxing in favor of plain string concatenation, which yields the same decimal
 * rendering of each signed byte.
 *
 * @param ARGB the input color value
 * @return a string describing the color as "[R,G,B]" (signed byte values)
 */
public static String toString(int ARGB) {
    return "[" + RgbVal.getR(ARGB) + ","
            + RgbVal.getG(ARGB) + ","
            + RgbVal.getB(ARGB) + "]";
}
}
| |
package ciir.jfoley.chai.collections.util;
import ciir.jfoley.chai.collections.ListBasedOrderedSet;
import ciir.jfoley.chai.collections.Pair;
import ciir.jfoley.chai.collections.list.AChaiList;
import ciir.jfoley.chai.collections.list.IntList;
import ciir.jfoley.chai.fn.PredicateFn;
import ciir.jfoley.chai.fn.TransformFn;
import ciir.jfoley.chai.lang.Module;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.*;
import java.util.function.BiPredicate;
/**
 * This module contains a number of functions meant to operate on lists (sometimes it's much easier than the related iterables)
 * {@link IterableFns}
 * @author jfoley.
 */
public class ListFns extends Module {
  /** Specialized lazy List concat: a read-only view presenting {@code first} followed by {@code second}. */
  @Nonnull
  public static <T> List<T> lazyConcat(@Nonnull final List<T> first, @Nonnull final List<T> second) {
    return new AbstractList<T>() {
      @Override
      public T get(int i) {
        // Indexes below first.size() resolve into the first list; the rest into the second.
        if(i < first.size()) {
          return first.get(i);
        }
        return second.get(i - first.size());
      }
      @Override
      public int size() {
        return first.size() + second.size();
      }
    };
  }
  /**
   * Return a lazily mapped view of a collection. The mapper is re-applied on every get().
   * @param input the base list
   * @param mapper the element-wise mapping function
   * @param <B> the new type of elements
   * @param <T> the original type of elements
   * @return a list view over the mapped collection
   */
  @Nonnull
  public static <B, T> AChaiList<B> lazyMap(@Nonnull List<T> input, TransformFn<T, B> mapper) {
    return new AChaiList<B>() {
      @Override
      public B get(int index) {
        return mapper.transform(input.get(index));
      }
      @Override
      public int size() {
        return input.size();
      }
    };
  }
  /** Take a view of up to amt items from the front of a list (fewer if the list is shorter). */
  @Nonnull
  public static <T> List<T> take(@Nonnull List<T> input, int amt) {
    return input.subList(0, Math.min(input.size(), amt));
  }
  /**
   * Returns a sliding window of subList views over the input list of size window.
   * @param input The input list to iterate over.
   * @param window The size of the window.
   * @param <T> The type parameter of the input list.
   * @return A list of windows to process further.
   */
  @Nonnull
  public static <T> List<List<T>> sliding(@Nonnull List<T> input, int window) {
    List<List<T>> windows = new ArrayList<>(input.size());
    // Stop when the window would run off the end of the list.
    for (int start = 0; (start+window-1) < input.size(); start++) {
      int end = start + window; // inclusive
      windows.add(input.subList(start, end));
    }
    return windows;
  }
  /**
   * Returns a list of pairs, all pairs, assuming order doesn't matter.
   * @param input The input list to split up.
   * @param <T> the parameter of the input list.
   * @return All unique order-independent pairs in the list.
   */
  @Nonnull
  public static <T> List<Pair<T,T>> pairs(@Nonnull List<T> input) {
    input = ensureRandomAccess(input);
    // NOTE(review): n*(n-1) pre-sizes to twice the n*(n-1)/2 pairs actually produced; harmless over-allocation.
    List<Pair<T,T>> output = new ArrayList<>(input.size()*(input.size()-1));
    for (int i = 0; i < input.size()-1; i++) {
      for (int j = i+1; j < input.size(); j++) {
        output.add(Pair.of(input.get(i), input.get(j)));
      }
    }
    return output;
  }
  /**
   * Break a list into pieces in order. Actually creates subList views of the original list.
   * {@link java.util.List#subList}
   *
   * @param input The input list to break into pieces.
   * @param splits the number of partitions to make of the original.
   * @return A list of subLists.
   */
  @Nonnull
  public static <T> List<List<T>> partition(@Nonnull List<T> input, int splits) {
    if(splits == 1) {
      return Collections.singletonList(input);
    }
    input = ensureRandomAccess(input);
    // Rounded split size; slice() clamps out-of-range bounds for the tail splits.
    int numberPerSplit = (int) Math.round(input.size() / (double) splits);
    List<List<T>> output = new ArrayList<>();
    for (int i = 0; i < splits; i++) {
      output.add(slice(input, i * numberPerSplit, (i + 1) * numberPerSplit));
    }
    // fix up a small remainder: extend the last split to cover any leftover elements
    if(input.size() > splits * numberPerSplit) {
      output.set(splits-1, input.subList((splits-1) * numberPerSplit, input.size()));
    }
    return output;
  }
  /**
   * Break a list into pieces by dealing elements out round-robin: element i of the
   * input lands in output list (i % splits). Unlike {@link #partition(List, int)}
   * this copies elements into new lists rather than creating subList views, and the
   * pieces interleave rather than preserve contiguous order.
   *
   * @param input The input list to break into pieces.
   * @param splits the number of partitions to make of the original.
   * @return A list of new lists containing the dealt-out elements.
   */
  @Nonnull
  public static <T> List<List<T>> partitionRoundRobin(@Nonnull List<T> input, int splits) {
    input = ensureRandomAccess(input);
    List<List<T>> output = new ArrayList<>();
    for (int i = 0; i < splits; i++) {
      output.add(new ArrayList<T>(input.size() / splits));
    }
    for (int i = 0; i < input.size(); i++) {
      output.get(i%splits).add(input.get(i));
    }
    return output;
  }
  /** Copy this collection into an ArrayList if it cannot be quickly accessed by index; otherwise return it as-is. */
  @SuppressWarnings("unchecked")
  @Nonnull
  public static <T> List<T> ensureRandomAccess(@Nonnull Collection<? extends T> input) {
    if(input instanceof RandomAccess && input instanceof List) return (List<T>) input;
    return new ArrayList<>(input);
  }
  /**
   * Read-only view of a list of a subtype as a list of its supertype.
   * Safe because the view only ever reads elements.
   */
  @Nonnull
  public static <T> List<T> castView(@Nonnull final List<? extends T> input) {
    return new AbstractList<T>() {
      @Override
      public T get(int index) {
        return input.get(index);
      }
      @Override
      public int size() {
        return input.size();
      }
    };
  }
  /**
   * Collect an Enumeration into a List, so you can do a for loop like a normal person.
   * @param entries The abomination.
   * @param <T> The type of contained values.
   * @return A list of the contained values. Makes a copy.
   */
  @Nonnull
  public static <T> List<T> collect(@Nonnull Enumeration<? extends T> entries) {
    List<T> results = new ArrayList<>();
    while (entries.hasMoreElements()) {
      results.add(entries.nextElement());
    }
    return results;
  }
  /**
   * Append to a shallow copy of this list; immutable .add()
   * @param original the input list.
   * @param newItem the new item.
   * @param <T> the type of items.
   * @return a new list containing the original items + the new item.
   */
  @Nonnull
  public static <T> List<T> pushToCopy(@Nonnull List<T> original, T newItem) {
    List<T> newList = new ArrayList<>(original);
    newList.add(newItem);
    return newList;
  }
  /**
   * True if any element of input satisfies condition.
   * NOTE(review): implemented via findFirst, so a null element that satisfies the condition reads as "no match".
   */
  public static <T> boolean matches(List<T> input, PredicateFn<T> condition) {
    return findFirst(input, condition) != null;
  }
  /**
   * Linear scan for the first element satisfying condition.
   * @return the first matching element, or null if none match.
   */
  @Nullable
  public static <T> T findFirst(List<T> input, PredicateFn<T> condition) {
    for (T t : input) {
      if(condition.test(t)) {
        return t;
      }
    }
    return null;
  }
  /**
   * Find the max of a collection by a given transform function.
   * @param objs the collection.
   * @param fn the function that takes an object and returns a comparable property.
   * @param <T> the type of objects in the collection.
   * @param <V> the type of comparable objects.
   * @return the maximum T by V, or null on an empty collection.
   */
  @Nullable
  public static <T, V extends Comparable<V>> T maxBy(List<? extends T> objs, TransformFn<T,V> fn) {
    if(objs.isEmpty()) return null;
    T max = objs.get(0);
    V maxValue = fn.transform(max);
    for (int i = 1; i < objs.size(); i++) {
      T t = objs.get(i);
      V tv = fn.transform(t);
      // Strict less-than: the earliest element wins ties.
      if(maxValue.compareTo(tv) < 0) {
        max = t;
        maxValue = tv;
      }
    }
    return max;
  }
  /**
   * Remove duplicates from an input list.
   * @param input the list.
   * @param <T> the type of elements in the list.
   * @return an ordered de-duplication. Might not be as fast as unordered.
   */
  @Nonnull
  public static <T> List<T> unique(List<? extends T> input) {
    return new ListBasedOrderedSet<T>(input).toList();
  }
  /**
   * Joins two collections up to their minimum size, copying into a new list.
   * @param lhs the left hand collection.
   * @param rhs the right hand collection.
   * @param <A> type of left objects.
   * @param <B> type of right objects.
   * @return a list of pairs of equivalent-indexed elements.
   */
  @Nonnull
  public static <A,B> List<Pair<A,B>> zip(List<? extends A> lhs, List<? extends B> rhs) {
    List<Pair<A,B>> output = new ArrayList<>();
    int shared = Math.min(lhs.size(), rhs.size());
    for (int i = 0; i < shared; i++) {
      output.add(Pair.of(lhs.get(i), rhs.get(i)));
    }
    return output;
  }
  /**
   * Takes a sublist, whether there are items inside or not, avoiding out-of-bounds errors.
   * Note that ends are exclusive in Java's subList and here too.
   *
   * @param input the list to splice.
   * @param start the start (may be negative)
   * @param end the end (may be above input.size());
   * @param <T> the type of the list.
   * @return a sublist approximating the request as best as possible.
   */
  @Nonnull
  public static <T> List<T> slice(@Nonnull List<T> input, int start, int end) {
    // Clamp both ends into [0, input.size()] before delegating to subList.
    int realStart = Math.min(Math.max(0, start), input.size());
    int realEnd = Math.min(end, input.size());
    return input.subList(realStart, realEnd);
  }
  /**
   * Takes a sublist, whether there are items inside or not, avoiding out-of-bounds errors.
   * Note that ends are exclusive in Java's subList and here too.
   *
   * @param input the list to splice.
   * @param start the start (may be negative)
   * @param <T> the type of the list.
   * @return a sublist approximating the request as best as possible.
   */
  @Nonnull
  public static <T> List<T> slice(@Nonnull List<T> input, int start) {
    int realEnd = input.size();
    int realStart = Math.min(Math.max(0, start), realEnd);
    return input.subList(realStart, realEnd);
  }
  /**
   * When you have some set of items and you want to repeat them until you have at least X of them.
   * The result may overshoot wanted by up to input.size()-1 elements, since whole copies are appended.
   * @param input possibly small number of items.
   * @param wanted the number of items to cycle these to at least.
   * @param <T> the item type.
   * @return a list of input repeated 0+ times so that the total length >= wanted.
   */
  @Nonnull
  public static <T> List<T> repeatUntilAtLeast(@Nonnull List<T> input, int wanted) {
    if(input.size() == 0) throw new IllegalArgumentException("Can't repeat zero items!");
    // Already long enough: return the original (no copy).
    if(input.size() >= wanted) return input;
    ArrayList<T> output = new ArrayList<>(wanted);
    while(output.size() < wanted) {
      output.addAll(input);
    }
    return output;
  }
  /** @return the last element of the list, or null if it is empty. */
  @Nullable
  public static <T> T getLast(List<T> input) {
    if(input.isEmpty()) return null;
    return input.get(input.size() - 1);
  }
  /** Eagerly map a collection into a new ArrayList (contrast with {@link #lazyMap}). */
  @Nonnull
  public static <B, T> ArrayList<B> map(@Nonnull Collection<T> input, @Nonnull TransformFn<T, B> mapper) {
    ArrayList<B> output = new ArrayList<>(input.size());
    for (T x : input) {
      output.add(mapper.transform(x));
    }
    return output;
  }
  /** Build a list of the given size where element i is mapper.transform(i). */
  public static <T> ArrayList<T> fill(int size, @Nonnull TransformFn<Integer, T> mapper) {
    ArrayList<T> output = new ArrayList<>(size);
    for (int i = 0; i < size; i++) {
      output.add(mapper.transform(i));
    }
    return output;
  }
  /**
   * Find every start index at which needle occurs as a contiguous run inside haystack
   * (naive substring-style search), using the given element-equality predicate.
   * Overlapping occurrences are all reported. An empty needle yields no hits.
   */
  public static <T> IntList findAll(List<T> haystack, List<T> needle, BiPredicate<T,T> isEqualsFn) {
    IntList hits = new IntList();
    if(needle.isEmpty()) return hits;
    final T firstQ = needle.get(0);
    for (int i = 0; i < haystack.size(); i++) {
      if(isEqualsFn.test(firstQ, haystack.get(i))) {
        boolean matches = true;
        int k = 1;
        // Extend the match from the candidate start; k tracks progress through needle.
        for (int j = i+1; j < haystack.size() && k < needle.size(); j++, k++) {
          if(!isEqualsFn.test(haystack.get(j), needle.get(k))) {
            matches = false;
            break;
          }
        }
        // k == needle.size() rules out a partial match truncated by the end of haystack.
        if(matches && k == needle.size()) {
          hits.add(i);
        }
      }
    }
    return hits;
  }
  /** As {@link #findAll(List, List, BiPredicate)} with Objects.equals element equality. */
  public static <T> IntList findAll(List<T> haystack, List<T> needle) {
    return findAll(haystack, needle, Objects::equals);
  }
  /** Callback interface for {@link #changeDetect}: start-of-group, each value, end-of-group. */
  public interface GroupHandler<K,V> {
    void onStartKey(@Nonnull K key);
    void onValue(V value);
    void onEndKey(@Nonnull K key);
  }
  /**
   * Walk input in order, grouping consecutive runs of elements that share the same key
   * (per keyFn), and fire start/value/end callbacks on groupFn for each run.
   * NOTE(review): a leading null key never triggers onStartKey/onEndKey for its run.
   */
  public static <T, K> void changeDetect(Iterable<T> input, TransformFn<T, K> keyFn, GroupHandler<K,T> groupFn) {
    K prev = null;
    for (T t : input) {
      K curr = keyFn.transform(t);
      if(!Objects.equals(curr, prev)) {
        // Key changed: close the previous group (if any) and open a new one.
        if(prev != null) {
          groupFn.onEndKey(prev);
        }
        prev = curr;
        groupFn.onStartKey(curr);
      }
      groupFn.onValue(t);
    }
    // Close the final group.
    if(prev != null) {
      groupFn.onEndKey(prev);
    }
  }
}
| |
package org.batfish.representation.juniper;
import com.google.common.annotations.VisibleForTesting;
import java.io.Serializable;
import java.util.LinkedHashSet;
import java.util.Set;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
/**
 * Represents all {@link PsFrom} statements in a single {@link PsTerm}.
 *
 * <p>Single-valued match criteria (color, family, instance, local-preference, metric,
 * unsupported) are held as nullable fields; multi-valued criteria are held as
 * insertion-ordered sets. Every add/set method also flips {@code _atLeastOneFrom},
 * which records whether the term has any "from" condition at all.
 */
public final class PsFroms implements Serializable {
  // True once any add*/set* method has been called; see hasAtLeastOneFrom().
  private boolean _atLeastOneFrom = false;
  private final Set<PsFromAsPath> _fromAsPaths;
  private PsFromColor _fromColor;
  private final Set<PsFromCommunity> _fromCommunities;
  private final Set<PsFromCondition> _fromConditions;
  private PsFromFamily _fromFamily;
  private PsFromInstance _fromInstance;
  private final Set<PsFromInterface> _fromInterfaces;
  private PsFromLocalPreference _fromLocalPreference;
  private PsFromMetric _fromMetric;
  private final Set<PsFromPolicyStatement> _fromPolicyStatements;
  private final Set<PsFromPolicyStatementConjunction> _fromPolicyStatementConjunctions;
  private final Set<PsFromPrefixList> _fromPrefixLists;
  private final Set<PsFromPrefixListFilterLonger> _fromPrefixListFilterLongers;
  private final Set<PsFromPrefixListFilterOrLonger> _fromPrefixListFilterOrLongers;
  private final Set<PsFromProtocol> _fromProtocols;
  private final Set<PsFromRouteFilter> _fromRouteFilters;
  private final Set<PsFromTag> _fromTags;
  private PsFromUnsupported _fromUnsupported;
  PsFroms() {
    // LinkedHashSet keeps the froms in configuration (insertion) order.
    _fromAsPaths = new LinkedHashSet<>();
    _fromCommunities = new LinkedHashSet<>();
    _fromConditions = new LinkedHashSet<>();
    _fromInterfaces = new LinkedHashSet<>();
    _fromPolicyStatements = new LinkedHashSet<>();
    _fromPolicyStatementConjunctions = new LinkedHashSet<>();
    _fromPrefixLists = new LinkedHashSet<>();
    _fromPrefixListFilterLongers = new LinkedHashSet<>();
    _fromPrefixListFilterOrLongers = new LinkedHashSet<>();
    _fromProtocols = new LinkedHashSet<>();
    _fromRouteFilters = new LinkedHashSet<>();
    _fromTags = new LinkedHashSet<>();
  }
  public void addFromAsPath(@Nonnull PsFromAsPath fromAsPath) {
    _atLeastOneFrom = true;
    _fromAsPaths.add(fromAsPath);
  }
  public void addFromCommunity(@Nonnull PsFromCommunity fromCommunity) {
    _atLeastOneFrom = true;
    _fromCommunities.add(fromCommunity);
  }
  public void addFromCondition(@Nonnull PsFromCondition fromCondition) {
    _atLeastOneFrom = true;
    _fromConditions.add(fromCondition);
  }
  public void addFromInterface(@Nonnull PsFromInterface fromInterface) {
    _atLeastOneFrom = true;
    _fromInterfaces.add(fromInterface);
  }
  public void addFromPolicyStatement(@Nonnull PsFromPolicyStatement fromPolicyStatement) {
    _atLeastOneFrom = true;
    _fromPolicyStatements.add(fromPolicyStatement);
  }
  public void addFromPolicyStatementConjunction(
      @Nonnull PsFromPolicyStatementConjunction fromPolicyStatementConjunction) {
    _atLeastOneFrom = true;
    _fromPolicyStatementConjunctions.add(fromPolicyStatementConjunction);
  }
  public void addFromPrefixList(@Nonnull PsFromPrefixList fromPrefixList) {
    _atLeastOneFrom = true;
    _fromPrefixLists.add(fromPrefixList);
  }
  public void addFromPrefixListFilterLonger(
      @Nonnull PsFromPrefixListFilterLonger fromPrefixListFilterLonger) {
    _atLeastOneFrom = true;
    _fromPrefixListFilterLongers.add(fromPrefixListFilterLonger);
  }
  public void addFromPrefixListFilterOrLonger(
      @Nonnull PsFromPrefixListFilterOrLonger fromPrefixListFilterOrLonger) {
    _atLeastOneFrom = true;
    _fromPrefixListFilterOrLongers.add(fromPrefixListFilterOrLonger);
  }
  public void addFromProtocol(@Nonnull PsFromProtocol fromProtocol) {
    _atLeastOneFrom = true;
    _fromProtocols.add(fromProtocol);
  }
  public void addFromRouteFilter(@Nonnull PsFromRouteFilter fromRouteFilter) {
    _atLeastOneFrom = true;
    _fromRouteFilters.add(fromRouteFilter);
  }
  public void addFromTag(@Nonnull PsFromTag fromTag) {
    _atLeastOneFrom = true;
    _fromTags.add(fromTag);
  }
  // Getters below expose the internal (mutable) sets directly; most are
  // package-private, public ones are for tests (@VisibleForTesting).
  @Nonnull
  Set<PsFromAsPath> getFromAsPaths() {
    return _fromAsPaths;
  }
  @VisibleForTesting
  public @Nullable PsFromColor getFromColor() {
    return _fromColor;
  }
  @Nonnull
  Set<PsFromCommunity> getFromCommunities() {
    return _fromCommunities;
  }
  @VisibleForTesting
  public @Nonnull Set<PsFromCondition> getFromConditions() {
    return _fromConditions;
  }
  @Nullable
  PsFromFamily getFromFamily() {
    return _fromFamily;
  }
  PsFromInstance getFromInstance() {
    return _fromInstance;
  }
  @Nonnull
  Set<PsFromInterface> getFromInterfaces() {
    return _fromInterfaces;
  }
  @VisibleForTesting
  public @Nullable PsFromLocalPreference getFromLocalPreference() {
    return _fromLocalPreference;
  }
  @Nullable
  PsFromMetric getFromMetric() {
    return _fromMetric;
  }
  @Nonnull
  Set<PsFromPolicyStatement> getFromPolicyStatements() {
    return _fromPolicyStatements;
  }
  @Nonnull
  Set<PsFromPolicyStatementConjunction> getFromPolicyStatementConjunctions() {
    return _fromPolicyStatementConjunctions;
  }
  @Nonnull
  Set<PsFromPrefixList> getFromPrefixLists() {
    return _fromPrefixLists;
  }
  @Nonnull
  Set<PsFromPrefixListFilterLonger> getFromPrefixListFilterLongers() {
    return _fromPrefixListFilterLongers;
  }
  @Nonnull
  Set<PsFromPrefixListFilterOrLonger> getFromPrefixListFilterOrLongers() {
    return _fromPrefixListFilterOrLongers;
  }
  @Nonnull
  Set<PsFromProtocol> getFromProtocols() {
    return _fromProtocols;
  }
  @Nonnull
  Set<PsFromRouteFilter> getFromRouteFilters() {
    return _fromRouteFilters;
  }
  @VisibleForTesting
  public @Nonnull Set<PsFromTag> getFromTags() {
    return _fromTags;
  }
  @Nullable
  PsFromUnsupported getFromUnsupported() {
    return _fromUnsupported;
  }
  /** True if any add*/set* method has ever been called on this instance. */
  boolean hasAtLeastOneFrom() {
    return _atLeastOneFrom;
  }
  public void setFromColor(@Nonnull PsFromColor fromColor) {
    _atLeastOneFrom = true;
    _fromColor = fromColor;
  }
  public void setFromFamily(@Nonnull PsFromFamily fromFamily) {
    _atLeastOneFrom = true;
    _fromFamily = fromFamily;
  }
  public void setFromInstance(@Nonnull PsFromInstance fromInstance) {
    _atLeastOneFrom = true;
    _fromInstance = fromInstance;
  }
  public void setFromLocalPreference(@Nonnull PsFromLocalPreference fromLocalPreference) {
    _atLeastOneFrom = true;
    _fromLocalPreference = fromLocalPreference;
  }
  public void setFromMetric(@Nonnull PsFromMetric fromMetric) {
    _atLeastOneFrom = true;
    _fromMetric = fromMetric;
  }
  public void setFromUnsupported(@Nonnull PsFromUnsupported fromUnsupported) {
    _atLeastOneFrom = true;
    _fromUnsupported = fromUnsupported;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import java.io.File;
import java.util.AbstractMap;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ConcurrentNavigableMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import com.google.common.base.Throwables;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.db.commitlog.CommitLog;
import org.apache.cassandra.db.commitlog.ReplayPosition;
import org.apache.cassandra.db.composites.CellNameType;
import org.apache.cassandra.db.index.SecondaryIndexManager;
import org.apache.cassandra.dht.LongToken;
import org.apache.cassandra.io.sstable.SSTableReader;
import org.apache.cassandra.io.sstable.SSTableWriter;
import org.apache.cassandra.io.sstable.metadata.MetadataCollector;
import org.apache.cassandra.io.util.DiskAwareRunnable;
import org.apache.cassandra.service.ActiveRepairService;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.ObjectSizes;
import org.apache.cassandra.utils.concurrent.OpOrder;
import org.apache.cassandra.utils.memory.*;
/**
 * An in-memory write-back buffer for a single ColumnFamilyStore.
 *
 * <p>Writes land in a ConcurrentSkipListMap of partitions; a write barrier
 * (set via {@link #setDiscarding}) directs new writes away from this memtable
 * during a switch, and {@link FlushRunnable} serializes the contents to an
 * sstable. Memory accounting goes through a {@link MemtableAllocator} drawn
 * from the global {@code MEMORY_POOL}.
 */
public class Memtable
{
    private static final Logger logger = LoggerFactory.getLogger(Memtable.class);
    static final MemtablePool MEMORY_POOL = DatabaseDescriptor.getMemtableAllocatorPool();
    // Per-row heap overhead, measured empirically at class-load time; sample size is tunable.
    private static final int ROW_OVERHEAD_HEAP_SIZE = estimateRowOverhead(Integer.valueOf(System.getProperty("cassandra.memtable_row_overhead_computation_step", "100000")));
    private final MemtableAllocator allocator;
    private final AtomicLong liveDataSize = new AtomicLong(0);
    private final AtomicLong currentOperations = new AtomicLong(0);
    // the write barrier for directing writes to this memtable during a switch
    private volatile OpOrder.Barrier writeBarrier;
    // the last ReplayPosition owned by this Memtable; all ReplayPositions lower are owned by this or an earlier Memtable
    private final AtomicReference<ReplayPosition> lastReplayPosition = new AtomicReference<>();
    // the "first" ReplayPosition owned by this Memtable; this is inaccurate, and only used as a convenience to prevent CLSM flushing wantonly
    private final ReplayPosition minReplayPosition = CommitLog.instance.getContext();
    // We index the memtable by RowPosition only for the purpose of being able
    // to select key range using Token.KeyBound. However put() ensures that we
    // actually only store DecoratedKey.
    private final ConcurrentNavigableMap<RowPosition, AtomicBTreeColumns> rows = new ConcurrentSkipListMap<>();
    public final ColumnFamilyStore cfs;
    private final long creationTime = System.currentTimeMillis();
    private final long creationNano = System.nanoTime();
    // Record the comparator of the CFS at the creation of the memtable. This
    // is only used when a user update the CF comparator, to know if the
    // memtable was created with the new or old comparator.
    public final CellNameType initialComparator;
    public Memtable(ColumnFamilyStore cfs)
    {
        this.cfs = cfs;
        this.allocator = MEMORY_POOL.newAllocator();
        this.initialComparator = cfs.metadata.comparator;
        this.cfs.scheduleFlush();
    }
    public MemtableAllocator getAllocator()
    {
        return allocator;
    }
    /** @return total serialized bytes of live data currently held. */
    public long getLiveDataSize()
    {
        return liveDataSize.get();
    }
    /** @return count of write operations applied to this memtable. */
    public long getOperations()
    {
        return currentOperations.get();
    }
    // Begin redirecting new writes away from this memtable in preparation for flush.
    void setDiscarding(OpOrder.Barrier writeBarrier, ReplayPosition minLastReplayPosition)
    {
        assert this.writeBarrier == null;
        this.lastReplayPosition.set(minLastReplayPosition);
        this.writeBarrier = writeBarrier;
        allocator.setDiscarding();
    }
    void setDiscarded()
    {
        allocator.setDiscarded();
    }
    /** @return true if writes in opGroup should still be applied to this memtable. */
    public boolean accepts(OpOrder.Group opGroup)
    {
        OpOrder.Barrier barrier = this.writeBarrier;
        return barrier == null || barrier.isAfter(opGroup);
    }
    public boolean isLive()
    {
        return allocator.isLive();
    }
    public boolean isClean()
    {
        return rows.isEmpty();
    }
    public boolean isCleanAfter(ReplayPosition position)
    {
        return isClean() || (position != null && minReplayPosition.compareTo(position) >= 0);
    }
    /**
     * @return true if this memtable is expired. Expiration time is determined by CF's memtable_flush_period_in_ms.
     */
    public boolean isExpired()
    {
        int period = cfs.metadata.getMemtableFlushPeriod();
        return period > 0 && (System.nanoTime() - creationNano >= TimeUnit.MILLISECONDS.toNanos(period));
    }
    /**
     * Should only be called by ColumnFamilyStore.apply via Keyspace.apply, which supplies the appropriate
     * OpOrdering.
     *
     * replayPosition should only be null if this is a secondary index, in which case it is *expected* to be null
     */
    void put(DecoratedKey key, ColumnFamily cf, SecondaryIndexManager.Updater indexer, OpOrder.Group opGroup, ReplayPosition replayPosition)
    {
        if (replayPosition != null && writeBarrier != null)
        {
            // if the writeBarrier is set, we want to maintain lastReplayPosition; this is an optimisation to avoid
            // casing it for every write, but still ensure it is correct when writeBarrier.await() completes.
            // CAS retry loop: advance lastReplayPosition monotonically to replayPosition.
            while (true)
            {
                ReplayPosition last = lastReplayPosition.get();
                if (last.compareTo(replayPosition) >= 0)
                    break;
                if (lastReplayPosition.compareAndSet(last, replayPosition))
                    break;
            }
        }
        AtomicBTreeColumns previous = rows.get(key);
        if (previous == null)
        {
            AtomicBTreeColumns empty = cf.cloneMeShallow(AtomicBTreeColumns.factory, false);
            final DecoratedKey cloneKey = allocator.clone(key, opGroup);
            // We'll add the columns later. This avoids wasting works if we get beaten in the putIfAbsent
            previous = rows.putIfAbsent(cloneKey, empty);
            if (previous == null)
            {
                previous = empty;
                // allocate the row overhead after the fact; this saves over allocating and having to free after, but
                // means we can overshoot our declared limit.
                int overhead = (int) (cfs.partitioner.getHeapSizeOf(key.getToken()) + ROW_OVERHEAD_HEAP_SIZE);
                allocator.onHeap().allocate(overhead, opGroup);
            }
            else
            {
                // Lost the race: another writer inserted the row first, so release our key clone.
                allocator.reclaimer().reclaimImmediately(cloneKey);
            }
        }
        liveDataSize.addAndGet(previous.addAllWithSizeDelta(cf, allocator, opGroup, indexer));
        currentOperations.addAndGet(cf.getColumnCount() + (cf.isMarkedForDelete() ? 1 : 0) + cf.deletionInfo().rangeCount());
    }
    // for debugging
    public String contents()
    {
        StringBuilder builder = new StringBuilder();
        builder.append("{");
        for (Map.Entry<RowPosition, AtomicBTreeColumns> entry : rows.entrySet())
        {
            builder.append(entry.getKey()).append(": ").append(entry.getValue()).append(", ");
        }
        builder.append("}");
        return builder.toString();
    }
    public FlushRunnable flushRunnable()
    {
        return new FlushRunnable(lastReplayPosition.get());
    }
    public String toString()
    {
        return String.format("Memtable-%s@%s(%s serialized bytes, %s ops, %.0f%%/%.0f%% of on/off-heap limit)",
                             cfs.name, hashCode(), liveDataSize, currentOperations, 100 * allocator.onHeap().ownershipRatio(), 100 * allocator.offHeap().ownershipRatio());
    }
    /**
     * @param startWith Include data in the result from and including this key and to the end of the memtable
     * @return An iterator of entries with the data from the start key
     */
    public Iterator<Map.Entry<DecoratedKey, ColumnFamily>> getEntryIterator(final RowPosition startWith, final RowPosition stopAt)
    {
        return new Iterator<Map.Entry<DecoratedKey, ColumnFamily>>()
        {
            // A minimum stopAt means "to the end"; otherwise take the inclusive sub-range.
            private Iterator<? extends Map.Entry<? extends RowPosition, AtomicBTreeColumns>> iter = stopAt.isMinimum(cfs.partitioner)
                                                                                                  ? rows.tailMap(startWith).entrySet().iterator()
                                                                                                  : rows.subMap(startWith, true, stopAt, true).entrySet().iterator();
            private Map.Entry<? extends RowPosition, ? extends ColumnFamily> currentEntry;
            public boolean hasNext()
            {
                return iter.hasNext();
            }
            public Map.Entry<DecoratedKey, ColumnFamily> next()
            {
                Map.Entry<? extends RowPosition, ? extends ColumnFamily> entry = iter.next();
                // Actual stored key should be true DecoratedKey
                assert entry.getKey() instanceof DecoratedKey;
                if (MEMORY_POOL.needToCopyOnHeap())
                {
                    // Off-heap data may be reclaimed; copy key and cells onto the heap before handing out.
                    DecoratedKey key = (DecoratedKey) entry.getKey();
                    key = new BufferDecoratedKey(key.getToken(), HeapAllocator.instance.clone(key.getKey()));
                    ColumnFamily cells = ArrayBackedSortedColumns.localCopy(entry.getValue(), HeapAllocator.instance);
                    entry = new AbstractMap.SimpleImmutableEntry<>(key, cells);
                }
                // Store the reference to the current entry so that remove() can update the current size.
                currentEntry = entry;
                // Object cast is required since otherwise we can't turn RowPosition into DecoratedKey
                return (Map.Entry<DecoratedKey, ColumnFamily>) entry;
            }
            public void remove()
            {
                iter.remove();
                liveDataSize.addAndGet(-currentEntry.getValue().dataSize());
                currentEntry = null;
            }
        };
    }
    public ColumnFamily getColumnFamily(DecoratedKey key)
    {
        return rows.get(key);
    }
    public long creationTime()
    {
        return creationTime;
    }
    public ReplayPosition getLastReplayPosition()
    {
        return lastReplayPosition.get();
    }
    /** Serializes this memtable's contents to a new sstable on the chosen disk. */
    class FlushRunnable extends DiskAwareRunnable
    {
        private final ReplayPosition context;
        private final long estimatedSize;
        FlushRunnable(ReplayPosition context)
        {
            this.context = context;
            long keySize = 0;
            for (RowPosition key : rows.keySet())
            {
                // make sure we don't write non-sensical keys
                assert key instanceof DecoratedKey;
                keySize += ((DecoratedKey)key).getKey().remaining();
            }
            estimatedSize = (long) ((keySize // index entries
                                    + keySize // keys in data file
                                    + liveDataSize.get()) // data
                                    * 1.2); // bloom filter and row index overhead
        }
        public long getExpectedWriteSize()
        {
            return estimatedSize;
        }
        protected void runWith(File sstableDirectory) throws Exception
        {
            assert sstableDirectory != null : "Flush task is not bound to any disk";
            SSTableReader sstable = writeSortedContents(context, sstableDirectory);
            cfs.replaceFlushed(Memtable.this, sstable);
        }
        protected Directories getDirectories()
        {
            return cfs.directories;
        }
        // Write all partitions to a temp sstable; returns null if nothing needed to be retained.
        private SSTableReader writeSortedContents(ReplayPosition context, File sstableDirectory)
        throws ExecutionException, InterruptedException
        {
            logger.info("Writing {}", Memtable.this.toString());
            SSTableReader ssTable;
            // errors when creating the writer that may leave empty temp files.
            SSTableWriter writer = createFlushWriter(cfs.getTempSSTablePath(sstableDirectory));
            try
            {
                // (we can't clear out the map as-we-go to free up memory,
                //  since the memtable is being used for queries in the "pending flush" category)
                for (Map.Entry<RowPosition, AtomicBTreeColumns> entry : rows.entrySet())
                {
                    ColumnFamily cf = entry.getValue();
                    if (cf.isMarkedForDelete() && cf.hasColumns())
                    {
                        // When every node is up, there's no reason to write batchlog data out to sstables
                        // (which in turn incurs cost like compaction) since the BL write + delete cancel each other out,
                        // and BL data is strictly local, so we don't need to preserve tombstones for repair.
                        // If we have a data row + row level tombstone, then writing it is effectively an expensive no-op so we skip it.
                        // See CASSANDRA-4667.
                        if (cfs.name.equals(SystemKeyspace.BATCHLOG_CF) && cfs.keyspace.getName().equals(Keyspace.SYSTEM_KS))
                            continue;
                    }
                    if (!cf.isEmpty())
                        writer.append((DecoratedKey)entry.getKey(), cf);
                }
                if (writer.getFilePointer() > 0)
                {
                    writer.isolateReferences();
                    // temp sstables should contain non-repaired data.
                    ssTable = writer.closeAndOpenReader();
                    logger.info(String.format("Completed flushing %s (%d bytes) for commitlog position %s",
                                              ssTable.getFilename(), new File(ssTable.getFilename()).length(), context));
                }
                else
                {
                    // Nothing was written; discard the empty temp file.
                    writer.abort();
                    ssTable = null;
                    logger.info("Completed flushing; nothing needed to be retained.  Commitlog position was {}",
                                context);
                }
                return ssTable;
            }
            catch (Throwable e)
            {
                writer.abort();
                throw Throwables.propagate(e);
            }
        }
        public SSTableWriter createFlushWriter(String filename) throws ExecutionException, InterruptedException
        {
            MetadataCollector sstableMetadataCollector = new MetadataCollector(cfs.metadata.comparator).replayPosition(context);
            return new SSTableWriter(filename,
                                     rows.size(),
                                     ActiveRepairService.UNREPAIRED_SSTABLE,
                                     cfs.metadata,
                                     cfs.partitioner,
                                     sstableMetadataCollector);
        }
    }
    // Empirically estimate the per-row heap overhead by inserting `count` minimal rows
    // into a skip-list map and measuring the average deep size per entry.
    private static int estimateRowOverhead(final int count)
    {
        // calculate row overhead
        final OpOrder.Group group = new OpOrder().start();
        int rowOverhead;
        MemtableAllocator allocator = MEMORY_POOL.newAllocator();
        ConcurrentNavigableMap<RowPosition, Object> rows = new ConcurrentSkipListMap<>();
        final Object val = new Object();
        for (int i = 0 ; i < count ; i++)
            rows.put(allocator.clone(new BufferDecoratedKey(new LongToken((long) i), ByteBufferUtil.EMPTY_BYTE_BUFFER), group), val);
        double avgSize = ObjectSizes.measureDeep(rows) / (double) count;
        // Round to nearest int unless within 0.05 of the floor.
        rowOverhead = (int) ((avgSize - Math.floor(avgSize)) < 0.05 ? Math.floor(avgSize) : Math.ceil(avgSize));
        rowOverhead -= ObjectSizes.measureDeep(new LongToken((long) 0));
        rowOverhead += AtomicBTreeColumns.EMPTY_SIZE;
        allocator.setDiscarding();
        allocator.setDiscarded();
        return rowOverhead;
    }
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.xml.impl.schema;
import com.intellij.codeInsight.daemon.Validator;
import com.intellij.psi.*;
import com.intellij.psi.meta.PsiWritableMetaData;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.xml.*;
import com.intellij.util.ArrayUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.xml.*;
import com.intellij.xml.util.XmlEnumeratedValueReference;
import com.intellij.xml.util.XmlUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * Element descriptor backed by an {@code <xsd:element>} declaration tag in an XML Schema file.
 * Resolves names, namespaces, child element and attribute descriptors from the schema's
 * complex-type model, including substitution groups and {@code xsd:any}/{@code xsd:anyAttribute}.
 *
 * @author Mike
 */
public class XmlElementDescriptorImpl extends XsdEnumerationDescriptor<XmlTag>
implements XmlElementDescriptor, PsiWritableMetaData, Validator<XmlTag>,
XmlElementDescriptorAwareAboutChildren {
// The <xsd:element> tag in the schema file that declares this element.
protected XmlTag myDescriptorTag;
// Lazily computed, cached namespace descriptor of the containing schema; see getNSDescriptor().
protected volatile XmlNSDescriptor NSDescriptor;
// Optional external validator invoked from validate(); set via setValidator().
private volatile @Nullable Validator<XmlTag> myValidator;
@NonNls
public static final String QUALIFIED_ATTR_VALUE = "qualified";
@NonNls
public static final String NONQUALIFIED_ATTR_VALUE = "unqualified";
@NonNls
private static final String ELEMENT_FORM_DEFAULT = "elementFormDefault";
public XmlElementDescriptorImpl(@Nullable XmlTag descriptorTag) {
myDescriptorTag = descriptorTag;
}
// No-arg constructor required for meta-data instantiation; init(PsiElement) supplies the tag.
public XmlElementDescriptorImpl() {}
@Override
public XmlTag getDeclaration(){
return myDescriptorTag;
}
// Returns the element name as it should appear at the given usage context: qualified with the
// context's namespace prefix unless the schema's elementFormDefault ("unqualified" or absent)
// dictates a local name for non-root declarations.
@Override
public String getName(PsiElement context){
String value = myDescriptorTag.getAttributeValue("name");
if(context instanceof XmlElement){
final String namespace = getNamespaceByContext(context);
final XmlTag tag = PsiTreeUtil.getParentOfType(context, XmlTag.class, false);
if(tag != null){
final String namespacePrefix = tag.getPrefixByNamespace(namespace);
if (namespacePrefix != null && namespacePrefix.length() > 0) {
final XmlTag rootTag = ((XmlFile)myDescriptorTag.getContainingFile()).getRootTag();
String elementFormDefault;
if (rootTag != null &&
( NONQUALIFIED_ATTR_VALUE.equals(elementFormDefault = rootTag.getAttributeValue(ELEMENT_FORM_DEFAULT)) || elementFormDefault == null /*unqualified is default*/) &&
tag.getNamespaceByPrefix("").isEmpty()
&& myDescriptorTag.getParentTag() != rootTag
) {
value = XmlUtil.findLocalNameByQualifiedName(value);
} else {
value = namespacePrefix + ":" + XmlUtil.findLocalNameByQualifiedName(value);
}
}
}
}
return value;
}
/** getter for _local_ name */
@Override
public String getName() {
return XmlUtil.findLocalNameByQualifiedName(getName(null));
}
public String getNamespaceByContext(PsiElement context){
//while(context != null){
//  if(context instanceof XmlTag){
//    final XmlTag contextTag = ((XmlTag)context);
//    final XmlNSDescriptorImpl schemaDescriptor = XmlUtil.findXmlNSDescriptorByType(contextTag);
//    if (schemaDescriptor != null) {
//      return schemaDescriptor.getDefaultNamespace();
//    }
//  }
//  context = context.getContext();
//}
return getNamespace();
}
// Resolves this element's namespace URI: the schema's default namespace when the declared
// name has no prefix, otherwise the URI bound to that prefix at the declaration tag.
public String getNamespace(){
String name = getName();
if (name == null) return XmlUtil.EMPTY_URI;
if (getNSDescriptor() == null || myDescriptorTag == null) return XmlUtil.EMPTY_URI;
final String namespacePrefix = XmlUtil.findPrefixByQualifiedName(name);
return namespacePrefix.isEmpty() ?
getDefaultNamespace() :
myDescriptorTag.getNamespaceByPrefix(namespacePrefix);
}
@Override
public void init(PsiElement element){
// Drop the cached NS descriptor when re-initialized with a different declaration tag.
if (myDescriptorTag!=element && myDescriptorTag!=null) {
NSDescriptor = null;
}
myDescriptorTag = (XmlTag) element;
}
@Override
public Object[] getDependences(){
return new Object[]{myDescriptorTag};
}
// Context-sensitive NS descriptor lookup: for a context tag in the same namespace, prefer the
// tag's own (possibly overriding) descriptor, falling back to the cached one.
private XmlNSDescriptor getNSDescriptor(XmlElement context) {
XmlNSDescriptor nsDescriptor = getNSDescriptor();
if (context instanceof XmlTag && nsDescriptor instanceof XmlNSDescriptorImpl) {
final String defaultNamespace = ((XmlNSDescriptorImpl)nsDescriptor).getDefaultNamespace();
if (XmlUtil.XML_SCHEMA_URI.equals(defaultNamespace)) return nsDescriptor; // do not check for overriden for efficiency
final XmlTag tag = (XmlTag)context;
final String tagNs = tag.getNamespace();
if (tagNs.equals(defaultNamespace)) {
XmlNSDescriptor previousDescriptor = nsDescriptor;
nsDescriptor = tag.getNSDescriptor(tagNs, true);
if (nsDescriptor == null) nsDescriptor = previousDescriptor;
}
}
return nsDescriptor;
}
// Lazily computes and caches the NS descriptor from the declaration's containing document.
// NOTE(review): the validity check re-reads the volatile NSDescriptor field after the local
// null check; if another thread clears the field in between this could NPE — confirm intended.
@Override
public XmlNSDescriptor getNSDescriptor() {
XmlNSDescriptor nsDescriptor = NSDescriptor;
if (nsDescriptor == null || !NSDescriptor.getDeclaration().isValid()) {
final XmlFile file = XmlUtil.getContainingFile(getDeclaration());
if(file == null) return null;
final XmlDocument document = file.getDocument();
if(document == null) return null;
NSDescriptor = nsDescriptor = (XmlNSDescriptor)document.getMetaData();
}
return nsDescriptor;
}
@Override
public XmlElementsGroup getTopGroup() {
TypeDescriptor type = getType();
return type instanceof ComplexTypeDescriptor ? ((ComplexTypeDescriptor)type).getTopGroup() : null;
}
@Nullable
public TypeDescriptor getType() {
return getType(null);
}
// Resolves this element's type descriptor; when the declaration carries no type of its own,
// falls back to the head element of its substitutionGroup and uses that element's type.
@Nullable
public TypeDescriptor getType(XmlElement context) {
final XmlNSDescriptor nsDescriptor = getNSDescriptor(context);
if (!(nsDescriptor instanceof XmlNSTypeDescriptorProvider)) return null;
TypeDescriptor type = ((XmlNSTypeDescriptorProvider) nsDescriptor).getTypeDescriptor(myDescriptorTag);
if (type == null) {
String substAttr = myDescriptorTag.getAttributeValue("substitutionGroup");
if (substAttr != null) {
final String namespacePrefix = XmlUtil.findPrefixByQualifiedName(substAttr);
final String namespace = namespacePrefix.isEmpty() ?
getDefaultNamespace() :
myDescriptorTag.getNamespaceByPrefix(namespacePrefix);
final String local = XmlUtil.findLocalNameByQualifiedName(substAttr);
final XmlElementDescriptorImpl originalElement = (XmlElementDescriptorImpl)((XmlNSDescriptorImpl)getNSDescriptor()).getElementDescriptor(local, namespace);
if (originalElement != null && originalElement != this) {
type = originalElement.getType(context);
}
}
}
return type;
}
// Computes the allowed child element descriptors at the given context tag; when the content
// model admits foreign content (xsd:any / different namespace), root elements of the document's
// schema are merged in as well.
@Override
public XmlElementDescriptor[] getElementsDescriptors(XmlTag context) {
if (context != null) {
final XmlElementDescriptor parentDescriptorByType = XmlUtil.findXmlDescriptorByType(context);
if (parentDescriptorByType != null && !parentDescriptorByType.equals(this)) {
return parentDescriptorByType.getElementsDescriptors(context);
}
}
XmlElementDescriptor[] elementsDescriptors = getElementsDescriptorsImpl(context);
final TypeDescriptor type = getType(context);
if (type instanceof ComplexTypeDescriptor) {
final ComplexTypeDescriptor descriptor = (ComplexTypeDescriptor)type;
PsiFile containingFile = context != null ? context.getContainingFile():null;
if (context != null && !containingFile.isPhysical()) {
containingFile = containingFile.getOriginalFile();
//context = context.getParentTag();
}
String contextNs;
if (context != null &&
descriptor.canContainTag(context.getLocalName(), contextNs = context.getNamespace(), context) &&
(!contextNs.equals(getNamespace()) || descriptor.hasAnyInContentModel()) &&
containingFile instanceof XmlFile) { // JSXmlLiteralExpressionImpl, being an xml element itself, may be contained in non-XML file
final XmlNSDescriptor nsDescriptor = getNSDescriptor();
if (nsDescriptor != null) {
elementsDescriptors = ArrayUtil.mergeArrays(
elementsDescriptors,
nsDescriptor.getRootElementsDescriptors(((XmlFile)containingFile).getDocument())
);
}
}
}
return elementsDescriptors;
}
// Child descriptors straight from the complex type's content model, augmented with
// substitution-group members contributed by other namespaces known at the context tag.
private XmlElementDescriptor[] getElementsDescriptorsImpl(XmlElement context) {
TypeDescriptor type = getType(context);
if (type instanceof ComplexTypeDescriptor) {
ComplexTypeDescriptor typeDescriptor = (ComplexTypeDescriptor)type;
XmlElementDescriptor[] elements = typeDescriptor.getElements(context);
if (context instanceof XmlTag && elements.length > 0) {
String[] namespaces = ((XmlTag)context).knownNamespaces();
if (namespaces.length > 1) {
List<XmlElementDescriptor> result = new ArrayList<>(Arrays.asList(elements));
for (String namespace : namespaces) {
if (namespace.equals(typeDescriptor.getNsDescriptor().getDefaultNamespace())) {
continue;
}
XmlNSDescriptor descriptor = ((XmlTag)context).getNSDescriptor(namespace, false);
if (descriptor instanceof XmlNSDescriptorImpl && ((XmlNSDescriptorImpl)descriptor).hasSubstitutions()) {
for (XmlElementDescriptor element : elements) {
String name = XmlUtil.getLocalName(element.getName(context)).toString();
String s = ((XmlNSDescriptorImpl)element.getNSDescriptor()).getDefaultNamespace();
XmlElementDescriptor[] substitutes = ((XmlNSDescriptorImpl)descriptor).getSubstitutes(name, s);
result.addAll(Arrays.asList(substitutes));
}
}
}
return result.toArray(new XmlElementDescriptor[result.size()]);
}
}
return elements;
}
return EMPTY_ARRAY;
}
// Attribute descriptors from the complex type, extended by anyAttribute-permitted attributes
// from every other namespace known at the context (always probing the xml: namespace once).
@Override
public XmlAttributeDescriptor[] getAttributesDescriptors(final XmlTag context) {
TypeDescriptor type = getType(context);
if (type instanceof ComplexTypeDescriptor) {
ComplexTypeDescriptor typeDescriptor = (ComplexTypeDescriptor)type;
XmlAttributeDescriptor[] attributeDescriptors = typeDescriptor.getAttributes(context);
if (context != null) {
final String contextNs = context.getNamespace();
boolean seenXmlNs = false;
for(String ns:context.knownNamespaces()) {
if (!contextNs.equals(ns) && ns.length() > 0) {
seenXmlNs |= XmlUtil.XML_NAMESPACE_URI.equals(ns);
attributeDescriptors = updateAttributeDescriptorsFromAny(context, typeDescriptor, attributeDescriptors, ns);
}
}
if (!seenXmlNs) {
attributeDescriptors = updateAttributeDescriptorsFromAny(context, typeDescriptor, attributeDescriptors, XmlUtil.XML_NAMESPACE_URI);
}
}
return attributeDescriptors;
}
return XmlAttributeDescriptor.EMPTY;
}
/** <xsd:anyAttribute> directive processed here */
private static XmlAttributeDescriptor[] updateAttributeDescriptorsFromAny(final XmlTag context,
final ComplexTypeDescriptor typeDescriptor,
XmlAttributeDescriptor[] attributeDescriptors,
final String ns) {
if (typeDescriptor.canContainAttribute(ns, null) != ComplexTypeDescriptor.CanContainAttributeType.CanNotContain) {
// anyAttribute found
final XmlNSDescriptor descriptor = context.getNSDescriptor(ns, true);
if (descriptor instanceof XmlNSDescriptorImpl) {
XmlAttributeDescriptor[] rootDescriptors = ((XmlNSDescriptorImpl)descriptor).getRootAttributeDescriptors(context);
attributeDescriptors = ArrayUtil.mergeArrays(attributeDescriptors, rootDescriptors);
}
}
return attributeDescriptors;
}
@Override
public XmlAttributeDescriptor getAttributeDescriptor(String attributeName, final XmlTag context){
return getAttributeDescriptorImpl(attributeName,context);
}
// Resolves a single attribute descriptor; when the match is a wildcard (anyAttribute),
// tries the attribute's own namespace descriptor for a concrete declaration first.
@Nullable
private XmlAttributeDescriptor getAttributeDescriptorImpl(final String attributeName, XmlTag context) {
final String localName = XmlUtil.findLocalNameByQualifiedName(attributeName);
final String namespacePrefix = XmlUtil.findPrefixByQualifiedName(attributeName);
final String namespace = namespacePrefix.isEmpty() ?
getDefaultNamespace() :
context.getNamespaceByPrefix(namespacePrefix);
XmlAttributeDescriptor attribute = getAttribute(localName, namespace, context, attributeName);
if (attribute instanceof AnyXmlAttributeDescriptor) {
final ComplexTypeDescriptor.CanContainAttributeType containAttributeType =
((AnyXmlAttributeDescriptor)attribute).getCanContainAttributeType();
if (containAttributeType != ComplexTypeDescriptor.CanContainAttributeType.CanContainAny && !namespace.isEmpty()) {
final XmlNSDescriptor candidateNSDescriptor = context.getNSDescriptor(namespace, true);
if (candidateNSDescriptor instanceof XmlNSDescriptorImpl) {
final XmlNSDescriptorImpl nsDescriptor = (XmlNSDescriptorImpl)candidateNSDescriptor;
final XmlAttributeDescriptor xmlAttributeDescriptor = nsDescriptor.getAttribute(localName, namespace, context);
if (xmlAttributeDescriptor != null) return xmlAttributeDescriptor;
else {
if (containAttributeType == ComplexTypeDescriptor.CanContainAttributeType.CanContainButDoNotSkip) {
attribute = null;
}
}
}
}
}
return attribute;
}
private String getDefaultNamespace() {
XmlNSDescriptor nsDescriptor = getNSDescriptor();
return nsDescriptor instanceof XmlNSDescriptorImpl ? ((XmlNSDescriptorImpl)nsDescriptor).getDefaultNamespace() : "";
}
@Override
public XmlAttributeDescriptor getAttributeDescriptor(XmlAttribute attribute){
return getAttributeDescriptorImpl(attribute.getName(),attribute.getParent());
}
// Finds a declared attribute descriptor by local + qualified name; falls back to a wildcard
// descriptor when the complex type's anyAttribute admits the namespace.
@Nullable
private XmlAttributeDescriptor getAttribute(String attributeName, String namespace, XmlTag context, String qName) {
XmlAttributeDescriptor[] descriptors = getAttributesDescriptors(context);
for (XmlAttributeDescriptor descriptor : descriptors) {
if (descriptor.getName().equals(attributeName) &&
descriptor.getName(context).equals(qName)
) {
return descriptor;
}
}
TypeDescriptor type = getType(context);
if (type instanceof ComplexTypeDescriptor) {
ComplexTypeDescriptor descriptor = (ComplexTypeDescriptor)type;
final ComplexTypeDescriptor.CanContainAttributeType containAttributeType = descriptor.canContainAttribute(namespace, qName);
if (containAttributeType != ComplexTypeDescriptor.CanContainAttributeType.CanNotContain) {
return new AnyXmlAttributeDescriptor(attributeName, containAttributeType);
}
}
return null;
}
@Override
public int getContentType() {
TypeDescriptor type = getType();
if (type instanceof ComplexTypeDescriptor) {
return ((ComplexTypeDescriptor)type).getContentType();
}
// Simple types (or unresolved ones) are treated as mixed content.
return CONTENT_TYPE_MIXED;
}
@Nullable
public XmlElementDescriptor getElementDescriptor(final String name) {
final String localName = XmlUtil.findLocalNameByQualifiedName(name);
final String namespacePrefix = XmlUtil.findPrefixByQualifiedName(name);
final String namespace = namespacePrefix.isEmpty() ?
getDefaultNamespace() :
myDescriptorTag.getNamespaceByPrefix(namespacePrefix);
return getElementDescriptor(localName, namespace, null, name);
}
// Matches a child element descriptor by local name and namespace, tolerating schemas whose
// targetNamespace differs from the systemId-based namespace; falls back to a wildcard
// descriptor when the content model's xsd:any admits the tag.
@Nullable
protected XmlElementDescriptor getElementDescriptor(final String localName, final String namespace, XmlElement context, String fullName) {
XmlElementDescriptor[] elements = getElementsDescriptorsImpl(context);
for (XmlElementDescriptor element1 : elements) {
final XmlElementDescriptorImpl element = (XmlElementDescriptorImpl)element1;
final String namespaceByContext = element.getNamespaceByContext(context);
if (element.getName().equals(localName)) {
if (namespace == null ||
namespace.equals(namespaceByContext) ||
namespaceByContext.equals(XmlUtil.EMPTY_URI) ||
element.getName(context).equals(fullName) || (namespace.length() == 0) &&
element.getDefaultName().equals(fullName)
) {
return element;
}
else {
final XmlNSDescriptor descriptor = context instanceof XmlTag? ((XmlTag)context).getNSDescriptor(namespace, true) : null;
// schema's targetNamespace could be different from file systemId used as NS
if (descriptor instanceof XmlNSDescriptorImpl) {
if (((XmlNSDescriptorImpl)descriptor).getDefaultNamespace().equals(namespaceByContext)) {
return element;
}
else {
((XmlNSDescriptorImpl)descriptor).getSubstitutes(localName, namespace);
}
}
}
}
}
TypeDescriptor type = getType(context);
if (type instanceof ComplexTypeDescriptor) {
ComplexTypeDescriptor descriptor = (ComplexTypeDescriptor)type;
if (descriptor.canContainTag(localName, namespace, context)) {
return new AnyXmlElementDescriptor(this, getNSDescriptor());
}
}
return null;
}
// Resolves the descriptor for a concrete child tag; xsi:type on the element (or on its parent,
// when askParentDescriptorViaXsi() allows) overrides the schema-derived descriptor.
@Override
public XmlElementDescriptor getElementDescriptor(XmlTag element, XmlTag contextTag){
final XmlElement context = (XmlElement)element.getParent();
XmlElementDescriptor elementDescriptor = getElementDescriptor(
element.getLocalName(),
element.getNamespace(), context,
element.getName()
);
if(elementDescriptor == null || element.getAttributeValue("xsi:type") != null){
final XmlElementDescriptor xmlDescriptorByType = XmlUtil.findXmlDescriptorByType(element);
if (xmlDescriptorByType != null) elementDescriptor = xmlDescriptorByType;
else if (context instanceof XmlTag && ((XmlTag)context).getAttributeValue("xsi:type") != null && askParentDescriptorViaXsi()) {
final XmlElementDescriptor parentXmlDescriptorByType = XmlUtil.findXmlDescriptorByType(((XmlTag)context));
if (parentXmlDescriptorByType != null) {
elementDescriptor = parentXmlDescriptorByType.getElementDescriptor(element, contextTag);
}
}
}
return elementDescriptor;
}
// Subclasses may return false to suppress the parent xsi:type fallback above.
protected boolean askParentDescriptorViaXsi() {
return true;
}
@Override
public String getQualifiedName() {
String ns = getNS();
if (ns != null && !ns.isEmpty()) {
return ns + ":" + getName();
}
return getName();
}
@Nullable
private String getNS(){
return XmlUtil.findNamespacePrefixByURI((XmlFile) myDescriptorTag.getContainingFile(), getNamespace());
}
// Qualified name when the schema declares elementFormDefault="qualified", local name otherwise.
@Override
public String getDefaultName() {
final PsiFile psiFile = myDescriptorTag.getContainingFile();
XmlTag rootTag = psiFile instanceof XmlFile ?((XmlFile)psiFile).getRootTag():null;
if (rootTag != null && QUALIFIED_ATTR_VALUE.equals(rootTag.getAttributeValue(ELEMENT_FORM_DEFAULT))) {
return getQualifiedName();
}
return getName();
}
public boolean isAbstract() {
return isAbstractDeclaration(myDescriptorTag);
}
// True iff the declaration carries abstract="true" (Boolean.valueOf semantics).
public static Boolean isAbstractDeclaration(final XmlTag descriptorTag) {
return Boolean.valueOf(descriptorTag.getAttributeValue("abstract"));
}
@Override
public void setName(String name) throws IncorrectOperationException {
NamedObjectDescriptor.setName(myDescriptorTag, name);
}
public void setValidator(final Validator<XmlTag> validator) {
myValidator = validator;
}
@Override
public void validate(@NotNull XmlTag context, @NotNull ValidationHost host) {
Validator<XmlTag> validator = myValidator;
if (validator != null) {
validator.validate(context, host);
}
}
// Enumeration-value reference for text-only tags; tags with sub-elements or no text get none.
@Override
public PsiReference[] getValueReferences(XmlTag xmlTag, @NotNull String text) {
XmlTagValue value = xmlTag.getValue();
XmlText[] elements = value.getTextElements();
if (elements.length == 0 || xmlTag.getSubTags().length > 0) return PsiReference.EMPTY_ARRAY;
return new PsiReference[] {
new XmlEnumeratedValueReference(xmlTag, this, ElementManipulators.getValueTextRange(xmlTag))
};
}
@Override
public boolean allowElementsFromNamespace(final String namespace, final XmlTag context) {
final TypeDescriptor type = getType(context);
if (type instanceof ComplexTypeDescriptor) {
final ComplexTypeDescriptor typeDescriptor = (ComplexTypeDescriptor)type;
// "a" is a probe name: canContainTag with it effectively tests namespace admissibility.
return typeDescriptor.canContainTag("a", namespace, context) ||
typeDescriptor.getNsDescriptor().hasSubstitutions() ||
XmlUtil.nsFromTemplateFramework(namespace)
;
}
return false;
}
@Override
public String toString() {
String namespace;
try {
namespace = getNamespace();
}
catch (PsiInvalidElementAccessException e) {
namespace = "!!!Invalid!!!";
}
return getName() + " (" + namespace + ")";
}
// Equality is identity of the backing declaration tag.
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
XmlElementDescriptorImpl that = (XmlElementDescriptorImpl)o;
if (myDescriptorTag != null ? !myDescriptorTag.equals(that.myDescriptorTag) : that.myDescriptorTag != null) return false;
return true;
}
@Override
public int hashCode() {
return myDescriptorTag != null ? myDescriptorTag.hashCode() : 0;
}
}
| |
package com.example.ryo.job_employer.models;
import org.codehaus.jackson.annotate.JsonIgnoreProperties;
import java.io.Serializable;
import java.sql.Time;
import java.util.Date;
import java.util.List;
/**
 * Jackson-bound data model for a candidate's resume.
 * NOTE(review): fields are public AND have accessors; Jackson may bind by public field,
 * so field names/visibility form the wire contract — confirm before tightening encapsulation.
 *
 * Created by Administrator on 2015/10/22.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class Resume implements Serializable {
public String get_id() {
return _id;
}
public void set_id(String _id) {
this._id = _id;
}
// Server-side identifier (underscore name matches the JSON property).
public String _id;
public Candidate _candidate;
public int gender;
public String tel;
public CityBodyDown address;
public String name;
public String birth;
// Timestamp of the last edit.
public Date updateEdit;
public String imgName;
public String expectedIndustry;
public String expectedPosition;
public CityBodyDown expectedAddress;
public String selfEvaluation;
public String experience;
public String works;
public String schoolName;
public String getProfessional() {
return professional;
}
public void setProfessional(String professional) {
this.professional = professional;
}
public Candidate get_candidate() {
return _candidate;
}
public void set_candidate(Candidate _candidate) {
this._candidate = _candidate;
}
public int getGender() {
return gender;
}
public void setGender(int gender) {
this.gender = gender;
}
public String getTel() {
return tel;
}
public void setTel(String tel) {
this.tel = tel;
}
public CityBodyDown getAddress() {
return address;
}
public void setAddress(CityBodyDown address) {
this.address = address;
}
public String getBirth() {
return birth;
}
public void setBirth(String birth) {
this.birth = birth;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Date getUpdateEdit() {
return updateEdit;
}
public void setUpdateEdit(Date updateEdit) {
this.updateEdit = updateEdit;
}
public String getImgName() {
return imgName;
}
public void setImgName(String imgName) {
this.imgName = imgName;
}
public String getExpectedIndustry() {
return expectedIndustry;
}
public void setExpectedIndustry(String expectedIndustry) {
this.expectedIndustry = expectedIndustry;
}
public String getExpectedPosition() {
return expectedPosition;
}
public void setExpectedPosition(String expectedPosition) {
this.expectedPosition = expectedPosition;
}
public String getSelfEvaluation() {
return selfEvaluation;
}
public void setSelfEvaluation(String selfEvaluation) {
this.selfEvaluation = selfEvaluation;
}
public CityBodyDown getExpectedAddress() {
return expectedAddress;
}
public void setExpectedAddress(CityBodyDown expectedAddress) {
this.expectedAddress = expectedAddress;
}
public String getWorks() {
return works;
}
public void setWorks(String works) {
this.works = works;
}
public String getExperience() {
return experience;
}
public void setExperience(String experience) {
this.experience = experience;
}
public String getSchoolName() {
return schoolName;
}
public void setSchoolName(String schoolName) {
this.schoolName = schoolName;
}
public String getGraduationTime() {
return graduationTime;
}
public void setGraduationTime(String graduationTime) {
this.graduationTime = graduationTime;
}
public String getGrade() {
return grade;
}
public void setGrade(String grade) {
this.grade = grade;
}
public String getInternshipExprience() {
return internshipExprience;
}
public void setInternshipExprience(String internshipExprience) {
this.internshipExprience = internshipExprience;
}
public int getTestValue() {
return testValue;
}
public void setTestValue(int testValue) {
this.testValue = testValue;
}
public Boolean getBeOpen() {
return beOpen;
}
public void setBeOpen(Boolean beOpen) {
this.beOpen = beOpen;
}
public List<TimeUpdate> getEmployer() {
return employer;
}
public void setEmployer(List<TimeUpdate> employer) {
this.employer = employer;
}
public List<Ignored> getBeIgnored() {
return beIgnored;
}
public void setBeIgnored(List<Ignored> beIgnored) {
this.beIgnored = beIgnored;
}
public String professional;
public String graduationTime;
public String grade;
// NOTE(review): "internshipExprience" is misspelled, but it defines the JSON property
// name — renaming would break the wire format; keep as is.
public String internshipExprience;
public int testValue;
// Whether the resume is publicly visible; boxed Boolean so "unset" is representable as null.
public Boolean beOpen;
public List<TimeUpdate> employer;
public List<Ignored> beIgnored;
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.queryablestate.itcases;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.JobStatus;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.FoldFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.AggregatingState;
import org.apache.flink.api.common.state.AggregatingStateDescriptor;
import org.apache.flink.api.common.state.FoldingState;
import org.apache.flink.api.common.state.FoldingStateDescriptor;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.state.MapState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReducingState;
import org.apache.flink.api.common.state.ReducingStateDescriptor;
import org.apache.flink.api.common.state.State;
import org.apache.flink.api.common.state.StateDescriptor;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Deadline;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeutils.base.StringSerializer;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.client.ClientUtils;
import org.apache.flink.client.program.ClusterClient;
import org.apache.flink.client.program.ProgramInvocationException;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.queryablestate.client.QueryableStateClient;
import org.apache.flink.queryablestate.client.VoidNamespace;
import org.apache.flink.queryablestate.client.VoidNamespaceSerializer;
import org.apache.flink.queryablestate.exceptions.UnknownKeyOrNamespaceException;
import org.apache.flink.runtime.concurrent.FutureUtils;
import org.apache.flink.runtime.concurrent.ScheduledExecutor;
import org.apache.flink.runtime.concurrent.ScheduledExecutorServiceAdapter;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.state.CheckpointListener;
import org.apache.flink.runtime.state.StateBackend;
import org.apache.flink.runtime.testingUtils.TestingUtils;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.QueryableStateStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction;
import org.apache.flink.streaming.api.operators.AbstractStreamOperator;
import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.util.Collector;
import org.apache.flink.util.ExceptionUtils;
import org.apache.flink.util.Preconditions;
import org.apache.flink.util.TestLogger;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import java.time.Duration;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CancellationException;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicLongArray;
import static org.hamcrest.CoreMatchers.containsString;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
* Base class for queryable state integration tests with a configurable state backend.
*/
public abstract class AbstractQueryableStateTestBase extends TestLogger {
private static final Duration TEST_TIMEOUT = Duration.ofSeconds(200L);
private static final long RETRY_TIMEOUT = 50L;
private final ScheduledExecutorService executorService = Executors.newScheduledThreadPool(4);
private final ScheduledExecutor executor = new ScheduledExecutorServiceAdapter(executorService);
/**
* State backend to use.
*/
private StateBackend stateBackend;
/**
* Client shared between all the test.
*/
protected static QueryableStateClient client;
protected static ClusterClient<?> clusterClient;
protected static int maxParallelism;
// Per-test setup: creates a fresh state backend and sanity-checks that the shared
// cluster client was initialized by the concrete subclass before any test runs.
@Before
public void setUp() throws Exception {
// NOTE: do not use a shared instance for all tests as the tests may break
this.stateBackend = createStateBackend();
Assert.assertNotNull(clusterClient);
maxParallelism = 4;
}
/**
 * Creates a state backend instance which is used in the {@link #setUp()} method before each
 * test case, so every test gets its own backend (see the note in {@link #setUp()}).
 *
 * @return a fresh state backend instance for each unit test
 * @throws Exception if the backend cannot be created
 */
protected abstract StateBackend createStateBackend() throws Exception;
/**
 * Runs a simple topology producing random (key, 1) pairs at the sources (where
 * number of keys is in fixed in range 0...numKeys). The records are keyed and
 * a reducing queryable state instance is created, which sums up the records.
 *
 * <p>After submitting the job in detached mode, the QueryableStateCLient is used
 * to query the counts of each key in rounds until all keys have non-zero counts.
 */
@Test
public void testQueryableState() throws Exception {
final Deadline deadline = Deadline.now().plus(TEST_TIMEOUT);
final int numKeys = 256;
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setStateBackend(stateBackend);
env.setParallelism(maxParallelism);
// Very important, because cluster is shared between tests and we
// don't explicitly check that all slots are available before
// submitting.
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));
DataStream<Tuple2<Integer, Long>> source = env.addSource(new TestKeyRangeSource(numKeys));
ReducingStateDescriptor<Tuple2<Integer, Long>> reducingState = new ReducingStateDescriptor<>(
"any-name", new SumReduce(), source.getType());
final String queryName = "hakuna-matata";
source.keyBy(new KeySelector<Tuple2<Integer, Long>, Integer>() {
private static final long serialVersionUID = 7143749578983540352L;
@Override
public Integer getKey(Tuple2<Integer, Long> value) {
return value.f0;
}
}).asQueryableState(queryName, reducingState);
// AutoCancellableJob cancels the job on close, even if assertions below fail.
try (AutoCancellableJob autoCancellableJob = new AutoCancellableJob(deadline, clusterClient, env)) {
final JobID jobId = autoCancellableJob.getJobId();
final JobGraph jobGraph = autoCancellableJob.getJobGraph();
ClientUtils.submitJob(clusterClient, jobGraph);
final AtomicLongArray counts = new AtomicLongArray(numKeys);
final List<CompletableFuture<ReducingState<Tuple2<Integer, Long>>>> futures = new ArrayList<>(numKeys);
boolean allNonZero = false;
// Poll each key in rounds until every key has been observed with a non-zero count
// or the deadline expires.
while (!allNonZero && deadline.hasTimeLeft()) {
allNonZero = true;
futures.clear();
for (int i = 0; i < numKeys; i++) {
final int key = i;
if (counts.get(key) > 0L) {
// Skip this one
continue;
} else {
allNonZero = false;
}
CompletableFuture<ReducingState<Tuple2<Integer, Long>>> result = getKvState(
deadline,
client,
jobId,
queryName,
key,
BasicTypeInfo.INT_TYPE_INFO,
reducingState,
false,
executor);
result.thenAccept(response -> {
try {
Tuple2<Integer, Long> res = response.get();
counts.set(key, res.f1);
assertEquals("Key mismatch", key, res.f0.intValue());
} catch (Exception e) {
Assert.fail(e.getMessage());
}
});
futures.add(result);
}
// wait for all the futures to complete
CompletableFuture
.allOf(futures.toArray(new CompletableFuture<?>[futures.size()]))
.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);
}
assertTrue("Not all keys are non-zero", allNonZero);
// All should be non-zero
for (int i = 0; i < numKeys; i++) {
long count = counts.get(i);
assertTrue("Count at position " + i + " is " + count, count > 0);
}
}
}
/**
 * Tests that duplicate query registrations fail the job at the JobManager.
 *
 * <p>Two operators register queryable state under the same name; submitting the
 * resulting job must fail with a "has already been registered" message.
 */
@Test(timeout = 60_000)
public void testDuplicateRegistrationFailsJob() throws Exception {
final int numKeys = 256;
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setStateBackend(stateBackend);
env.setParallelism(maxParallelism);
// Very important, because cluster is shared between tests and we
// don't explicitly check that all slots are available before
// submitting.
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));
DataStream<Tuple2<Integer, Long>> source = env.addSource(new TestKeyRangeSource(numKeys));
// Reducing state
ReducingStateDescriptor<Tuple2<Integer, Long>> reducingState = new ReducingStateDescriptor<>(
"any-name",
new SumReduce(),
source.getType());
final String queryName = "duplicate-me";
// The returned streams are unused; registering the queryable state name is the
// side effect under test.
final QueryableStateStream<Integer, Tuple2<Integer, Long>> queryableState =
source.keyBy(new KeySelector<Tuple2<Integer, Long>, Integer>() {
private static final long serialVersionUID = -4126824763829132959L;
@Override
public Integer getKey(Tuple2<Integer, Long> value) {
return value.f0;
}
}).asQueryableState(queryName, reducingState);
// Second registration under the same name — this is the duplicate that must fail the job.
final QueryableStateStream<Integer, Tuple2<Integer, Long>> duplicate =
source.keyBy(new KeySelector<Tuple2<Integer, Long>, Integer>() {
private static final long serialVersionUID = -6265024000462809436L;
@Override
public Integer getKey(Tuple2<Integer, Long> value) {
return value.f0;
}
}).asQueryableState(queryName);
// Submit the job graph
final JobGraph jobGraph = env.getStreamGraph().getJobGraph();
boolean caughtException = false;
try {
ClientUtils.submitJobAndWaitForResult(clusterClient, jobGraph, AbstractQueryableStateTestBase.class.getClassLoader());
} catch (ProgramInvocationException e) {
// The duplicate registration is detected at the JobManager and surfaces as a
// ProgramInvocationException whose cause chain names the conflicting state.
String failureCause = ExceptionUtils.stringifyException(e);
assertThat(failureCause, containsString("KvState with name '" + queryName + "' has already been registered by another operator"));
caughtException = true;
}
assertTrue(caughtException);
}
/**
 * Tests simple value state queryable state instance. Each source emits
 * (subtaskIndex, 0)..(subtaskIndex, numElements) tuples, which are then
 * queried. The test succeeds after each subtask index is queried with
 * value numElements (the latest element updated the state).
 */
@Test
public void testValueState() throws Exception {
final Deadline deadline = Deadline.now().plus(TEST_TIMEOUT);
final long numElements = 1024L;
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setStateBackend(stateBackend);
env.setParallelism(maxParallelism);
// Very important, because cluster is shared between tests and we
// don't explicitly check that all slots are available before
// submitting.
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));
DataStream<Tuple2<Integer, Long>> source = env.addSource(new TestAscendingValueSource(numElements));
// Value state
ValueStateDescriptor<Tuple2<Integer, Long>> valueState = new ValueStateDescriptor<>("any", source.getType());
source.keyBy(new KeySelector<Tuple2<Integer, Long>, Integer>() {
private static final long serialVersionUID = 7662520075515707428L;
@Override
public Integer getKey(Tuple2<Integer, Long> value) {
return value.f0;
}
}).asQueryableState("hakuna", valueState);
// AutoCancellableJob cancels the job and waits for termination when closed.
try (AutoCancellableJob autoCancellableJob = new AutoCancellableJob(deadline, clusterClient, env)) {
final JobID jobId = autoCancellableJob.getJobId();
final JobGraph jobGraph = autoCancellableJob.getJobGraph();
ClientUtils.submitJob(clusterClient, jobGraph);
executeValueQuery(deadline, client, jobId, "hakuna", valueState, numElements);
}
}
/**
 * Tests that the correct exception is thrown if the query
 * contains a wrong jobId or wrong queryable state name.
 *
 * <p>NOTE(review): this test is currently {@code @Ignore}d — confirm whether it
 * is still expected to be disabled.
 */
@Test
@Ignore
public void testWrongJobIdAndWrongQueryableStateName() throws Exception {
final Deadline deadline = Deadline.now().plus(TEST_TIMEOUT);
final long numElements = 1024L;
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setStateBackend(stateBackend);
env.setParallelism(maxParallelism);
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));
DataStream<Tuple2<Integer, Long>> source = env.addSource(new TestAscendingValueSource(numElements));
ValueStateDescriptor<Tuple2<Integer, Long>> valueState = new ValueStateDescriptor<>("any", source.getType());
source.keyBy(new KeySelector<Tuple2<Integer, Long>, Integer>() {
private static final long serialVersionUID = 7662520075515707428L;
@Override
public Integer getKey(Tuple2<Integer, Long> value) {
return value.f0;
}
}).asQueryableState("hakuna", valueState);
try (AutoCancellableJob closableJobGraph = new AutoCancellableJob(deadline, clusterClient, env)) {
ClientUtils.submitJob(clusterClient, closableJobGraph.getJobGraph());
// Poll until the job is RUNNING; queries against a non-running job would
// fail for the wrong reason.
CompletableFuture<JobStatus> jobStatusFuture =
clusterClient.getJobStatus(closableJobGraph.getJobId());
while (deadline.hasTimeLeft() && !jobStatusFuture.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS).equals(JobStatus.RUNNING)) {
Thread.sleep(50);
jobStatusFuture =
clusterClient.getJobStatus(closableJobGraph.getJobId());
}
assertEquals(JobStatus.RUNNING, jobStatusFuture.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS));
final JobID wrongJobId = new JobID();
CompletableFuture<ValueState<Tuple2<Integer, Long>>> unknownJobFuture = client.getKvState(
wrongJobId, // this is the wrong job id
"hakuna",
0,
BasicTypeInfo.INT_TYPE_INFO,
valueState);
try {
unknownJobFuture.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);
fail(); // by now the request must have failed.
} catch (ExecutionException e) {
// Expect a FlinkJobNotFoundException (wrapped in a RuntimeException) for the unknown job id.
Assert.assertTrue("GOT: " + e.getCause().getMessage(), e.getCause() instanceof RuntimeException);
Assert.assertTrue("GOT: " + e.getCause().getMessage(), e.getCause().getMessage().contains(
"FlinkJobNotFoundException: Could not find Flink job (" + wrongJobId + ")"));
} catch (Exception f) {
fail("Unexpected type of exception: " + f.getMessage());
}
CompletableFuture<ValueState<Tuple2<Integer, Long>>> unknownQSName = client.getKvState(
closableJobGraph.getJobId(),
"wrong-hakuna", // this is the wrong name.
0,
BasicTypeInfo.INT_TYPE_INFO,
valueState);
try {
unknownQSName.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);
fail(); // by now the request must have failed.
} catch (ExecutionException e) {
// Expect an UnknownKvStateLocation for the unknown state name.
Assert.assertTrue("GOT: " + e.getCause().getMessage(), e.getCause() instanceof RuntimeException);
Assert.assertTrue("GOT: " + e.getCause().getMessage(), e.getCause().getMessage().contains(
"UnknownKvStateLocation: No KvStateLocation found for KvState instance with name 'wrong-hakuna'."));
} catch (Exception f) {
fail("Unexpected type of exception: " + f.getMessage());
}
}
}
/**
 * Similar tests as {@link #testValueState()} but before submitting the
 * job, we already issue one request which fails.
 */
@Test
public void testQueryNonStartedJobState() throws Exception {
final Deadline deadline = Deadline.now().plus(TEST_TIMEOUT);
final long numElements = 1024L;
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setStateBackend(stateBackend);
env.setParallelism(maxParallelism);
// Very important, because clusterClient is shared between tests and we
// don't explicitly check that all slots are available before
// submitting.
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));
DataStream<Tuple2<Integer, Long>> source = env.addSource(new TestAscendingValueSource(numElements));
// NOTE(review): three-arg descriptor constructor with a null default value —
// presumably kept deliberately; confirm against the deprecated API it exercises.
ValueStateDescriptor<Tuple2<Integer, Long>> valueState = new ValueStateDescriptor<>(
"any", source.getType(), null);
QueryableStateStream<Integer, Tuple2<Integer, Long>> queryableState =
source.keyBy(new KeySelector<Tuple2<Integer, Long>, Integer>() {
private static final long serialVersionUID = 7480503339992214681L;
@Override
public Integer getKey(Tuple2<Integer, Long> value) {
return value.f0;
}
}).asQueryableState("hakuna", valueState);
try (AutoCancellableJob autoCancellableJob = new AutoCancellableJob(deadline, clusterClient, env)) {
final JobID jobId = autoCancellableJob.getJobId();
final JobGraph jobGraph = autoCancellableJob.getJobGraph();
long expected = numElements;
// query once BEFORE the job is submitted; the returned future is expected to
// fail and is intentionally not inspected here.
client.getKvState(
autoCancellableJob.getJobId(),
queryableState.getQueryableStateName(),
0,
BasicTypeInfo.INT_TYPE_INFO,
valueState);
ClientUtils.submitJob(clusterClient, jobGraph);
executeValueQuery(deadline, client, jobId, "hakuna", valueState, expected);
}
}
/**
 * Tests simple value state queryable state instance with a default value
 * set. Each source emits (subtaskIndex, 0)..(subtaskIndex, numElements)
 * tuples, the key is mapped to 1 but key 0 is queried which should throw
 * a {@link UnknownKeyOrNamespaceException} exception.
 *
 * @throws UnknownKeyOrNamespaceException thrown due querying a non-existent key
 */
@Test(expected = UnknownKeyOrNamespaceException.class)
public void testValueStateDefault() throws Throwable {
final Deadline deadline = Deadline.now().plus(TEST_TIMEOUT);
final long numElements = 1024L;
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setStateBackend(stateBackend);
env.setParallelism(maxParallelism);
// Very important, because cluster is shared between tests and we
// don't explicitly check that all slots are available before
// submitting.
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));
DataStream<Tuple2<Integer, Long>> source = env.addSource(new TestAscendingValueSource(numElements));
ValueStateDescriptor<Tuple2<Integer, Long>> valueState = new ValueStateDescriptor<>(
"any", source.getType(), Tuple2.of(0, 1337L));
// only expose key "1"
QueryableStateStream<Integer, Tuple2<Integer, Long>> queryableState = source.keyBy(
new KeySelector<Tuple2<Integer, Long>, Integer>() {
private static final long serialVersionUID = 4509274556892655887L;
@Override
public Integer getKey(Tuple2<Integer, Long> value) {
return 1;
}
}).asQueryableState("hakuna", valueState);
try (AutoCancellableJob autoCancellableJob = new AutoCancellableJob(deadline, clusterClient, env)) {
final JobID jobId = autoCancellableJob.getJobId();
final JobGraph jobGraph = autoCancellableJob.getJobGraph();
ClientUtils.submitJob(clusterClient, jobGraph);
// Now query key 0, which was never written; failForUnknownKeyOrNamespace=true
// makes the helper surface UnknownKeyOrNamespaceException instead of retrying.
int key = 0;
CompletableFuture<ValueState<Tuple2<Integer, Long>>> future = getKvState(
deadline,
client,
jobId,
queryableState.getQueryableStateName(),
key,
BasicTypeInfo.INT_TYPE_INFO,
valueState,
true,
executor);
try {
future.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);
} catch (ExecutionException | CompletionException e) {
// get() on a completedExceptionally future wraps the
// exception in an ExecutionException.
throw e.getCause();
}
}
}
/**
 * Tests simple value state queryable state instance. Each source emits
 * (subtaskIndex, 0)..(subtaskIndex, numElements) tuples, which are then
 * queried. The test succeeds after each subtask index is queried with
 * value numElements (the latest element updated the state).
 *
 * <p>This is the same as the simple value state test, but uses the API shortcut
 * {@code asQueryableState(name)} without an explicit state descriptor.
 */
@Test
public void testValueStateShortcut() throws Exception {
final Deadline deadline = Deadline.now().plus(TEST_TIMEOUT);
final long numElements = 1024L;
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setStateBackend(stateBackend);
env.setParallelism(maxParallelism);
// Very important, because cluster is shared between tests and we
// don't explicitly check that all slots are available before
// submitting.
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));
DataStream<Tuple2<Integer, Long>> source = env.addSource(new TestAscendingValueSource(numElements));
// Value state shortcut
final QueryableStateStream<Integer, Tuple2<Integer, Long>> queryableState =
source.keyBy(new KeySelector<Tuple2<Integer, Long>, Integer>() {
private static final long serialVersionUID = 9168901838808830068L;
@Override
public Integer getKey(Tuple2<Integer, Long> value) {
return value.f0;
}
}).asQueryableState("matata");
// The shortcut creates the descriptor internally; recover it for querying.
@SuppressWarnings("unchecked")
final ValueStateDescriptor<Tuple2<Integer, Long>> stateDesc =
(ValueStateDescriptor<Tuple2<Integer, Long>>) queryableState.getStateDescriptor();
try (AutoCancellableJob autoCancellableJob = new AutoCancellableJob(deadline, clusterClient, env)) {
final JobID jobId = autoCancellableJob.getJobId();
final JobGraph jobGraph = autoCancellableJob.getJobGraph();
ClientUtils.submitJob(clusterClient, jobGraph);
executeValueQuery(deadline, client, jobId, "matata", stateDesc, numElements);
}
}
/**
 * Tests simple folding state queryable state instance. Each source emits
 * (subtaskIndex, 0)..(subtaskIndex, numElements) tuples, which are then
 * queried. The folding state sums these up and maps them to Strings. The
 * test succeeds after each subtask index is queried with result n*(n+1)/2
 * (as a String).
 *
 * <p>NOTE(review): FoldingState is deprecated in later Flink versions —
 * presumably kept here for coverage of the legacy API; confirm.
 */
@Test
public void testFoldingState() throws Exception {
final Deadline deadline = Deadline.now().plus(TEST_TIMEOUT);
final int numElements = 1024;
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setStateBackend(stateBackend);
env.setParallelism(maxParallelism);
// Very important, because cluster is shared between tests and we
// don't explicitly check that all slots are available before
// submitting.
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));
DataStream<Tuple2<Integer, Long>> source = env.addSource(new TestAscendingValueSource(numElements));
FoldingStateDescriptor<Tuple2<Integer, Long>, String> foldingState = new FoldingStateDescriptor<>(
"any", "0", new SumFold(), StringSerializer.INSTANCE);
source.keyBy(new KeySelector<Tuple2<Integer, Long>, Integer>() {
private static final long serialVersionUID = -842809958106747539L;
@Override
public Integer getKey(Tuple2<Integer, Long> value) {
return value.f0;
}
}).asQueryableState("pumba", foldingState);
try (AutoCancellableJob autoCancellableJob = new AutoCancellableJob(deadline, clusterClient, env)) {
final JobID jobId = autoCancellableJob.getJobId();
final JobGraph jobGraph = autoCancellableJob.getJobGraph();
ClientUtils.submitJob(clusterClient, jobGraph);
// Sum of 0..numElements as a String — the fold encodes the accumulator as text.
final String expected = Integer.toString(numElements * (numElements + 1) / 2);
for (int key = 0; key < maxParallelism; key++) {
boolean success = false;
// Retry until the state has caught up with the source or the deadline expires.
while (deadline.hasTimeLeft() && !success) {
CompletableFuture<FoldingState<Tuple2<Integer, Long>, String>> future = getKvState(
deadline,
client,
jobId,
"pumba",
key,
BasicTypeInfo.INT_TYPE_INFO,
foldingState,
false,
executor);
String value = future.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS).get();
//assertEquals("Key mismatch", key, value.f0.intValue());
if (expected.equals(value)) {
success = true;
} else {
// Retry
Thread.sleep(RETRY_TIMEOUT);
}
}
assertTrue("Did not succeed query", success);
}
}
}
/**
 * Tests simple reducing state queryable state instance. Each source emits
 * (subtaskIndex, 0)..(subtaskIndex, numElements) tuples, which are then
 * queried. The reducing state instance sums these up. The test succeeds
 * after each subtask index is queried with result n*(n+1)/2.
 */
@Test
public void testReducingState() throws Exception {
final Deadline deadline = Deadline.now().plus(TEST_TIMEOUT);
final long numElements = 1024L;
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setStateBackend(stateBackend);
env.setParallelism(maxParallelism);
// Very important, because cluster is shared between tests and we
// don't explicitly check that all slots are available before
// submitting.
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));
DataStream<Tuple2<Integer, Long>> source = env.addSource(new TestAscendingValueSource(numElements));
ReducingStateDescriptor<Tuple2<Integer, Long>> reducingState = new ReducingStateDescriptor<>(
"any", new SumReduce(), source.getType());
source.keyBy(new KeySelector<Tuple2<Integer, Long>, Integer>() {
private static final long serialVersionUID = 8470749712274833552L;
@Override
public Integer getKey(Tuple2<Integer, Long> value) {
return value.f0;
}
}).asQueryableState("jungle", reducingState);
try (AutoCancellableJob autoCancellableJob = new AutoCancellableJob(deadline, clusterClient, env)) {
final JobID jobId = autoCancellableJob.getJobId();
final JobGraph jobGraph = autoCancellableJob.getJobGraph();
ClientUtils.submitJob(clusterClient, jobGraph);
// Sum of 0..numElements per key.
final long expected = numElements * (numElements + 1L) / 2L;
for (int key = 0; key < maxParallelism; key++) {
boolean success = false;
// Retry until the state has caught up with the source or the deadline expires.
while (deadline.hasTimeLeft() && !success) {
CompletableFuture<ReducingState<Tuple2<Integer, Long>>> future = getKvState(
deadline,
client,
jobId,
"jungle",
key,
BasicTypeInfo.INT_TYPE_INFO,
reducingState,
false,
executor);
Tuple2<Integer, Long> value = future.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS).get();
assertEquals("Key mismatch", key, value.f0.intValue());
if (expected == value.f1) {
success = true;
} else {
// Retry
Thread.sleep(RETRY_TIMEOUT);
}
}
assertTrue("Did not succeed query", success);
}
}
}
/**
 * Tests simple map state queryable state instance. Each source emits
 * (subtaskIndex, 0)..(subtaskIndex, numElements) tuples, which are then
 * queried. The map state instance sums the values up. The test succeeds
 * after each subtask index is queried with result n*(n+1)/2.
 */
@Test
public void testMapState() throws Exception {
final Deadline deadline = Deadline.now().plus(TEST_TIMEOUT);
final long numElements = 1024L;
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setStateBackend(stateBackend);
env.setParallelism(maxParallelism);
// Very important, because cluster is shared between tests and we
// don't explicitly check that all slots are available before
// submitting.
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));
DataStream<Tuple2<Integer, Long>> source = env.addSource(new TestAscendingValueSource(numElements));
// Map state is made queryable via setQueryable on the descriptor (no
// asQueryableState shortcut exists for map state).
final MapStateDescriptor<Integer, Tuple2<Integer, Long>> mapStateDescriptor = new MapStateDescriptor<>(
"timon", BasicTypeInfo.INT_TYPE_INFO, source.getType());
mapStateDescriptor.setQueryable("timon-queryable");
source.keyBy(new KeySelector<Tuple2<Integer, Long>, Integer>() {
private static final long serialVersionUID = 8470749712274833552L;
@Override
public Integer getKey(Tuple2<Integer, Long> value) {
return value.f0;
}
}).process(new ProcessFunction<Tuple2<Integer, Long>, Object>() {
private static final long serialVersionUID = -805125545438296619L;
private transient MapState<Integer, Tuple2<Integer, Long>> mapState;
@Override
public void open(Configuration parameters) throws Exception {
super.open(parameters);
mapState = getRuntimeContext().getMapState(mapStateDescriptor);
}
@Override
public void processElement(Tuple2<Integer, Long> value, Context ctx, Collector<Object> out) throws Exception {
// Accumulate the running sum for this key in the map entry.
Tuple2<Integer, Long> v = mapState.get(value.f0);
if (v == null) {
v = new Tuple2<>(value.f0, 0L);
}
mapState.put(value.f0, new Tuple2<>(v.f0, v.f1 + value.f1));
}
});
try (AutoCancellableJob autoCancellableJob = new AutoCancellableJob(deadline, clusterClient, env)) {
final JobID jobId = autoCancellableJob.getJobId();
final JobGraph jobGraph = autoCancellableJob.getJobGraph();
ClientUtils.submitJob(clusterClient, jobGraph);
// Sum of 0..numElements per key.
final long expected = numElements * (numElements + 1L) / 2L;
for (int key = 0; key < maxParallelism; key++) {
boolean success = false;
// Retry until the state has caught up with the source or the deadline expires.
while (deadline.hasTimeLeft() && !success) {
CompletableFuture<MapState<Integer, Tuple2<Integer, Long>>> future = getKvState(
deadline,
client,
jobId,
"timon-queryable",
key,
BasicTypeInfo.INT_TYPE_INFO,
mapStateDescriptor,
false,
executor);
Tuple2<Integer, Long> value =
future.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS).get(key);
if (value != null && value.f0 != null && expected == value.f1) {
assertEquals("Key mismatch", key, value.f0.intValue());
success = true;
} else {
// Retry
Thread.sleep(RETRY_TIMEOUT);
}
}
assertTrue("Did not succeed query", success);
}
}
}
/**
 * Tests simple list state queryable state instance. Each source emits
 * (subtaskIndex, 0)..(subtaskIndex, numElements) tuples, which are then
 * queried. The list state instance add the values to the list. The test
 * succeeds after each subtask index is queried and the list contains
 * the correct number of distinct elements.
 */
@Test
public void testListState() throws Exception {
final Deadline deadline = Deadline.now().plus(TEST_TIMEOUT);
final long numElements = 1024L;
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setStateBackend(stateBackend);
env.setParallelism(maxParallelism);
// Very important, because cluster is shared between tests and we
// don't explicitly check that all slots are available before
// submitting.
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));
DataStream<Tuple2<Integer, Long>> source = env.addSource(new TestAscendingValueSource(numElements));
// List state is made queryable via setQueryable on the descriptor.
final ListStateDescriptor<Long> listStateDescriptor = new ListStateDescriptor<Long>(
"list", BasicTypeInfo.LONG_TYPE_INFO);
listStateDescriptor.setQueryable("list-queryable");
source.keyBy(new KeySelector<Tuple2<Integer, Long>, Integer>() {
private static final long serialVersionUID = 8470749712274833552L;
@Override
public Integer getKey(Tuple2<Integer, Long> value) {
return value.f0;
}
}).process(new ProcessFunction<Tuple2<Integer, Long>, Object>() {
private static final long serialVersionUID = -805125545438296619L;
private transient ListState<Long> listState;
@Override
public void open(Configuration parameters) throws Exception {
super.open(parameters);
listState = getRuntimeContext().getListState(listStateDescriptor);
}
@Override
public void processElement(Tuple2<Integer, Long> value, Context ctx, Collector<Object> out) throws Exception {
listState.add(value.f1);
}
});
try (AutoCancellableJob autoCancellableJob = new AutoCancellableJob(deadline, clusterClient, env)) {
final JobID jobId = autoCancellableJob.getJobId();
final JobGraph jobGraph = autoCancellableJob.getJobGraph();
ClientUtils.submitJob(clusterClient, jobGraph);
final Map<Integer, Set<Long>> results = new HashMap<>();
for (int key = 0; key < maxParallelism; key++) {
boolean success = false;
// Retry until the list contains all distinct values or the deadline expires.
while (deadline.hasTimeLeft() && !success) {
final CompletableFuture<ListState<Long>> future = getKvState(
deadline,
client,
jobId,
"list-queryable",
key,
BasicTypeInfo.INT_TYPE_INFO,
listStateDescriptor,
false,
executor);
Iterable<Long> value = future.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS).get();
// De-duplicate: the list may contain repeats after restarts.
Set<Long> res = new HashSet<>();
for (Long v: value) {
res.add(v);
}
// the source starts at 0, so +1
if (res.size() == numElements + 1L) {
success = true;
results.put(key, res);
} else {
// Retry
Thread.sleep(RETRY_TIMEOUT);
}
}
assertTrue("Did not succeed query", success);
}
// Verify every value 0..numElements is present for every key.
for (int key = 0; key < maxParallelism; key++) {
Set<Long> values = results.get(key);
for (long i = 0L; i <= numElements; i++) {
assertTrue(values.contains(i));
}
}
}
}
/**
 * Tests simple aggregating state queryable state instance. Each source emits
 * (subtaskIndex, 0)..(subtaskIndex, numElements) tuples; the aggregating state
 * sums the values (encoded as Strings, see {@link SumAggr}). The test succeeds
 * after each subtask index is queried with result n*(n+1)/2.
 */
@Test
public void testAggregatingState() throws Exception {
final Deadline deadline = Deadline.now().plus(TEST_TIMEOUT);
final long numElements = 1024L;
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setStateBackend(stateBackend);
env.setParallelism(maxParallelism);
// Very important, because cluster is shared between tests and we
// don't explicitly check that all slots are available before
// submitting.
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));
DataStream<Tuple2<Integer, Long>> source = env.addSource(new TestAscendingValueSource(numElements));
final AggregatingStateDescriptor<Tuple2<Integer, Long>, String, String> aggrStateDescriptor =
new AggregatingStateDescriptor<>("aggregates", new SumAggr(), String.class);
aggrStateDescriptor.setQueryable("aggr-queryable");
source.keyBy(new KeySelector<Tuple2<Integer, Long>, Integer>() {
private static final long serialVersionUID = 8470749712274833552L;
@Override
public Integer getKey(Tuple2<Integer, Long> value) {
return value.f0;
}
}).transform(
"TestAggregatingOperator",
BasicTypeInfo.STRING_TYPE_INFO,
new AggregatingTestOperator(aggrStateDescriptor)
);
try (AutoCancellableJob autoCancellableJob = new AutoCancellableJob(deadline, clusterClient, env)) {
final JobID jobId = autoCancellableJob.getJobId();
final JobGraph jobGraph = autoCancellableJob.getJobGraph();
ClientUtils.submitJob(clusterClient, jobGraph);
for (int key = 0; key < maxParallelism; key++) {
boolean success = false;
// Retry until the state has caught up with the source or the deadline expires.
while (deadline.hasTimeLeft() && !success) {
CompletableFuture<AggregatingState<Tuple2<Integer, Long>, String>> future = getKvState(
deadline,
client,
jobId,
"aggr-queryable",
key,
BasicTypeInfo.INT_TYPE_INFO,
aggrStateDescriptor,
false,
executor);
String value = future.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS).get();
if (Long.parseLong(value) == numElements * (numElements + 1L) / 2L) {
success = true;
} else {
// Retry
Thread.sleep(RETRY_TIMEOUT);
}
}
assertTrue("Did not succeed query", success);
}
}
}
///// Sources/UDFs Used in the Tests //////
/**
 * Test source producing (key, 0)..(key, maxValue) with key being the sub
 * task index.
 *
 * <p>After all tuples have been emitted, the source waits to be cancelled
 * and does not immediately finish.
 */
private static class TestAscendingValueSource extends RichParallelSourceFunction<Tuple2<Integer, Long>> {
private static final long serialVersionUID = 1459935229498173245L;
/** Largest value emitted (inclusive); must be non-negative. */
private final long maxValue;
/** Cleared by cancel(); volatile so the emitting thread observes the change. */
private volatile boolean isRunning = true;
TestAscendingValueSource(long maxValue) {
Preconditions.checkArgument(maxValue >= 0);
this.maxValue = maxValue;
}
@Override
public void open(Configuration parameters) throws Exception {
super.open(parameters);
}
@Override
public void run(SourceContext<Tuple2<Integer, Long>> ctx) throws Exception {
// f0 => key
int key = getRuntimeContext().getIndexOfThisSubtask();
// Tuple instance is reused across emissions; only f1 changes.
Tuple2<Integer, Long> record = new Tuple2<>(key, 0L);
long currentValue = 0;
while (isRunning && currentValue <= maxValue) {
// Emit under the checkpoint lock so emission and checkpoints don't interleave.
synchronized (ctx.getCheckpointLock()) {
record.f1 = currentValue;
ctx.collect(record);
}
currentValue++;
}
// Done emitting: block until cancel() notifies us, so the job stays RUNNING
// and remains queryable.
while (isRunning) {
synchronized (this) {
wait();
}
}
}
@Override
public void cancel() {
isRunning = false;
// Wake the run() loop if it is parked in wait().
synchronized (this) {
notifyAll();
}
}
}
/**
 * Test source producing (key, 1) tuples with a uniformly random key in [0, numKeys).
 *
 * <p>Runs until cancelled. Subtask 0 mirrors the id of the latest completed
 * checkpoint into a static {@link AtomicLong} so tests can wait for state to be
 * checkpointed.
 */
private static class TestKeyRangeSource extends RichParallelSourceFunction<Tuple2<Integer, Long>> implements CheckpointListener {
private static final long serialVersionUID = -5744725196953582710L;
/** Latest completed checkpoint id, maintained by subtask 0 only. */
private static final AtomicLong LATEST_CHECKPOINT_ID = new AtomicLong();
/** Exclusive upper bound for generated keys. */
private final int numKeys;
/** Cleared by cancel(); volatile so the emitting thread observes the change. */
private volatile boolean isRunning = true;
/** Emitted-record counter, used to throttle every 50 records. */
private int counter = 0;
TestKeyRangeSource(int numKeys) {
this.numKeys = numKeys;
}
@Override
public void open(Configuration parameters) throws Exception {
super.open(parameters);
// Reset the shared checkpoint id exactly once per job (subtask 0).
if (getRuntimeContext().getIndexOfThisSubtask() == 0) {
LATEST_CHECKPOINT_ID.set(0L);
}
}
@Override
public void run(SourceContext<Tuple2<Integer, Long>> ctx) throws Exception {
// f0 => key; the tuple instance is reused across emissions.
Tuple2<Integer, Long> record = new Tuple2<>(0, 1L);
while (isRunning) {
synchronized (ctx.getCheckpointLock()) {
// ThreadLocalRandom.current() must be invoked on the executing thread;
// caching the instance in a field (as before) binds it to the constructing
// thread and survives serialization, which the API forbids.
record.f0 = ThreadLocalRandom.current().nextInt(numKeys);
ctx.collect(record);
counter++;
}
if (counter % 50 == 0) {
// mild slow down
Thread.sleep(1L);
}
}
}
@Override
public void cancel() {
isRunning = false;
}
@Override
public void notifyCheckpointComplete(long checkpointId) throws Exception {
// Only subtask 0 publishes, avoiding races between parallel instances.
if (getRuntimeContext().getIndexOfThisSubtask() == 0) {
LATEST_CHECKPOINT_ID.set(checkpointId);
}
}
}
/**
 * An operator that uses {@link AggregatingState}.
 *
 * <p>The operator exists for lack of possibility to get an
 * {@link AggregatingState} from the {@link org.apache.flink.api.common.functions.RuntimeContext}.
 * If this were not the case, we could have a {@link ProcessFunction}.
 */
private static class AggregatingTestOperator
extends AbstractStreamOperator<String>
implements OneInputStreamOperator<Tuple2<Integer, Long>, String> {
private static final long serialVersionUID = 1L;
/** Descriptor used to obtain the partitioned aggregating state in open(). */
private final AggregatingStateDescriptor<Tuple2<Integer, Long>, String, String> stateDescriptor;
/** Per-key aggregating state; initialized in open(), hence transient. */
private transient AggregatingState<Tuple2<Integer, Long>, String> state;
AggregatingTestOperator(AggregatingStateDescriptor<Tuple2<Integer, Long>, String, String> stateDesc) {
this.stateDescriptor = stateDesc;
}
@Override
public void open() throws Exception {
super.open();
// Acquire the keyed state directly from the backend (void namespace).
this.state = getKeyedStateBackend().getPartitionedState(
VoidNamespace.INSTANCE,
VoidNamespaceSerializer.INSTANCE,
stateDescriptor);
}
@Override
public void processElement(StreamRecord<Tuple2<Integer, Long>> element) throws Exception {
// Fold every incoming element into the state; emits no output.
state.add(element.getValue());
}
}
/**
 * Test {@link AggregateFunction} summing the long component of each input tuple
 * into a {@code String}-encoded accumulator (the state backend stores the
 * accumulator as a string; numerically it is a running sum, not a concatenation).
 */
private static class SumAggr implements AggregateFunction<Tuple2<Integer, Long>, String, String> {
private static final long serialVersionUID = -6249227626701264599L;
/** @return the zero accumulator, encoded as the string "0". */
@Override
public String createAccumulator() {
return "0";
}
/** Adds {@code value.f1} to the decoded accumulator and re-encodes it. */
@Override
public String add(Tuple2<Integer, Long> value, String accumulator) {
// parseLong avoids the needless boxing of Long.valueOf; behavior is identical.
long acc = Long.parseLong(accumulator);
acc += value.f1;
return Long.toString(acc);
}
/** The accumulator already is the result representation. */
@Override
public String getResult(String accumulator) {
return accumulator;
}
/** Merges two partial sums by decoding, adding, and re-encoding. */
@Override
public String merge(String a, String b) {
return Long.toString(Long.parseLong(a) + Long.parseLong(b));
}
}
/**
 * Test {@link FoldFunction} summing the long component of each input tuple
 * into a {@code String}-encoded accumulator (a running sum, not a concatenation).
 */
private static class SumFold implements FoldFunction<Tuple2<Integer, Long>, String> {
private static final long serialVersionUID = -6249227626701264599L;
/** Adds {@code value.f1} to the decoded accumulator and re-encodes it. */
@Override
public String fold(String accumulator, Tuple2<Integer, Long> value) throws Exception {
// parseLong avoids the needless boxing of Long.valueOf; behavior is identical.
long acc = Long.parseLong(accumulator);
acc += value.f1;
return Long.toString(acc);
}
}
/**
 * Test {@link ReduceFunction} summing up its two arguments.
 *
 * <p>NOTE: mutates and returns {@code value1} in place — presumably relying on
 * Flink's object-reuse contract for reduce functions; do not "fix" by allocating
 * a new tuple without confirming callers tolerate the identity change.
 */
protected static class SumReduce implements ReduceFunction<Tuple2<Integer, Long>> {
private static final long serialVersionUID = -8651235077342052336L;
@Override
public Tuple2<Integer, Long> reduce(Tuple2<Integer, Long> value1, Tuple2<Integer, Long> value2) throws Exception {
value1.f1 += value2.f1;
return value1;
}
}
///// General Utility Methods //////
/**
 * A wrapper of the job graph that makes sure to cancel the job and wait for
 * termination after the execution of every test.
 *
 * <p>Intended for try-with-resources: close() cancels the job and blocks until
 * the cluster reports {@code CANCELED} (or the deadline expires).
 */
private static class AutoCancellableJob implements AutoCloseable {
private final ClusterClient<?> clusterClient;
private final JobGraph jobGraph;
private final JobID jobId;
/** Bounds how long close() waits for the job to reach CANCELED. */
private final Deadline deadline;
AutoCancellableJob(Deadline deadline, final ClusterClient<?> clusterClient, final StreamExecutionEnvironment env) {
Preconditions.checkNotNull(env);
this.clusterClient = Preconditions.checkNotNull(clusterClient);
this.jobGraph = env.getStreamGraph().getJobGraph();
this.jobId = Preconditions.checkNotNull(jobGraph.getJobID());
this.deadline = deadline;
}
JobGraph getJobGraph() {
return jobGraph;
}
JobID getJobId() {
return jobId;
}
@Override
public void close() throws Exception {
// Free cluster resources
clusterClient.cancel(jobId).get();
// cancel() is non-blocking so do this to make sure the job finished
CompletableFuture<JobStatus> jobStatusFuture = FutureUtils.retrySuccessfulWithDelay(
() -> clusterClient.getJobStatus(jobId),
Time.milliseconds(50),
deadline,
(jobStatus) -> jobStatus.equals(JobStatus.CANCELED),
TestingUtils.defaultScheduledExecutor());
assertEquals(
JobStatus.CANCELED,
jobStatusFuture.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS));
}
}
/**
 * Issues a queryable-state lookup for the given key and returns a future that
 * completes once the query succeeds or fails fatally; transient failures are
 * retried internally until the deadline expires.
 */
private static <K, S extends State, V> CompletableFuture<S> getKvState(
        final Deadline deadline,
        final QueryableStateClient client,
        final JobID jobId,
        final String queryName,
        final K key,
        final TypeInformation<K> keyTypeInfo,
        final StateDescriptor<S, V> stateDescriptor,
        final boolean failForUnknownKeyOrNamespace,
        final ScheduledExecutor executor) {
    final CompletableFuture<S> result = new CompletableFuture<>();
    getKvStateIgnoringCertainExceptions(
            deadline, result, client, jobId, queryName, key, keyTypeInfo,
            stateDescriptor, failForUnknownKeyOrNamespace, executor);
    return result;
}
/**
 * Repeatedly queries the given queryable state, completing {@code resultFuture}
 * with the first successful result.
 *
 * <p>CancellationExceptions, AssertionErrors and — when
 * {@code failForUnknownKeyOrNamespace} is set — UnknownKeyOrNamespaceExceptions
 * are treated as fatal and complete the future exceptionally. Any other failure
 * triggers a retry while the deadline has time left.
 */
private static <K, S extends State, V> void getKvStateIgnoringCertainExceptions(
        final Deadline deadline,
        final CompletableFuture<S> resultFuture,
        final QueryableStateClient client,
        final JobID jobId,
        final String queryName,
        final K key,
        final TypeInformation<K> keyTypeInfo,
        final StateDescriptor<S, V> stateDescriptor,
        final boolean failForUnknownKeyOrNamespace,
        final ScheduledExecutor executor) {

    if (resultFuture.isDone()) {
        return;
    }

    CompletableFuture<S> expected = client.getKvState(jobId, queryName, key, keyTypeInfo, stateDescriptor);
    expected.whenCompleteAsync((result, throwable) -> {
        if (throwable == null) {
            resultFuture.complete(result);
            return;
        }
        final Throwable cause = throwable.getCause();
        if (cause instanceof CancellationException ||
                cause instanceof AssertionError ||
                (failForUnknownKeyOrNamespace && cause instanceof UnknownKeyOrNamespaceException)) {
            // Fatal failure: do not retry.
            resultFuture.completeExceptionally(cause);
        } else if (deadline.hasTimeLeft()) {
            // Transient failure: retry until the deadline expires.
            getKvStateIgnoringCertainExceptions(
                    deadline, resultFuture, client, jobId, queryName, key, keyTypeInfo,
                    stateDescriptor, failForUnknownKeyOrNamespace, executor);
        } else {
            // BUG FIX: previously the future was never completed when the deadline
            // expired after a transient failure, leaving callers to block until
            // their own timeout with no cause. Surface the last failure instead.
            resultFuture.completeExceptionally(throwable);
        }
    }, executor);

    // Cancel the outstanding request as soon as a result (or failure) is in.
    resultFuture.whenComplete((result, throwable) -> expected.cancel(false));
}
/**
 * Retry a query for state for keys between 0 and {@link #maxParallelism} until
 * <tt>expected</tt> equals the value of the result tuple's second field.
 */
private void executeValueQuery(
        final Deadline deadline,
        final QueryableStateClient client,
        final JobID jobId,
        final String queryableStateName,
        final ValueStateDescriptor<Tuple2<Integer, Long>> stateDescriptor,
        final long expected) throws Exception {
    for (int key = 0; key < maxParallelism; key++) {
        boolean success = false;
        while (!success && deadline.hasTimeLeft()) {
            final CompletableFuture<ValueState<Tuple2<Integer, Long>>> stateFuture = getKvState(
                    deadline, client, jobId, queryableStateName, key,
                    BasicTypeInfo.INT_TYPE_INFO, stateDescriptor, false, executor);
            final Tuple2<Integer, Long> result =
                    stateFuture.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS).value();
            assertEquals("Key mismatch", key, result.f0.intValue());
            if (result.f1 == expected) {
                success = true;
            } else {
                // Value not there yet; back off before retrying.
                Thread.sleep(RETRY_TIMEOUT);
            }
        }
        assertTrue("Did not succeed query", success);
    }
}
}
| |
package it.unibz.krdb.sql;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import it.unibz.krdb.obda.owlapi3.OWLAPI3TranslatorUtility;
import it.unibz.krdb.obda.owlrefplatform.core.QuestConstants;
import it.unibz.krdb.obda.owlrefplatform.core.QuestDBConnection;
import it.unibz.krdb.obda.owlrefplatform.core.QuestDBStatement;
import it.unibz.krdb.obda.owlrefplatform.core.QuestPreferences;
import it.unibz.krdb.obda.r2rml.R2RMLManager;
import it.unibz.krdb.sql.DBMetadata;
import it.unibz.krdb.sql.TableDefinition;
import it.unibz.krdb.sql.api.Attribute;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.io.File;
import java.util.Scanner;
import org.junit.After;
import org.junit.Test;
import org.openrdf.model.Model;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import sesameWrapper.SesameVirtualRepo;
/**
* Tests that user-applied constraints can be provided through
* sesameWrapper.SesameVirtualRepo
* with manually instantiated metadata.
*
* This is quite similar to the setting in the optique platform
*
* Some stuff copied from ExampleManualMetadata
*
* @author dhovl
*
*/
public class TestSesameImplicitDBConstraints {

    static String owlfile = "src/test/resources/userconstraints/uc.owl";
    static String obdafile = "src/test/resources/userconstraints/uc.obda";
    static String r2rmlfile = "src/test/resources/userconstraints/uc.ttl";
    static String uc_keyfile = "src/test/resources/userconstraints/keys.lst";
    static String uc_create = "src/test/resources/userconstraints/create.sql";

    // Shared SPARQL query and self-join detection pattern used by all four tests.
    private static final String SELF_JOIN_QUERY =
            "PREFIX : <http://www.semanticweb.org/ontologies/2013/7/untitled-ontology-150#> SELECT * WHERE {?x :hasVal1 ?v1; :hasVal2 ?v2.}";
    private static final String SELF_JOIN_PATTERN = "(?ms)(.*)TABLE1(.*),(.*)TABLE1(.*)";

    private Connection sqlConnection;
    private QuestDBStatement qst = null;

    /**
     * Prepares ontop for the rewriting and unfolding steps: creates the in-memory
     * H2 database from the SQL script, loads the ontology and the R2RML mapping,
     * and opens a {@link QuestDBStatement} for issuing queries.
     *
     * @param applyUserConstraints whether to register the user-supplied implicit
     *        DB constraints read from {@link #uc_keyfile}
     * @param provideMetadata whether to hand the repository manually constructed
     *        DB metadata instead of letting it extract its own
     */
    public void init(boolean applyUserConstraints, boolean provideMetadata) throws Exception {
        sqlConnection = DriverManager.getConnection("jdbc:h2:mem:countries", "sa", "");
        // Create the test tables. Failures are logged but deliberately non-fatal,
        // matching the original best-effort setup. Statement and Scanner are now
        // closed via try-with-resources (previously the Scanner leaked).
        try (java.sql.Statement s = sqlConnection.createStatement()) {
            try (Scanner scanner = new Scanner(new File(uc_create))) {
                String text = scanner.useDelimiter("\\A").next();
                s.execute(text);
            } catch (SQLException sqle) {
                System.out.println("Exception in creating db from script");
            }
        }

        // Load the ontology from an external .owl file.
        OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
        OWLOntology ontology = manager.loadOntologyFromOntologyDocument(new File(owlfile));

        // Load the OBDA model from an external .r2rml file.
        R2RMLManager rmanager = new R2RMLManager(r2rmlfile);
        Model model = rmanager.getModel();

        // Configure Quest in "Virtual ABox" mode against the H2 database.
        QuestPreferences preference = new QuestPreferences();
        preference.setCurrentValueOf(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
        preference.setCurrentValueOf(QuestPreferences.DBNAME, "countries");
        preference.setCurrentValueOf(QuestPreferences.JDBC_URL, "jdbc:h2:mem:countries");
        preference.setCurrentValueOf(QuestPreferences.DBUSER, "sa");
        preference.setCurrentValueOf(QuestPreferences.DBPASSWORD, "");
        preference.setCurrentValueOf(QuestPreferences.JDBC_DRIVER, "org.h2.Driver");

        SesameVirtualRepo qest1;
        if (provideMetadata) {
            qest1 = new SesameVirtualRepo("", ontology, model, getMeta(), preference);
        } else {
            qest1 = new SesameVirtualRepo("", ontology, model, preference);
        }
        if (applyUserConstraints) {
            // Parse and register the user constraints. This block was previously
            // duplicated in both branches above.
            ImplicitDBConstraints userConstraints = new ImplicitDBConstraints(uc_keyfile);
            qest1.setImplicitDBConstraints(userConstraints);
        }
        qest1.initialize();

        // Prepare the data connection for querying.
        QuestDBConnection conn = qest1.getQuestConnection();
        qst = conn.createStatement();
    }

    @After
    public void tearDown() throws Exception {
        if (!sqlConnection.isClosed()) {
            try (java.sql.Statement s = sqlConnection.createStatement()) {
                s.execute("DROP ALL OBJECTS DELETE FILES");
            } catch (SQLException sqle) {
                System.out.println("Table not found, not dropping");
            } finally {
                sqlConnection.close();
            }
        }
    }

    /** Builds a two-column (COL1, COL2 — both non-null INTEGER) table definition. */
    private TableDefinition defTable(String name) {
        TableDefinition tableDefinition = new TableDefinition(name);
        // Attribute positions start from 1.
        tableDefinition.addAttribute(new Attribute("COL1", java.sql.Types.INTEGER, false, null));
        tableDefinition.addAttribute(new Attribute("COL2", java.sql.Types.INTEGER, false, null));
        return tableDefinition;
    }

    /** Manually constructed metadata for TABLE1..TABLE3, mirroring the create script. */
    private DBMetadata getMeta() {
        DBMetadata dbMetadata = new DBMetadata("org.h2.Driver");
        dbMetadata.add(defTable("TABLE1"));
        dbMetadata.add(defTable("TABLE2"));
        dbMetadata.add(defTable("TABLE3"));
        return dbMetadata;
    }

    /** Unfolds {@link #SELF_JOIN_QUERY} and reports whether the SQL self-joins TABLE1. */
    private boolean sqlHasSelfJoinOnTable1() throws Exception {
        String sql = qst.getSQL(SELF_JOIN_QUERY);
        return sql.matches(SELF_JOIN_PATTERN);
    }

    @Test
    public void testWithSelfJoinElimManualMetadata() throws Exception {
        init(true, true);
        // User constraints applied: the self-join must be eliminated.
        assertFalse(sqlHasSelfJoinOnTable1());
    }

    @Test
    public void testWithoutSelfJoinElimManualMetadata() throws Exception {
        init(false, true);
        // No user constraints: the self-join must remain.
        assertTrue(sqlHasSelfJoinOnTable1());
    }

    @Test
    public void testWithSelfJoinElimNoMetadata() throws Exception {
        init(true, false);
        assertFalse(sqlHasSelfJoinOnTable1());
    }

    @Test
    public void testWithoutSelfJoinElimNoMetadata() throws Exception {
        init(false, false);
        assertTrue(sqlHasSelfJoinOnTable1());
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import com.google.common.base.Strings;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.codec.docvaluesformat.DocValuesFormatProvider;
import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.util.List;
/**
*
*/
/**
 * A mapper for a single field, exposing its names, analysis components and the
 * hooks for building queries and filters against it.
 */
public interface FieldMapper<T> extends Mapper {

    // Interface fields are implicitly public static final.
    String DOC_VALUES_FORMAT = "doc_values_format";

    /**
     * The set of names a field is known by: the logical name, the indexed name
     * (possibly path-modified), the cleaned index name, the full dot path and
     * the path used to extract the value from source.
     */
    class Names {

        private final String name;
        private final String indexName;
        private final String indexNameClean;
        private final String fullName;
        private final String sourcePath;

        public Names(String name) {
            this(name, name, name, name);
        }

        public Names(String name, String indexName, String indexNameClean, String fullName) {
            this(name, indexName, indexNameClean, fullName, fullName);
        }

        public Names(String name, String indexName, String indexNameClean, String fullName, @Nullable String sourcePath) {
            this.name = name;
            this.indexName = indexName;
            this.indexNameClean = indexNameClean;
            this.fullName = fullName;
            // Default the source path to the full name when none is given.
            this.sourcePath = sourcePath == null ? this.fullName : sourcePath;
        }

        /**
         * The logical name of the field.
         */
        public String name() {
            return name;
        }

        /**
         * The indexed name of the field. This is the name under which we will
         * store it in the index.
         */
        public String indexName() {
            return indexName;
        }

        /**
         * The cleaned index name, before any "path" modifications performed on it.
         */
        public String indexNameClean() {
            return indexNameClean;
        }

        /**
         * The full name, including dot path.
         */
        public String fullName() {
            return fullName;
        }

        /**
         * The dot path notation to extract the value from source.
         */
        public String sourcePath() {
            return sourcePath;
        }

        /**
         * Creates a new index term based on the provided value.
         */
        public Term createIndexNameTerm(String value) {
            return new Term(indexName, value);
        }

        /**
         * Creates a new index term based on the provided value.
         */
        public Term createIndexNameTerm(BytesRef value) {
            return new Term(indexName, value);
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                // Identity fast path (previously missing): skips five string
                // comparisons when an instance is compared with itself.
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            Names names = (Names) o;
            return name.equals(names.name)
                    && indexName.equals(names.indexName)
                    && indexNameClean.equals(names.indexNameClean)
                    && fullName.equals(names.fullName)
                    && sourcePath.equals(names.sourcePath);
        }

        @Override
        public int hashCode() {
            // Keep the historical 31-based combination so hash values are unchanged.
            int result = name.hashCode();
            result = 31 * result + indexName.hashCode();
            result = 31 * result + indexNameClean.hashCode();
            result = 31 * result + fullName.hashCode();
            result = 31 * result + sourcePath.hashCode();
            return result;
        }
    }

    /**
     * How field data should be loaded: lazily, eagerly, or eagerly including
     * global ordinals.
     */
    enum Loading {
        LAZY {
            @Override
            public String toString() {
                return LAZY_VALUE;
            }
        },
        EAGER {
            @Override
            public String toString() {
                return EAGER_VALUE;
            }
        },
        EAGER_GLOBAL_ORDINALS {
            @Override
            public String toString() {
                return EAGER_GLOBAL_ORDINALS_VALUE;
            }
        };

        public static final String KEY = "loading";
        public static final String EAGER_GLOBAL_ORDINALS_VALUE = "eager_global_ordinals";
        public static final String EAGER_VALUE = "eager";
        public static final String LAZY_VALUE = "lazy";

        /**
         * Parses a loading mode from its string representation (case-insensitive).
         *
         * @param loading the textual value; may be null or empty
         * @param defaultValue returned when {@code loading} is null or empty
         * @throws MapperParsingException if the value is non-empty but unknown
         */
        public static Loading parse(String loading, Loading defaultValue) {
            if (Strings.isNullOrEmpty(loading)) {
                return defaultValue;
            } else if (EAGER_GLOBAL_ORDINALS_VALUE.equalsIgnoreCase(loading)) {
                return EAGER_GLOBAL_ORDINALS;
            } else if (EAGER_VALUE.equalsIgnoreCase(loading)) {
                return EAGER;
            } else if (LAZY_VALUE.equalsIgnoreCase(loading)) {
                return LAZY;
            } else {
                throw new MapperParsingException("Unknown [" + KEY + "] value: [" + loading + "]");
            }
        }
    }

    /**
     * The names this field is known by.
     */
    Names names();

    FieldType fieldType();

    /**
     * The boost factor associated with this field.
     */
    float boost();

    /**
     * The analyzer that will be used to index the field.
     */
    Analyzer indexAnalyzer();

    /**
     * The analyzer that will be used to search the field.
     */
    Analyzer searchAnalyzer();

    /**
     * The analyzer that will be used for quoted search on the field.
     */
    Analyzer searchQuoteAnalyzer();

    /**
     * Similarity used for scoring queries on the field
     */
    SimilarityProvider similarity();

    /**
     * List of fields where this field should be copied to
     */
    AbstractFieldMapper.CopyTo copyTo();

    /**
     * Returns the actual value of the field.
     */
    T value(Object value);

    /**
     * Returns the value that will be used as a result for search. Can be only of specific types... .
     */
    Object valueForSearch(Object value);

    /**
     * Returns the indexed value used to construct search "values".
     */
    BytesRef indexedValueForSearch(Object value);

    /**
     * Should the field query {@link #termQuery(Object, org.elasticsearch.index.query.QueryParseContext)} be used when detecting this
     * field in query string.
     */
    boolean useTermQueryWithQueryString();

    Query termQuery(Object value, @Nullable QueryParseContext context);

    Filter termFilter(Object value, @Nullable QueryParseContext context);

    // NOTE(review): raw List kept on the two methods below for backward
    // compatibility with existing implementers.
    Filter termsFilter(List values, @Nullable QueryParseContext context);

    Filter fieldDataTermsFilter(List values, @Nullable QueryParseContext context);

    Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context);

    Filter rangeFilter(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context);

    Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions);

    Query prefixQuery(Object value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context);

    Filter prefixFilter(Object value, @Nullable QueryParseContext context);

    Query regexpQuery(Object value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context);

    Filter regexpFilter(Object value, int flags, int maxDeterminizedStates, @Nullable QueryParseContext parseContext);

    /**
     * A term query to use when parsing a query string. Can return <tt>null</tt>.
     */
    @Nullable
    Query queryStringTermQuery(Term term);

    /**
     * Null value filter, returns <tt>null</tt> if there is no null value associated with the field.
     */
    @Nullable
    Filter nullValueFilter();

    FieldDataType fieldDataType();

    PostingsFormatProvider postingsFormatProvider();

    DocValuesFormatProvider docValuesFormatProvider();

    boolean isNumeric();

    boolean isSortable();

    boolean supportsNullValue();

    boolean hasDocValues();

    Loading normsLoading(Loading defaultLoading);

    /**
     * Fields might not be available before indexing, for example _all, token_count,...
     * When get is called and these fields are requested, this case needs special treatment.
     *
     * @return If the field is available before indexing or not.
     * */
    boolean isGenerated();
}
| |
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2;
import android.annotation.TargetApi;
import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.media.PlaybackParams;
import android.os.Handler;
import android.os.Looper;
import android.support.annotation.Nullable;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.TextureView;
import com.google.android.exoplayer2.audio.AudioAttributes;
import com.google.android.exoplayer2.audio.AudioRendererEventListener;
import com.google.android.exoplayer2.decoder.DecoderCounters;
import com.google.android.exoplayer2.metadata.Metadata;
import com.google.android.exoplayer2.metadata.MetadataRenderer;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.source.TrackGroupArray;
import com.google.android.exoplayer2.text.Cue;
import com.google.android.exoplayer2.text.TextRenderer;
import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
import com.google.android.exoplayer2.trackselection.TrackSelector;
import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.video.VideoRendererEventListener;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.CopyOnWriteArraySet;
/**
* An {@link ExoPlayer} implementation that uses default {@link Renderer} components. Instances can
* be obtained from {@link ExoPlayerFactory}.
*/
@TargetApi(16)
public class SimpleExoPlayer implements ExoPlayer {
/**
 * A listener for video rendering information from a {@link SimpleExoPlayer}.
 */
public interface VideoListener {

    /**
     * Called each time there's a change in the size of the video being rendered.
     *
     * @param width The video width in pixels.
     * @param height The video height in pixels.
     * @param unappliedRotationDegrees For videos that require a rotation, this is the clockwise
     *     rotation in degrees that the application should apply for the video for it to be rendered
     *     in the correct orientation. This value will always be zero on API levels 21 and above,
     *     since the renderer will apply all necessary rotations internally. On earlier API levels
     *     this is not possible. Applications that use {@link android.view.TextureView} can apply
     *     the rotation by calling {@link android.view.TextureView#setTransform}. Applications that
     *     do not expect to encounter rotated videos can safely ignore this parameter.
     * @param pixelWidthHeightRatio The width to height ratio of each pixel. For the normal case
     *     of square pixels this will be equal to 1.0. Different values are indicative of anamorphic
     *     content.
     */
    void onVideoSizeChanged(int width, int height, int unappliedRotationDegrees,
        float pixelWidthHeightRatio);

    /**
     * Called when a frame is rendered for the first time since setting the surface, and when a
     * frame is rendered for the first time since a video track was selected.
     */
    void onRenderedFirstFrame();
}

private static final String TAG = "SimpleExoPlayer";

// Renderers created by the RenderersFactory; also handed to the wrapped ExoPlayerImpl.
protected final Renderer[] renderers;

// The wrapped player that playback control is delegated to.
private final ExoPlayer player;
// Receives renderer, surface-holder and texture callbacks (registered in the constructor).
private final ComponentListener componentListener;
private final CopyOnWriteArraySet<VideoListener> videoListeners;
private final CopyOnWriteArraySet<TextRenderer.Output> textOutputs;
private final CopyOnWriteArraySet<MetadataRenderer.Output> metadataOutputs;
// Counts of renderers whose track type is C.TRACK_TYPE_VIDEO / C.TRACK_TYPE_AUDIO.
private final int videoRendererCount;
private final int audioRendererCount;

private Format videoFormat;
private Format audioFormat;

private Surface surface;
// NOTE(review): appears to be true when the surface was created by the player itself
// (the TextureView path passes true, the raw-surface path false) — confirm against
// setVideoSurfaceInternal, which is defined outside this excerpt.
private boolean ownsSurface;
@C.VideoScalingMode
private int videoScalingMode;
private SurfaceHolder surfaceHolder;
private TextureView textureView;
private AudioRendererEventListener audioDebugListener;
private VideoRendererEventListener videoDebugListener;
private DecoderCounters videoDecoderCounters;
private DecoderCounters audioDecoderCounters;
private int audioSessionId;
private AudioAttributes audioAttributes;
private float audioVolume;
/**
 * Creates the renderer components via {@code renderersFactory}, counts the video and audio
 * renderers, applies default audio/video settings and builds the wrapped {@link ExoPlayerImpl}.
 */
protected SimpleExoPlayer(RenderersFactory renderersFactory, TrackSelector trackSelector,
    LoadControl loadControl) {
  componentListener = new ComponentListener();
  videoListeners = new CopyOnWriteArraySet<>();
  textOutputs = new CopyOnWriteArraySet<>();
  metadataOutputs = new CopyOnWriteArraySet<>();
  // Deliver events on the current thread's looper, falling back to the main looper.
  Looper looper = Looper.myLooper();
  if (looper == null) {
    looper = Looper.getMainLooper();
  }
  Handler handler = new Handler(looper);
  renderers = renderersFactory.createRenderers(handler, componentListener, componentListener,
      componentListener, componentListener);

  // Obtain counts of video and audio renderers.
  int videoCount = 0;
  int audioCount = 0;
  for (Renderer renderer : renderers) {
    int trackType = renderer.getTrackType();
    if (trackType == C.TRACK_TYPE_VIDEO) {
      videoCount++;
    } else if (trackType == C.TRACK_TYPE_AUDIO) {
      audioCount++;
    }
  }
  this.videoRendererCount = videoCount;
  this.audioRendererCount = audioCount;

  // Set initial values.
  audioVolume = 1;
  audioSessionId = C.AUDIO_SESSION_ID_UNSET;
  audioAttributes = AudioAttributes.DEFAULT;
  videoScalingMode = C.VIDEO_SCALING_MODE_DEFAULT;

  // Build the player and associated objects.
  player = new ExoPlayerImpl(renderers, trackSelector, loadControl);
}
/**
 * Sets the video scaling mode.
 * <p>
 * Note that the scaling mode only applies if a {@link MediaCodec}-based video {@link Renderer} is
 * enabled and if the output surface is owned by a {@link android.view.SurfaceView}.
 *
 * @param videoScalingMode The video scaling mode.
 */
public void setVideoScalingMode(@C.VideoScalingMode int videoScalingMode) {
  this.videoScalingMode = videoScalingMode;
  // Deliver the new mode to every video renderer.
  ExoPlayerMessage[] messages = new ExoPlayerMessage[videoRendererCount];
  int index = 0;
  for (Renderer renderer : renderers) {
    if (renderer.getTrackType() != C.TRACK_TYPE_VIDEO) {
      continue;
    }
    messages[index++] =
        new ExoPlayerMessage(renderer, C.MSG_SET_SCALING_MODE, videoScalingMode);
  }
  player.sendMessages(messages);
}

/**
 * Returns the video scaling mode.
 */
public @C.VideoScalingMode int getVideoScalingMode() {
  return videoScalingMode;
}
/**
 * Clears any {@link Surface}, {@link SurfaceHolder}, {@link SurfaceView} or {@link TextureView}
 * currently set on the player.
 */
public void clearVideoSurface() {
  setVideoSurface(null);
}

/**
 * Sets the {@link Surface} onto which video will be rendered. The caller is responsible for
 * tracking the lifecycle of the surface, and must clear the surface by calling
 * {@code setVideoSurface(null)} if the surface is destroyed.
 * <p>
 * If the surface is held by a {@link SurfaceView}, {@link TextureView} or {@link SurfaceHolder}
 * then it's recommended to use {@link #setVideoSurfaceView(SurfaceView)},
 * {@link #setVideoTextureView(TextureView)} or {@link #setVideoSurfaceHolder(SurfaceHolder)}
 * rather than this method, since passing the holder allows the player to track the lifecycle of
 * the surface automatically.
 *
 * @param surface The {@link Surface}.
 */
public void setVideoSurface(Surface surface) {
  // Detach any SurfaceHolder/TextureView callbacks before adopting the raw surface.
  removeSurfaceCallbacks();
  setVideoSurfaceInternal(surface, false);
}

/**
 * Clears the {@link Surface} onto which video is being rendered if it matches the one passed.
 * Else does nothing.
 *
 * @param surface The surface to clear.
 */
public void clearVideoSurface(Surface surface) {
  if (surface == null) {
    return;
  }
  if (this.surface == surface) {
    setVideoSurface(null);
  }
}
/**
 * Sets the {@link SurfaceHolder} that holds the {@link Surface} onto which video will be
 * rendered. The player will track the lifecycle of the surface automatically.
 *
 * @param surfaceHolder The surface holder.
 */
public void setVideoSurfaceHolder(SurfaceHolder surfaceHolder) {
  removeSurfaceCallbacks();
  this.surfaceHolder = surfaceHolder;
  if (surfaceHolder == null) {
    setVideoSurfaceInternal(null, false);
    return;
  }
  surfaceHolder.addCallback(componentListener);
  // Only adopt the holder's surface if it already exists and is valid.
  Surface holderSurface = surfaceHolder.getSurface();
  boolean surfaceIsReady = holderSurface != null && holderSurface.isValid();
  setVideoSurfaceInternal(surfaceIsReady ? holderSurface : null, false);
}

/**
 * Clears the {@link SurfaceHolder} that holds the {@link Surface} onto which video is being
 * rendered if it matches the one passed. Else does nothing.
 *
 * @param surfaceHolder The surface holder to clear.
 */
public void clearVideoSurfaceHolder(SurfaceHolder surfaceHolder) {
  if (surfaceHolder == null) {
    return;
  }
  if (this.surfaceHolder == surfaceHolder) {
    setVideoSurfaceHolder(null);
  }
}

/**
 * Sets the {@link SurfaceView} onto which video will be rendered. The player will track the
 * lifecycle of the surface automatically.
 *
 * @param surfaceView The surface view.
 */
public void setVideoSurfaceView(SurfaceView surfaceView) {
  SurfaceHolder holder = surfaceView == null ? null : surfaceView.getHolder();
  setVideoSurfaceHolder(holder);
}

/**
 * Clears the {@link SurfaceView} onto which video is being rendered if it matches the one passed.
 * Else does nothing.
 *
 * @param surfaceView The surface view to clear.
 */
public void clearVideoSurfaceView(SurfaceView surfaceView) {
  SurfaceHolder holder = surfaceView == null ? null : surfaceView.getHolder();
  clearVideoSurfaceHolder(holder);
}
/**
 * Sets the {@link TextureView} onto which video will be rendered. The player will track the
 * lifecycle of the surface automatically.
 *
 * @param textureView The texture view.
 */
public void setVideoTextureView(TextureView textureView) {
  removeSurfaceCallbacks();
  this.textureView = textureView;
  if (textureView == null) {
    setVideoSurfaceInternal(null, true);
    return;
  }
  if (textureView.getSurfaceTextureListener() != null) {
    Log.w(TAG, "Replacing existing SurfaceTextureListener.");
  }
  textureView.setSurfaceTextureListener(componentListener);
  // Use the texture immediately if it already exists; otherwise wait for the callback.
  SurfaceTexture surfaceTexture =
      textureView.isAvailable() ? textureView.getSurfaceTexture() : null;
  if (surfaceTexture == null) {
    setVideoSurfaceInternal(null, true);
  } else {
    setVideoSurfaceInternal(new Surface(surfaceTexture), true);
  }
}

/**
 * Clears the {@link TextureView} onto which video is being rendered if it matches the one passed.
 * Else does nothing.
 *
 * @param textureView The texture view to clear.
 */
public void clearVideoTextureView(TextureView textureView) {
  if (textureView == null) {
    return;
  }
  if (this.textureView == textureView) {
    setVideoTextureView(null);
  }
}
/**
 * Sets the stream type for audio playback, used by the underlying audio track.
 * <p>
 * Setting the stream type during playback may introduce a short gap in audio output as the audio
 * track is recreated. A new audio session id will also be generated.
 * <p>
 * Calling this method overwrites any attributes set previously by calling
 * {@link #setAudioAttributes(AudioAttributes)}.
 *
 * @deprecated Use {@link #setAudioAttributes(AudioAttributes)}.
 * @param streamType The stream type for audio playback.
 */
@Deprecated
public void setAudioStreamType(@C.StreamType int streamType) {
  // Translate the legacy stream type into the equivalent audio attributes.
  AudioAttributes.Builder builder = new AudioAttributes.Builder();
  builder.setUsage(Util.getAudioUsageForStreamType(streamType));
  builder.setContentType(Util.getAudioContentTypeForStreamType(streamType));
  setAudioAttributes(builder.build());
}

/**
 * Returns the stream type for audio playback.
 *
 * @deprecated Use {@link #getAudioAttributes()}.
 */
@Deprecated
public @C.StreamType int getAudioStreamType() {
  return Util.getStreamTypeForAudioUsage(audioAttributes.usage);
}
/**
 * Sets the attributes for audio playback, used by the underlying audio track. If not set, the
 * default audio attributes will be used. They are suitable for general media playback.
 * <p>
 * Setting the audio attributes during playback may introduce a short gap in audio output as the
 * audio track is recreated. A new audio session id will also be generated.
 * <p>
 * If tunneling is enabled by the track selector, the specified audio attributes will be ignored,
 * but they will take effect if audio is later played without tunneling.
 * <p>
 * If the device is running a build before platform API version 21, audio attributes cannot be set
 * directly on the underlying audio track. In this case, the usage will be mapped onto an
 * equivalent stream type using {@link Util#getStreamTypeForAudioUsage(int)}.
 *
 * @param audioAttributes The attributes to use for audio playback.
 */
public void setAudioAttributes(AudioAttributes audioAttributes) {
  this.audioAttributes = audioAttributes;
  player.sendMessages(createAudioRendererMessages(C.MSG_SET_AUDIO_ATTRIBUTES, audioAttributes));
}

/**
 * Returns the attributes for audio playback.
 */
public AudioAttributes getAudioAttributes() {
  return audioAttributes;
}

/**
 * Sets the audio volume, with 0 being silence and 1 being unity gain.
 *
 * @param audioVolume The audio volume.
 */
public void setVolume(float audioVolume) {
  this.audioVolume = audioVolume;
  // The float is autoboxed to a Float payload, exactly as the previous inline code did.
  player.sendMessages(createAudioRendererMessages(C.MSG_SET_VOLUME, audioVolume));
}

/**
 * Returns the audio volume, with 0 being silence and 1 being unity gain.
 */
public float getVolume() {
  return audioVolume;
}

/**
 * Builds one {@code ExoPlayerMessage} per audio renderer carrying the given message type and
 * payload. Extracted to remove the loop previously duplicated in
 * {@link #setAudioAttributes(AudioAttributes)} and {@link #setVolume(float)}.
 */
private ExoPlayerMessage[] createAudioRendererMessages(int messageType, Object payload) {
  ExoPlayerMessage[] messages = new ExoPlayerMessage[audioRendererCount];
  int count = 0;
  for (Renderer renderer : renderers) {
    if (renderer.getTrackType() == C.TRACK_TYPE_AUDIO) {
      messages[count++] = new ExoPlayerMessage(renderer, messageType, payload);
    }
  }
  return messages;
}
/**
 * Sets the {@link PlaybackParams} governing audio playback.
 *
 * @deprecated Use {@link #setPlaybackParameters(PlaybackParameters)}.
 * @param params The {@link PlaybackParams}, or null to clear any previously set parameters.
 */
@Deprecated
@TargetApi(23)
public void setPlaybackParams(@Nullable PlaybackParams params) {
  if (params == null) {
    setPlaybackParameters(null);
    return;
  }
  // Normalize unset fields to their defaults before reading speed/pitch.
  params.allowDefaults();
  setPlaybackParameters(new PlaybackParameters(params.getSpeed(), params.getPitch()));
}
/**
 * Returns the video format currently being played, or null if no video is being played.
 */
public Format getVideoFormat() {
  return videoFormat;
}

/**
 * Returns the audio format currently being played, or null if no audio is being played.
 */
public Format getAudioFormat() {
  return audioFormat;
}

/**
 * Returns the audio session identifier, or {@link C#AUDIO_SESSION_ID_UNSET} if not set.
 */
public int getAudioSessionId() {
  return audioSessionId;
}

/**
 * Returns {@link DecoderCounters} for video, or null if no video is being played.
 */
public DecoderCounters getVideoDecoderCounters() {
  return videoDecoderCounters;
}

/**
 * Returns {@link DecoderCounters} for audio, or null if no audio is being played.
 */
public DecoderCounters getAudioDecoderCounters() {
  return audioDecoderCounters;
}
/**
 * Adds a listener to receive video events.
 *
 * @param listener The listener to register.
 */
public void addVideoListener(VideoListener listener) {
  // Listeners are notified from ComponentListener's video callbacks.
  videoListeners.add(listener);
}
/**
 * Removes a listener of video events.
 *
 * @param listener The listener to unregister.
 */
public void removeVideoListener(VideoListener listener) {
  videoListeners.remove(listener);
}
/**
 * Sets a listener to receive video events, removing all existing listeners.
 *
 * @param listener The listener, or null to just clear all existing listeners.
 * @deprecated Use {@link #addVideoListener(VideoListener)}.
 */
@Deprecated
public void setVideoListener(VideoListener listener) {
  videoListeners.clear();
  if (listener == null) {
    return;
  }
  addVideoListener(listener);
}
/**
 * Equivalent to {@link #removeVideoListener(VideoListener)}.
 *
 * @param listener The listener to clear.
 * @deprecated Use {@link #removeVideoListener(VideoListener)}.
 */
@Deprecated
public void clearVideoListener(VideoListener listener) {
  // Deprecated alias kept for backwards compatibility; simply delegates.
  removeVideoListener(listener);
}
/**
 * Registers an output to receive text events.
 *
 * @param listener The output to register.
 */
public void addTextOutput(TextRenderer.Output listener) {
  // Outputs are notified from ComponentListener.onCues.
  textOutputs.add(listener);
}
/**
 * Removes a text output.
 *
 * @param listener The output to remove.
 */
public void removeTextOutput(TextRenderer.Output listener) {
  textOutputs.remove(listener);
}
/**
 * Sets an output to receive text events, removing all existing outputs.
 *
 * @param output The output, or null to just clear all existing outputs.
 * @deprecated Use {@link #addTextOutput(TextRenderer.Output)}.
 */
@Deprecated
public void setTextOutput(TextRenderer.Output output) {
  textOutputs.clear();
  if (output == null) {
    return;
  }
  addTextOutput(output);
}
/**
 * Equivalent to {@link #removeTextOutput(TextRenderer.Output)}.
 *
 * @param output The output to clear.
 * @deprecated Use {@link #removeTextOutput(TextRenderer.Output)}.
 */
@Deprecated
public void clearTextOutput(TextRenderer.Output output) {
  // Deprecated alias kept for backwards compatibility; simply delegates.
  removeTextOutput(output);
}
/**
 * Registers an output to receive metadata events.
 *
 * @param listener The output to register.
 */
public void addMetadataOutput(MetadataRenderer.Output listener) {
  // Outputs are notified from ComponentListener.onMetadata.
  metadataOutputs.add(listener);
}
/**
 * Removes a metadata output.
 *
 * @param listener The output to remove.
 */
public void removeMetadataOutput(MetadataRenderer.Output listener) {
  metadataOutputs.remove(listener);
}
/**
 * Sets an output to receive metadata events, removing all existing outputs.
 *
 * @param output The output, or null to just clear all existing outputs.
 * @deprecated Use {@link #addMetadataOutput(MetadataRenderer.Output)}.
 */
@Deprecated
public void setMetadataOutput(MetadataRenderer.Output output) {
  metadataOutputs.clear();
  if (output == null) {
    return;
  }
  addMetadataOutput(output);
}
/**
 * Equivalent to {@link #removeMetadataOutput(MetadataRenderer.Output)}.
 *
 * @param output The output to clear.
 * @deprecated Use {@link #removeMetadataOutput(MetadataRenderer.Output)}.
 */
@Deprecated
public void clearMetadataOutput(MetadataRenderer.Output output) {
  // Deprecated alias kept for backwards compatibility; simply delegates.
  removeMetadataOutput(output);
}
/**
 * Sets a listener to receive debug events from the video renderer.
 *
 * @param listener The listener.
 */
public void setVideoDebugListener(VideoRendererEventListener listener) {
  // Single-listener slot (not a collection); replaces any previous listener.
  videoDebugListener = listener;
}
/**
 * Sets a listener to receive debug events from the audio renderer.
 *
 * @param listener The listener.
 */
public void setAudioDebugListener(AudioRendererEventListener listener) {
  // Single-listener slot (not a collection); replaces any previous listener.
  audioDebugListener = listener;
}
// ExoPlayer implementation
// Every method below delegates directly to the wrapped ExoPlayer instance ("player")
// without additional logic.
@Override
public Looper getPlaybackLooper() {
  return player.getPlaybackLooper();
}
@Override
public void addListener(Player.EventListener listener) {
  player.addListener(listener);
}
@Override
public void removeListener(Player.EventListener listener) {
  player.removeListener(listener);
}
@Override
public int getPlaybackState() {
  return player.getPlaybackState();
}
@Override
public void prepare(MediaSource mediaSource) {
  player.prepare(mediaSource);
}
@Override
public void prepare(MediaSource mediaSource, boolean resetPosition, boolean resetState) {
  player.prepare(mediaSource, resetPosition, resetState);
}
@Override
public void setPlayWhenReady(boolean playWhenReady) {
  player.setPlayWhenReady(playWhenReady);
}
@Override
public boolean getPlayWhenReady() {
  return player.getPlayWhenReady();
}
@Override
public @RepeatMode int getRepeatMode() {
  return player.getRepeatMode();
}
@Override
public void setRepeatMode(@RepeatMode int repeatMode) {
  player.setRepeatMode(repeatMode);
}
@Override
public boolean isLoading() {
  return player.isLoading();
}
@Override
public void seekToDefaultPosition() {
  player.seekToDefaultPosition();
}
@Override
public void seekToDefaultPosition(int windowIndex) {
  player.seekToDefaultPosition(windowIndex);
}
@Override
public void seekTo(long positionMs) {
  player.seekTo(positionMs);
}
@Override
public void seekTo(int windowIndex, long positionMs) {
  player.seekTo(windowIndex, positionMs);
}
@Override
public void setPlaybackParameters(PlaybackParameters playbackParameters) {
  player.setPlaybackParameters(playbackParameters);
}
@Override
public PlaybackParameters getPlaybackParameters() {
  return player.getPlaybackParameters();
}
@Override
public void stop() {
  player.stop();
}
@Override
public void release() {
  player.release();
  // Detach our surface callbacks after releasing the player so no further
  // surface events are routed to a released player.
  removeSurfaceCallbacks();
  if (surface != null) {
    // Only release surfaces this class created itself (ownsSurface is recorded
    // in setVideoSurfaceInternal); externally supplied surfaces belong to the caller.
    if (ownsSurface) {
      surface.release();
    }
    surface = null;
  }
}
// The methods below also delegate directly to the wrapped ExoPlayer instance.
@Override
public void sendMessages(ExoPlayerMessage... messages) {
  player.sendMessages(messages);
}
@Override
public void blockingSendMessages(ExoPlayerMessage... messages) {
  player.blockingSendMessages(messages);
}
@Override
public int getRendererCount() {
  return player.getRendererCount();
}
@Override
public int getRendererType(int index) {
  return player.getRendererType(index);
}
@Override
public TrackGroupArray getCurrentTrackGroups() {
  return player.getCurrentTrackGroups();
}
@Override
public TrackSelectionArray getCurrentTrackSelections() {
  return player.getCurrentTrackSelections();
}
@Override
public Timeline getCurrentTimeline() {
  return player.getCurrentTimeline();
}
@Override
public Object getCurrentManifest() {
  return player.getCurrentManifest();
}
@Override
public int getCurrentPeriodIndex() {
  return player.getCurrentPeriodIndex();
}
@Override
public int getCurrentWindowIndex() {
  return player.getCurrentWindowIndex();
}
@Override
public long getDuration() {
  return player.getDuration();
}
@Override
public long getCurrentPosition() {
  return player.getCurrentPosition();
}
@Override
public long getBufferedPosition() {
  return player.getBufferedPosition();
}
@Override
public int getBufferedPercentage() {
  return player.getBufferedPercentage();
}
@Override
public boolean isCurrentWindowDynamic() {
  return player.isCurrentWindowDynamic();
}
@Override
public boolean isCurrentWindowSeekable() {
  return player.isCurrentWindowSeekable();
}
@Override
public boolean isPlayingAd() {
  return player.isPlayingAd();
}
@Override
public int getCurrentAdGroupIndex() {
  return player.getCurrentAdGroupIndex();
}
@Override
public int getCurrentAdIndexInAdGroup() {
  return player.getCurrentAdIndexInAdGroup();
}
@Override
public long getContentPosition() {
  return player.getContentPosition();
}
// Internal methods.
// Detaches this player's ComponentListener from any TextureView or SurfaceHolder it was
// previously attached to, clearing the corresponding fields.
private void removeSurfaceCallbacks() {
  if (textureView != null) {
    if (textureView.getSurfaceTextureListener() != componentListener) {
      // Someone else replaced our listener on the TextureView; don't clobber theirs.
      Log.w(TAG, "SurfaceTextureListener already unset or replaced.");
    } else {
      textureView.setSurfaceTextureListener(null);
    }
    textureView = null;
  }
  if (surfaceHolder != null) {
    surfaceHolder.removeCallback(componentListener);
    surfaceHolder = null;
  }
}
/**
 * Sends the given surface to all video renderers and records whether this player owns it
 * (and must therefore release it when it is replaced or the player is released).
 *
 * @param surface The surface to use for video output, or null to clear it.
 * @param ownsSurface Whether this player created the surface and is responsible for releasing it.
 */
private void setVideoSurfaceInternal(Surface surface, boolean ownsSurface) {
  // Note: We don't turn this method into a no-op if the surface is being replaced with itself
  // so as to ensure onRenderedFirstFrame callbacks are still called in this case.
  ExoPlayerMessage[] messages = new ExoPlayerMessage[videoRendererCount];
  int count = 0;
  for (Renderer renderer : renderers) {
    if (renderer.getTrackType() == C.TRACK_TYPE_VIDEO) {
      messages[count++] = new ExoPlayerMessage(renderer, C.MSG_SET_SURFACE, surface);
    }
  }
  if (this.surface != null && this.surface != surface) {
    // We're replacing a surface. Block to ensure that it's not accessed after the method returns.
    player.blockingSendMessages(messages);
    // If we created the previous surface, we are responsible for releasing it.
    if (this.ownsSurface) {
      this.surface.release();
    }
  } else {
    player.sendMessages(messages);
  }
  this.surface = surface;
  this.ownsSurface = ownsSurface;
}
/**
 * Routes renderer, text, metadata and surface callbacks to the enclosing
 * SimpleExoPlayer's registered listeners/outputs and keeps the player's
 * format/counters/session-id fields in sync.
 */
private final class ComponentListener implements VideoRendererEventListener,
    AudioRendererEventListener, TextRenderer.Output, MetadataRenderer.Output,
    SurfaceHolder.Callback, TextureView.SurfaceTextureListener {
  // VideoRendererEventListener implementation
  @Override
  public void onVideoEnabled(DecoderCounters counters) {
    videoDecoderCounters = counters;
    if (videoDebugListener != null) {
      videoDebugListener.onVideoEnabled(counters);
    }
  }
  @Override
  public void onVideoDecoderInitialized(String decoderName, long initializedTimestampMs,
      long initializationDurationMs) {
    if (videoDebugListener != null) {
      videoDebugListener.onVideoDecoderInitialized(decoderName, initializedTimestampMs,
          initializationDurationMs);
    }
  }
  @Override
  public void onVideoInputFormatChanged(Format format) {
    videoFormat = format;
    if (videoDebugListener != null) {
      videoDebugListener.onVideoInputFormatChanged(format);
    }
  }
  @Override
  public void onDroppedFrames(int count, long elapsed) {
    if (videoDebugListener != null) {
      videoDebugListener.onDroppedFrames(count, elapsed);
    }
  }
  @Override
  public void onVideoSizeChanged(int width, int height, int unappliedRotationDegrees,
      float pixelWidthHeightRatio) {
    for (VideoListener videoListener : videoListeners) {
      videoListener.onVideoSizeChanged(width, height, unappliedRotationDegrees,
          pixelWidthHeightRatio);
    }
    if (videoDebugListener != null) {
      videoDebugListener.onVideoSizeChanged(width, height, unappliedRotationDegrees,
          pixelWidthHeightRatio);
    }
  }
  @Override
  public void onRenderedFirstFrame(Surface surface) {
    // Only forward to video listeners if the frame was rendered to the player's
    // current surface (a stale surface may still deliver this callback).
    if (SimpleExoPlayer.this.surface == surface) {
      for (VideoListener videoListener : videoListeners) {
        videoListener.onRenderedFirstFrame();
      }
    }
    if (videoDebugListener != null) {
      videoDebugListener.onRenderedFirstFrame(surface);
    }
  }
  @Override
  public void onVideoDisabled(DecoderCounters counters) {
    if (videoDebugListener != null) {
      videoDebugListener.onVideoDisabled(counters);
    }
    videoFormat = null;
    videoDecoderCounters = null;
  }
  // AudioRendererEventListener implementation
  @Override
  public void onAudioEnabled(DecoderCounters counters) {
    audioDecoderCounters = counters;
    if (audioDebugListener != null) {
      audioDebugListener.onAudioEnabled(counters);
    }
  }
  @Override
  public void onAudioSessionId(int sessionId) {
    audioSessionId = sessionId;
    if (audioDebugListener != null) {
      audioDebugListener.onAudioSessionId(sessionId);
    }
  }
  @Override
  public void onAudioDecoderInitialized(String decoderName, long initializedTimestampMs,
      long initializationDurationMs) {
    if (audioDebugListener != null) {
      audioDebugListener.onAudioDecoderInitialized(decoderName, initializedTimestampMs,
          initializationDurationMs);
    }
  }
  @Override
  public void onAudioInputFormatChanged(Format format) {
    audioFormat = format;
    if (audioDebugListener != null) {
      audioDebugListener.onAudioInputFormatChanged(format);
    }
  }
  // Fix: removed "throws IOException" — an overriding method may not declare checked
  // exceptions that the interface method (AudioRendererEventListener.onAudioTrackUnderrun)
  // does not declare; the body never throws one either.
  @Override
  public void onAudioTrackUnderrun(int bufferSize, long bufferSizeMs,
      long elapsedSinceLastFeedMs) {
    if (audioDebugListener != null) {
      audioDebugListener.onAudioTrackUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs);
    }
  }
  @Override
  public void onAudioDisabled(DecoderCounters counters) {
    if (audioDebugListener != null) {
      audioDebugListener.onAudioDisabled(counters);
    }
    audioFormat = null;
    audioDecoderCounters = null;
    audioSessionId = C.AUDIO_SESSION_ID_UNSET;
  }
  // TextRenderer.Output implementation
  @Override
  public void onCues(List<Cue> cues) {
    for (TextRenderer.Output textOutput : textOutputs) {
      textOutput.onCues(cues);
    }
  }
  // MetadataRenderer.Output implementation
  @Override
  public void onMetadata(Metadata metadata) {
    for (MetadataRenderer.Output metadataOutput : metadataOutputs) {
      metadataOutput.onMetadata(metadata);
    }
  }
  // SurfaceHolder.Callback implementation
  @Override
  public void surfaceCreated(SurfaceHolder holder) {
    // Surface belongs to the holder, so the player does not own it.
    setVideoSurfaceInternal(holder.getSurface(), false);
  }
  @Override
  public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    // Do nothing.
  }
  @Override
  public void surfaceDestroyed(SurfaceHolder holder) {
    setVideoSurfaceInternal(null, false);
  }
  // TextureView.SurfaceTextureListener implementation
  @Override
  public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
    // The Surface is created here, so the player owns (and must release) it.
    setVideoSurfaceInternal(new Surface(surfaceTexture), true);
  }
  @Override
  public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int width, int height) {
    // Do nothing.
  }
  @Override
  public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
    setVideoSurfaceInternal(null, true);
    return true;
  }
  @Override
  public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
    // Do nothing.
  }
  // NOTE(review): this method duplicates the enclosing class's getCurrentManifest() and is
  // unreachable from outside this private class — it looks stray; confirm it has no in-file
  // callers and consider removing it.
  public final Object getCurrentManifest_() {
    return player.getCurrentManifest();
  }
}
}
| |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.aiplatform.v1;
import static com.google.cloud.aiplatform.v1.VizierServiceClient.ListStudiesPagedResponse;
import static com.google.cloud.aiplatform.v1.VizierServiceClient.ListTrialsPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.ClientSettings;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.cloud.aiplatform.v1.stub.VizierServiceStubSettings;
import com.google.longrunning.Operation;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.util.List;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link VizierServiceClient}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (aiplatform.googleapis.com) and default port (443) are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the total timeout of createStudy to 30 seconds:
*
* <pre>{@code
* VizierServiceSettings.Builder vizierServiceSettingsBuilder = VizierServiceSettings.newBuilder();
* vizierServiceSettingsBuilder
* .createStudySettings()
* .setRetrySettings(
* vizierServiceSettingsBuilder
* .createStudySettings()
* .getRetrySettings()
* .toBuilder()
* .setTotalTimeout(Duration.ofSeconds(30))
* .build());
* VizierServiceSettings vizierServiceSettings = vizierServiceSettingsBuilder.build();
* }</pre>
*/
// NOTE: auto-generated GAPIC code — pure delegation to VizierServiceStubSettings; do not
// hand-edit logic here.
@Generated("by gapic-generator-java")
public class VizierServiceSettings extends ClientSettings<VizierServiceSettings> {
  /** Returns the object with the settings used for calls to createStudy. */
  public UnaryCallSettings<CreateStudyRequest, Study> createStudySettings() {
    return ((VizierServiceStubSettings) getStubSettings()).createStudySettings();
  }
  /** Returns the object with the settings used for calls to getStudy. */
  public UnaryCallSettings<GetStudyRequest, Study> getStudySettings() {
    return ((VizierServiceStubSettings) getStubSettings()).getStudySettings();
  }
  /** Returns the object with the settings used for calls to listStudies. */
  public PagedCallSettings<ListStudiesRequest, ListStudiesResponse, ListStudiesPagedResponse>
      listStudiesSettings() {
    return ((VizierServiceStubSettings) getStubSettings()).listStudiesSettings();
  }
  /** Returns the object with the settings used for calls to deleteStudy. */
  public UnaryCallSettings<DeleteStudyRequest, Empty> deleteStudySettings() {
    return ((VizierServiceStubSettings) getStubSettings()).deleteStudySettings();
  }
  /** Returns the object with the settings used for calls to lookupStudy. */
  public UnaryCallSettings<LookupStudyRequest, Study> lookupStudySettings() {
    return ((VizierServiceStubSettings) getStubSettings()).lookupStudySettings();
  }
  /** Returns the object with the settings used for calls to suggestTrials. */
  public UnaryCallSettings<SuggestTrialsRequest, Operation> suggestTrialsSettings() {
    return ((VizierServiceStubSettings) getStubSettings()).suggestTrialsSettings();
  }
  /** Returns the object with the settings used for calls to suggestTrials. */
  public OperationCallSettings<SuggestTrialsRequest, SuggestTrialsResponse, SuggestTrialsMetadata>
      suggestTrialsOperationSettings() {
    return ((VizierServiceStubSettings) getStubSettings()).suggestTrialsOperationSettings();
  }
  /** Returns the object with the settings used for calls to createTrial. */
  public UnaryCallSettings<CreateTrialRequest, Trial> createTrialSettings() {
    return ((VizierServiceStubSettings) getStubSettings()).createTrialSettings();
  }
  /** Returns the object with the settings used for calls to getTrial. */
  public UnaryCallSettings<GetTrialRequest, Trial> getTrialSettings() {
    return ((VizierServiceStubSettings) getStubSettings()).getTrialSettings();
  }
  /** Returns the object with the settings used for calls to listTrials. */
  public PagedCallSettings<ListTrialsRequest, ListTrialsResponse, ListTrialsPagedResponse>
      listTrialsSettings() {
    return ((VizierServiceStubSettings) getStubSettings()).listTrialsSettings();
  }
  /** Returns the object with the settings used for calls to addTrialMeasurement. */
  public UnaryCallSettings<AddTrialMeasurementRequest, Trial> addTrialMeasurementSettings() {
    return ((VizierServiceStubSettings) getStubSettings()).addTrialMeasurementSettings();
  }
  /** Returns the object with the settings used for calls to completeTrial. */
  public UnaryCallSettings<CompleteTrialRequest, Trial> completeTrialSettings() {
    return ((VizierServiceStubSettings) getStubSettings()).completeTrialSettings();
  }
  /** Returns the object with the settings used for calls to deleteTrial. */
  public UnaryCallSettings<DeleteTrialRequest, Empty> deleteTrialSettings() {
    return ((VizierServiceStubSettings) getStubSettings()).deleteTrialSettings();
  }
  /** Returns the object with the settings used for calls to checkTrialEarlyStoppingState. */
  public UnaryCallSettings<CheckTrialEarlyStoppingStateRequest, Operation>
      checkTrialEarlyStoppingStateSettings() {
    return ((VizierServiceStubSettings) getStubSettings()).checkTrialEarlyStoppingStateSettings();
  }
  /** Returns the object with the settings used for calls to checkTrialEarlyStoppingState. */
  // NOTE: "Metatdata" (sic) is the actual name of the generated proto message type and must
  // not be corrected here.
  public OperationCallSettings<
          CheckTrialEarlyStoppingStateRequest,
          CheckTrialEarlyStoppingStateResponse,
          CheckTrialEarlyStoppingStateMetatdata>
      checkTrialEarlyStoppingStateOperationSettings() {
    return ((VizierServiceStubSettings) getStubSettings())
        .checkTrialEarlyStoppingStateOperationSettings();
  }
  /** Returns the object with the settings used for calls to stopTrial. */
  public UnaryCallSettings<StopTrialRequest, Trial> stopTrialSettings() {
    return ((VizierServiceStubSettings) getStubSettings()).stopTrialSettings();
  }
  /** Returns the object with the settings used for calls to listOptimalTrials. */
  public UnaryCallSettings<ListOptimalTrialsRequest, ListOptimalTrialsResponse>
      listOptimalTrialsSettings() {
    return ((VizierServiceStubSettings) getStubSettings()).listOptimalTrialsSettings();
  }
  /** Creates settings wrapping the given stub settings. */
  public static final VizierServiceSettings create(VizierServiceStubSettings stub)
      throws IOException {
    return new VizierServiceSettings.Builder(stub.toBuilder()).build();
  }
  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return VizierServiceStubSettings.defaultExecutorProviderBuilder();
  }
  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return VizierServiceStubSettings.getDefaultEndpoint();
  }
  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return VizierServiceStubSettings.getDefaultServiceScopes();
  }
  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return VizierServiceStubSettings.defaultCredentialsProviderBuilder();
  }
  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return VizierServiceStubSettings.defaultGrpcTransportProviderBuilder();
  }
  /** Returns the default transport channel provider. */
  public static TransportChannelProvider defaultTransportChannelProvider() {
    return VizierServiceStubSettings.defaultTransportChannelProvider();
  }
  @BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return VizierServiceStubSettings.defaultApiClientHeaderProviderBuilder();
  }
  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }
  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }
  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }
  protected VizierServiceSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);
  }
  /** Builder for VizierServiceSettings. Mirrors the outer class's accessors in builder form. */
  public static class Builder extends ClientSettings.Builder<VizierServiceSettings, Builder> {
    protected Builder() throws IOException {
      this(((ClientContext) null));
    }
    protected Builder(ClientContext clientContext) {
      super(VizierServiceStubSettings.newBuilder(clientContext));
    }
    protected Builder(VizierServiceSettings settings) {
      super(settings.getStubSettings().toBuilder());
    }
    protected Builder(VizierServiceStubSettings.Builder stubSettings) {
      super(stubSettings);
    }
    private static Builder createDefault() {
      return new Builder(VizierServiceStubSettings.newBuilder());
    }
    public VizierServiceStubSettings.Builder getStubSettingsBuilder() {
      return ((VizierServiceStubSettings.Builder) getStubSettings());
    }
    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(
          getStubSettingsBuilder().unaryMethodSettingsBuilders(), settingsUpdater);
      return this;
    }
    /** Returns the builder for the settings used for calls to createStudy. */
    public UnaryCallSettings.Builder<CreateStudyRequest, Study> createStudySettings() {
      return getStubSettingsBuilder().createStudySettings();
    }
    /** Returns the builder for the settings used for calls to getStudy. */
    public UnaryCallSettings.Builder<GetStudyRequest, Study> getStudySettings() {
      return getStubSettingsBuilder().getStudySettings();
    }
    /** Returns the builder for the settings used for calls to listStudies. */
    public PagedCallSettings.Builder<
            ListStudiesRequest, ListStudiesResponse, ListStudiesPagedResponse>
        listStudiesSettings() {
      return getStubSettingsBuilder().listStudiesSettings();
    }
    /** Returns the builder for the settings used for calls to deleteStudy. */
    public UnaryCallSettings.Builder<DeleteStudyRequest, Empty> deleteStudySettings() {
      return getStubSettingsBuilder().deleteStudySettings();
    }
    /** Returns the builder for the settings used for calls to lookupStudy. */
    public UnaryCallSettings.Builder<LookupStudyRequest, Study> lookupStudySettings() {
      return getStubSettingsBuilder().lookupStudySettings();
    }
    /** Returns the builder for the settings used for calls to suggestTrials. */
    public UnaryCallSettings.Builder<SuggestTrialsRequest, Operation> suggestTrialsSettings() {
      return getStubSettingsBuilder().suggestTrialsSettings();
    }
    /** Returns the builder for the settings used for calls to suggestTrials. */
    public OperationCallSettings.Builder<
            SuggestTrialsRequest, SuggestTrialsResponse, SuggestTrialsMetadata>
        suggestTrialsOperationSettings() {
      return getStubSettingsBuilder().suggestTrialsOperationSettings();
    }
    /** Returns the builder for the settings used for calls to createTrial. */
    public UnaryCallSettings.Builder<CreateTrialRequest, Trial> createTrialSettings() {
      return getStubSettingsBuilder().createTrialSettings();
    }
    /** Returns the builder for the settings used for calls to getTrial. */
    public UnaryCallSettings.Builder<GetTrialRequest, Trial> getTrialSettings() {
      return getStubSettingsBuilder().getTrialSettings();
    }
    /** Returns the builder for the settings used for calls to listTrials. */
    public PagedCallSettings.Builder<ListTrialsRequest, ListTrialsResponse, ListTrialsPagedResponse>
        listTrialsSettings() {
      return getStubSettingsBuilder().listTrialsSettings();
    }
    /** Returns the builder for the settings used for calls to addTrialMeasurement. */
    public UnaryCallSettings.Builder<AddTrialMeasurementRequest, Trial>
        addTrialMeasurementSettings() {
      return getStubSettingsBuilder().addTrialMeasurementSettings();
    }
    /** Returns the builder for the settings used for calls to completeTrial. */
    public UnaryCallSettings.Builder<CompleteTrialRequest, Trial> completeTrialSettings() {
      return getStubSettingsBuilder().completeTrialSettings();
    }
    /** Returns the builder for the settings used for calls to deleteTrial. */
    public UnaryCallSettings.Builder<DeleteTrialRequest, Empty> deleteTrialSettings() {
      return getStubSettingsBuilder().deleteTrialSettings();
    }
    /** Returns the builder for the settings used for calls to checkTrialEarlyStoppingState. */
    public UnaryCallSettings.Builder<CheckTrialEarlyStoppingStateRequest, Operation>
        checkTrialEarlyStoppingStateSettings() {
      return getStubSettingsBuilder().checkTrialEarlyStoppingStateSettings();
    }
    /** Returns the builder for the settings used for calls to checkTrialEarlyStoppingState. */
    public OperationCallSettings.Builder<
            CheckTrialEarlyStoppingStateRequest,
            CheckTrialEarlyStoppingStateResponse,
            CheckTrialEarlyStoppingStateMetatdata>
        checkTrialEarlyStoppingStateOperationSettings() {
      return getStubSettingsBuilder().checkTrialEarlyStoppingStateOperationSettings();
    }
    /** Returns the builder for the settings used for calls to stopTrial. */
    public UnaryCallSettings.Builder<StopTrialRequest, Trial> stopTrialSettings() {
      return getStubSettingsBuilder().stopTrialSettings();
    }
    /** Returns the builder for the settings used for calls to listOptimalTrials. */
    public UnaryCallSettings.Builder<ListOptimalTrialsRequest, ListOptimalTrialsResponse>
        listOptimalTrialsSettings() {
      return getStubSettingsBuilder().listOptimalTrialsSettings();
    }
    @Override
    public VizierServiceSettings build() throws IOException {
      return new VizierServiceSettings(this);
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.config.spring;
import org.apache.dubbo.common.utils.CollectionUtils;
import org.apache.dubbo.common.utils.StringUtils;
import org.apache.dubbo.config.ApplicationConfig;
import org.apache.dubbo.config.ConfigCenterConfig;
import org.apache.dubbo.config.ConsumerConfig;
import org.apache.dubbo.config.MetadataReportConfig;
import org.apache.dubbo.config.MetricsConfig;
import org.apache.dubbo.config.ModuleConfig;
import org.apache.dubbo.config.MonitorConfig;
import org.apache.dubbo.config.ReferenceConfig;
import org.apache.dubbo.config.RegistryConfig;
import org.apache.dubbo.config.annotation.Reference;
import org.apache.dubbo.config.spring.extension.SpringExtensionFactory;
import org.apache.dubbo.config.support.Parameter;
import org.springframework.beans.factory.BeanFactoryUtils;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import static org.apache.dubbo.common.constants.CommonConstants.COMMA_SPLIT_PATTERN;
/**
* ReferenceFactoryBean
*/
public class ReferenceBean<T> extends ReferenceConfig<T> implements FactoryBean, ApplicationContextAware, InitializingBean, DisposableBean {
private static final long serialVersionUID = 213195494150089726L;
private transient ApplicationContext applicationContext;
/** Creates an unconfigured reference bean; properties are applied later by Spring. */
public ReferenceBean() {
  super();
}
/** Creates a reference bean pre-populated from a {@link Reference} annotation. */
public ReferenceBean(Reference reference) {
  super(reference);
}
@Override
public void setApplicationContext(ApplicationContext applicationContext) {
  this.applicationContext = applicationContext;
  // Also register the context with Dubbo's Spring extension factory so other Dubbo
  // components can look up Spring beans through it.
  SpringExtensionFactory.addApplicationContext(applicationContext);
}
@Override
public Object getObject() {
  // FactoryBean product: the referenced service. get() is inherited from
  // ReferenceConfig — presumably creates/returns the remote-service proxy; confirm.
  return get();
}
@Override
public Class<?> getObjectType() {
  // The product's type is the referenced service interface.
  return getInterfaceClass();
}
@Override
@Parameter(excluded = true)
public boolean isSingleton() {
  // One shared instance per bean definition.
  // NOTE(review): @Parameter(excluded = true) presumably keeps this getter out of
  // Dubbo's generated config parameters — confirm against Parameter's usage.
  return true;
}
@Override
@SuppressWarnings({"unchecked"})
public void afterPropertiesSet() throws Exception {
if (applicationContext != null) {
BeanFactoryUtils.beansOfTypeIncludingAncestors(applicationContext, ConfigCenterBean.class, false, false);
}
if (getConsumer() == null) {
Map<String, ConsumerConfig> consumerConfigMap = applicationContext == null ? null : BeanFactoryUtils.beansOfTypeIncludingAncestors(applicationContext, ConsumerConfig.class, false, false);
if (consumerConfigMap != null && consumerConfigMap.size() > 0) {
ConsumerConfig consumerConfig = null;
for (ConsumerConfig config : consumerConfigMap.values()) {
if (config.isDefault() == null || config.isDefault()) {
if (consumerConfig != null) {
throw new IllegalStateException("Duplicate consumer configs: " + consumerConfig + " and " + config);
}
consumerConfig = config;
}
}
if (consumerConfig != null) {
setConsumer(consumerConfig);
}
}
}
if (getApplication() == null
&& (getConsumer() == null || getConsumer().getApplication() == null)) {
Map<String, ApplicationConfig> applicationConfigMap = applicationContext == null ? null : BeanFactoryUtils.beansOfTypeIncludingAncestors(applicationContext, ApplicationConfig.class, false, false);
if (applicationConfigMap != null && applicationConfigMap.size() > 0) {
ApplicationConfig applicationConfig = null;
for (ApplicationConfig config : applicationConfigMap.values()) {
if (applicationConfig != null) {
throw new IllegalStateException("Duplicate application configs: " + applicationConfig + " and " + config);
}
applicationConfig = config;
}
if (applicationConfig != null) {
setApplication(applicationConfig);
}
}
}
if (getModule() == null
&& (getConsumer() == null || getConsumer().getModule() == null)) {
Map<String, ModuleConfig> moduleConfigMap = applicationContext == null ? null : BeanFactoryUtils.beansOfTypeIncludingAncestors(applicationContext, ModuleConfig.class, false, false);
if (moduleConfigMap != null && moduleConfigMap.size() > 0) {
ModuleConfig moduleConfig = null;
for (ModuleConfig config : moduleConfigMap.values()) {
if (config.isDefault() == null || config.isDefault()) {
if (moduleConfig != null) {
throw new IllegalStateException("Duplicate module configs: " + moduleConfig + " and " + config);
}
moduleConfig = config;
}
}
if (moduleConfig != null) {
setModule(moduleConfig);
}
}
}
if (StringUtils.isEmpty(getRegistryIds())) {
if (getApplication() != null && StringUtils.isNotEmpty(getApplication().getRegistryIds())) {
setRegistryIds(getApplication().getRegistryIds());
}
if (getConsumer() != null && StringUtils.isNotEmpty(getConsumer().getRegistryIds())) {
setRegistryIds(getConsumer().getRegistryIds());
}
}
if (CollectionUtils.isEmpty(getRegistries())
&& (getConsumer() == null || CollectionUtils.isEmpty(getConsumer().getRegistries()))
&& (getApplication() == null || CollectionUtils.isEmpty(getApplication().getRegistries()))) {
Map<String, RegistryConfig> registryConfigMap = applicationContext == null ? null : BeanFactoryUtils.beansOfTypeIncludingAncestors(applicationContext, RegistryConfig.class, false, false);
if (registryConfigMap != null && registryConfigMap.size() > 0) {
List<RegistryConfig> registryConfigs = new ArrayList<>();
if (StringUtils.isNotEmpty(registryIds)) {
Arrays.stream(COMMA_SPLIT_PATTERN.split(registryIds)).forEach(id -> {
if (registryConfigMap.containsKey(id)) {
registryConfigs.add(registryConfigMap.get(id));
}
});
}
if (registryConfigs.isEmpty()) {
for (RegistryConfig config : registryConfigMap.values()) {
if (StringUtils.isEmpty(registryIds)) {
registryConfigs.add(config);
}
}
}
if (!registryConfigs.isEmpty()) {
super.setRegistries(registryConfigs);
}
}
}
if (getMetadataReportConfig() == null) {
Map<String, MetadataReportConfig> metadataReportConfigMap = applicationContext == null ? null : BeanFactoryUtils.beansOfTypeIncludingAncestors(applicationContext, MetadataReportConfig.class, false, false);
if (metadataReportConfigMap != null && metadataReportConfigMap.size() == 1) {
// first elements
super.setMetadataReportConfig(metadataReportConfigMap.values().iterator().next());
} else if (metadataReportConfigMap != null && metadataReportConfigMap.size() > 1) {
throw new IllegalStateException("Multiple MetadataReport configs: " + metadataReportConfigMap);
}
}
if (getConfigCenter() == null) {
Map<String, ConfigCenterConfig> configenterMap = applicationContext == null ? null : BeanFactoryUtils.beansOfTypeIncludingAncestors(applicationContext, ConfigCenterConfig.class, false, false);
if (configenterMap != null && configenterMap.size() == 1) {
super.setConfigCenter(configenterMap.values().iterator().next());
} else if (configenterMap != null && configenterMap.size() > 1) {
throw new IllegalStateException("Multiple ConfigCenter found:" + configenterMap);
}
}
if (getMonitor() == null
&& (getConsumer() == null || getConsumer().getMonitor() == null)
&& (getApplication() == null || getApplication().getMonitor() == null)) {
Map<String, MonitorConfig> monitorConfigMap = applicationContext == null ? null : BeanFactoryUtils.beansOfTypeIncludingAncestors(applicationContext, MonitorConfig.class, false, false);
if (monitorConfigMap != null && monitorConfigMap.size() > 0) {
MonitorConfig monitorConfig = null;
for (MonitorConfig config : monitorConfigMap.values()) {
if (config.isDefault() == null || config.isDefault()) {
if (monitorConfig != null) {
throw new IllegalStateException("Duplicate monitor configs: " + monitorConfig + " and " + config);
}
monitorConfig = config;
}
}
if (monitorConfig != null) {
setMonitor(monitorConfig);
}
}
}
if (getMetrics() == null) {
Map<String, MetricsConfig> metricsConfigMap = applicationContext == null ? null : BeanFactoryUtils.beansOfTypeIncludingAncestors(applicationContext, MetricsConfig.class, false, false);
if (metricsConfigMap != null && metricsConfigMap.size() > 0) {
MetricsConfig metricsConfig = null;
for (MetricsConfig config : metricsConfigMap.values()) {
if (metricsConfig != null) {
throw new IllegalStateException("Duplicate metrics configs: " + metricsConfig + " and " + config);
}
metricsConfig = config;
}
if (metricsConfig != null) {
setMetrics(metricsConfig);
}
}
}
if (shouldInit()) {
getObject();
}
}
@Override
public void destroy() {
// do nothing
}
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
/**
* DescribeSnapshotAttributeType.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: 1.5.1 Built on : Oct 19, 2009 (10:59:34 EDT)
*/
package com.amazon.ec2;
/**
* DescribeSnapshotAttributeType bean class
*/
public class DescribeSnapshotAttributeType
        implements org.apache.axis2.databinding.ADBBean {
    // NOTE: Axis2 ADB auto-generated data-binding bean — regeneration from the WSDL will overwrite manual edits.

    /* This type was generated from the piece of schema that had
       name = DescribeSnapshotAttributeType
       Namespace URI = http://ec2.amazonaws.com/doc/2010-11-15/
       Namespace Prefix = ns1
    */

    // Returns the canonical prefix for the EC2 namespace, otherwise a fresh unique prefix.
    private static java.lang.String generatePrefix(java.lang.String namespace) {
        if (namespace.equals("http://ec2.amazonaws.com/doc/2010-11-15/")) {
            return "ns1";
        }
        return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }

    /**
     * field for SnapshotId
     */
    protected java.lang.String localSnapshotId;

    /**
     * Auto generated getter method
     * @return java.lang.String
     */
    public java.lang.String getSnapshotId() {
        return localSnapshotId;
    }

    /**
     * Auto generated setter method
     * @param param SnapshotId
     */
    public void setSnapshotId(java.lang.String param) {
        this.localSnapshotId = param;
    }

    /**
     * field for DescribeSnapshotAttributesGroup
     */
    protected com.amazon.ec2.DescribeSnapshotAttributesGroup localDescribeSnapshotAttributesGroup;

    /**
     * Auto generated getter method
     * @return com.amazon.ec2.DescribeSnapshotAttributesGroup
     */
    public com.amazon.ec2.DescribeSnapshotAttributesGroup getDescribeSnapshotAttributesGroup() {
        return localDescribeSnapshotAttributesGroup;
    }

    /**
     * Auto generated setter method
     * @param param DescribeSnapshotAttributesGroup
     */
    public void setDescribeSnapshotAttributesGroup(com.amazon.ec2.DescribeSnapshotAttributesGroup param) {
        this.localDescribeSnapshotAttributesGroup = param;
    }

    /**
     * isReaderMTOMAware
     * @return true if the reader supports MTOM
     */
    public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
        boolean isReaderMTOMAware = false;
        try {
            isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
        } catch (java.lang.IllegalArgumentException e) {
            // The reader does not recognize the property; treat it as not MTOM-aware.
            isReaderMTOMAware = false;
        }
        return isReaderMTOMAware;
    }

    /**
     * Wraps this bean in an OMElement whose content is serialized lazily on demand.
     * @param parentQName
     * @param factory
     * @return org.apache.axiom.om.OMElement
     */
    public org.apache.axiom.om.OMElement getOMElement(
            final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException {
        org.apache.axiom.om.OMDataSource dataSource =
                new org.apache.axis2.databinding.ADBDataSource(this, parentQName) {
                    public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
                        DescribeSnapshotAttributeType.this.serialize(parentQName, factory, xmlWriter);
                    }
                };
        return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
                parentQName, factory, dataSource);
    }

    // Convenience overload: serialize without an explicit xsi:type attribute.
    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException {
        serialize(parentQName, factory, xmlWriter, false);
    }

    // Writes this bean as XML under parentQName; serializeType controls the xsi:type attribute.
    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
                          boolean serializeType)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException {
        java.lang.String prefix = null;
        java.lang.String namespace = null;

        // Start tag of the wrapping element, binding the namespace prefix when not already bound.
        prefix = parentQName.getPrefix();
        namespace = parentQName.getNamespaceURI();
        if ((namespace != null) && (namespace.trim().length() > 0)) {
            java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
            if (writerPrefix != null) {
                xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
            } else {
                if (prefix == null) {
                    prefix = generatePrefix(namespace);
                }
                xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            }
        } else {
            xmlWriter.writeStartElement(parentQName.getLocalPart());
        }

        // Optional explicit xsi:type attribute (used when this type is serialized polymorphically).
        if (serializeType) {
            java.lang.String namespacePrefix = registerPrefix(xmlWriter, "http://ec2.amazonaws.com/doc/2010-11-15/");
            if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)) {
                writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type",
                        namespacePrefix + ":DescribeSnapshotAttributeType",
                        xmlWriter);
            } else {
                writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type",
                        "DescribeSnapshotAttributeType",
                        xmlWriter);
            }
        }

        // <snapshotId> — required element.
        namespace = "http://ec2.amazonaws.com/doc/2010-11-15/";
        if (!namespace.equals("")) {
            prefix = xmlWriter.getPrefix(namespace);
            if (prefix == null) {
                prefix = generatePrefix(namespace);
                xmlWriter.writeStartElement(prefix, "snapshotId", namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            } else {
                xmlWriter.writeStartElement(namespace, "snapshotId");
            }
        } else {
            xmlWriter.writeStartElement("snapshotId");
        }
        if (localSnapshotId == null) {
            // write the nil attribute
            throw new org.apache.axis2.databinding.ADBException("snapshotId cannot be null!!");
        } else {
            xmlWriter.writeCharacters(localSnapshotId);
        }
        xmlWriter.writeEndElement();

        // Required attribute group, serialized inline (null parent QName).
        if (localDescribeSnapshotAttributesGroup == null) {
            throw new org.apache.axis2.databinding.ADBException("DescribeSnapshotAttributesGroup cannot be null!!");
        }
        localDescribeSnapshotAttributesGroup.serialize(null, factory, xmlWriter);

        xmlWriter.writeEndElement();
    }

    /**
     * Util method to write an attribute with the ns prefix
     */
    private void writeAttribute(java.lang.String prefix, java.lang.String namespace, java.lang.String attName,
                                java.lang.String attValue, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        if (xmlWriter.getPrefix(namespace) == null) {
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        xmlWriter.writeAttribute(namespace, attName, attValue);
    }

    /**
     * Util method to write an attribute without the ns prefix
     */
    private void writeAttribute(java.lang.String namespace, java.lang.String attName,
                                java.lang.String attValue, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attValue);
        }
    }

    /**
     * Util method to write a QName-valued attribute (the value is rendered as prefix:localPart).
     */
    private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
                                     javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        java.lang.String attributeNamespace = qname.getNamespaceURI();
        java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
        if (attributePrefix == null) {
            attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
        }
        java.lang.String attributeValue;
        if (attributePrefix.trim().length() > 0) {
            attributeValue = attributePrefix + ":" + qname.getLocalPart();
        } else {
            attributeValue = qname.getLocalPart();
        }
        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attributeValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attributeValue);
        }
    }

    /**
     * method to handle Qnames
     */
    private void writeQName(javax.xml.namespace.QName qname,
                            javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        java.lang.String namespaceURI = qname.getNamespaceURI();
        if (namespaceURI != null) {
            java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
            if (prefix == null) {
                prefix = generatePrefix(namespaceURI);
                xmlWriter.writeNamespace(prefix, namespaceURI);
                xmlWriter.setPrefix(prefix, namespaceURI);
            }
            if (prefix.trim().length() > 0) {
                xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            } else {
                // i.e this is the default namespace
                xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            }
        } else {
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        }
    }

    // Writes a space-separated list of QNames as character data, registering prefixes first.
    private void writeQNames(javax.xml.namespace.QName[] qnames,
                             javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        if (qnames != null) {
            // we have to store this data until the last moment since it is not possible to write any
            // namespace data after writing the character data
            java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
            java.lang.String namespaceURI = null;
            java.lang.String prefix = null;
            for (int i = 0; i < qnames.length; i++) {
                if (i > 0) {
                    stringToWrite.append(" ");
                }
                namespaceURI = qnames[i].getNamespaceURI();
                if (namespaceURI != null) {
                    prefix = xmlWriter.getPrefix(namespaceURI);
                    if ((prefix == null) || (prefix.length() == 0)) {
                        prefix = generatePrefix(namespaceURI);
                        xmlWriter.writeNamespace(prefix, namespaceURI);
                        xmlWriter.setPrefix(prefix, namespaceURI);
                    }
                    if (prefix.trim().length() > 0) {
                        stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    } else {
                        stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    }
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            }
            xmlWriter.writeCharacters(stringToWrite.toString());
        }
    }

    /**
     * Register a namespace prefix
     */
    private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
        java.lang.String prefix = xmlWriter.getPrefix(namespace);
        if (prefix == null) {
            prefix = generatePrefix(namespace);
            // Keep generating until the prefix is not already bound to some other namespace.
            while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
                prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
            }
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        return prefix;
    }

    /**
     * databinding method to get an XML representation of this object
     *
     */
    public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
            throws org.apache.axis2.databinding.ADBException {
        // Alternating QName/value entries consumed by ADBXMLStreamReaderImpl.
        java.util.ArrayList elementList = new java.util.ArrayList();
        java.util.ArrayList attribList = new java.util.ArrayList();

        elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2010-11-15/",
                "snapshotId"));
        if (localSnapshotId != null) {
            elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localSnapshotId));
        } else {
            throw new org.apache.axis2.databinding.ADBException("snapshotId cannot be null!!");
        }
        elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2010-11-15/",
                "DescribeSnapshotAttributesGroup"));
        if (localDescribeSnapshotAttributesGroup == null) {
            throw new org.apache.axis2.databinding.ADBException("DescribeSnapshotAttributesGroup cannot be null!!");
        }
        elementList.add(localDescribeSnapshotAttributesGroup);

        return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
    }

    /**
     * Factory class that keeps the parse method
     */
    public static class Factory {

        /**
         * static method to create the object
         * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
         * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
         * Postcondition: If this object is an element, the reader is positioned at its end element
         * If this object is a complex type, the reader is positioned at the end element of its outer element
         */
        public static DescribeSnapshotAttributeType parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception {
            DescribeSnapshotAttributeType object =
                    new DescribeSnapshotAttributeType();

            // Unused locals below are emitted verbatim by the Axis2 code generator.
            int event;
            java.lang.String nillableValue = null;
            java.lang.String prefix = "";
            java.lang.String namespaceuri = "";
            try {
                // Advance to the first tag event.
                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();

                // Honour an explicit xsi:type pointing at a derived type via the ExtensionMapper.
                if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance", "type") != null) {
                    java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance",
                            "type");
                    if (fullTypeName != null) {
                        java.lang.String nsPrefix = null;
                        if (fullTypeName.indexOf(":") > -1) {
                            nsPrefix = fullTypeName.substring(0, fullTypeName.indexOf(":"));
                        }
                        nsPrefix = nsPrefix == null ? "" : nsPrefix;
                        java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":") + 1);
                        if (!"DescribeSnapshotAttributeType".equals(type)) {
                            //find namespace for the prefix
                            java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                            return (DescribeSnapshotAttributeType) com.amazon.ec2.ExtensionMapper.getTypeObject(
                                    nsUri, type, reader);
                        }
                    }
                }

                // Note all attributes that were handled. Used to differ normal attributes
                // from anyAttributes.
                java.util.Vector handledAttributes = new java.util.Vector();

                reader.next();

                // <snapshotId> — required; anything else here is a protocol error.
                while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
                if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2010-11-15/", "snapshotId").equals(reader.getName())) {
                    java.lang.String content = reader.getElementText();
                    object.setSnapshotId(
                            org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
                    reader.next();
                } // End of if for expected property start element
                else {
                    // A start element we are not expecting indicates an invalid parameter was passed
                    throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
                }

                // The attributes group element, parsed when present.
                while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
                if (reader.isStartElement()) {
                    object.setDescribeSnapshotAttributesGroup(com.amazon.ec2.DescribeSnapshotAttributesGroup.Factory.parse(reader));
                } // End of if for expected property start element

                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();
                if (reader.isStartElement())
                    // A start element we are not expecting indicates a trailing invalid property
                    throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
            } catch (javax.xml.stream.XMLStreamException e) {
                throw new java.lang.Exception(e);
            }

            return object;
        }
    }//end of factory class
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util.io;
import com.intellij.Patches;
import com.intellij.ide.IdeBundle;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationInfo;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ApplicationNamesInfo;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.util.io.BufferExposingByteArrayOutputStream;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.openapi.util.io.StreamUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.SystemProperties;
import com.intellij.util.Url;
import com.intellij.util.net.HttpConfigurable;
import com.intellij.util.net.NetUtils;
import com.intellij.util.net.ssl.CertificateManager;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.net.ssl.*;
import java.io.*;
import java.net.*;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
/**
* <p>Handy class for reading data from URL connections with built-in support for HTTP redirects and gzipped content and automatic cleanup.</p>
*
* <h3>Examples</h3>
*
* <p>Reading the whole response into a string:<br>
* {@code HttpRequests.request("https://example.com").readString(progressIndicator)}</p>
*
* <p>Downloading a file:<br>
* {@code HttpRequests.request("https://example.com/file.zip").saveToFile(new File(downloadDir, "temp.zip"), progressIndicator)}</p>
*
* <p>Tuning a connection:<br>
* {@code HttpRequests.request(url).userAgent("IntelliJ").readString()}<br>
* {@code HttpRequests.request(url).tuner(connection -> connection.setRequestProperty("X-Custom", value)).readString()}</p>
*
* <p>Using the input stream to implement custom reading logic:<br>
* {@code int firstByte = HttpRequests.request("file:///dev/random").connect(request -> request.getInputStream().read())}<br>
* {@code String firstLine = HttpRequests.request("https://example.com").connect(request -> new BufferedReader(request.getReader()).readLine())}</p>
*
* @see HttpStatusException a sublass of IOException, which includes an actual URL and HTTP response code
* @see URLUtil
*/
public final class HttpRequests {
private static final Logger LOG = Logger.getInstance(HttpRequests.class);

public static final String JSON_CONTENT_TYPE = "application/json; charset=utf-8";

// Defaults for timeouts (milliseconds) and the redirect hop limit; overridable via system properties.
public static final int CONNECTION_TIMEOUT = SystemProperties.getIntProperty("idea.connection.timeout", 10000);
public static final int READ_TIMEOUT = SystemProperties.getIntProperty("idea.read.timeout", 60000);
public static final int REDIRECT_LIMIT = SystemProperties.getIntProperty("idea.redirect.limit", 10);

// HTTP status codes treated as redirects; presumably consulted when following redirects — usage is outside this chunk.
private static final int[] REDIRECTS = {
  // temporary redirects
  HttpURLConnection.HTTP_MOVED_TEMP, 307 /* temporary redirect */,
  // permanent redirects
  HttpURLConnection.HTTP_MOVED_PERM, HttpURLConnection.HTTP_SEE_OTHER, 308 /* permanent redirect */
};

// Utility class: static factory methods only, no instances.
private HttpRequests() { }
/** A live request passed to a {@link RequestProcessor}; exposes the connection and ways to read/write its payload. */
public interface Request {
  /** The URL of this request. */
  @NotNull
  String getURL();

  /** Opens the connection on first call; later calls return the same instance. */
  @NotNull
  URLConnection getConnection() throws IOException;

  /** The response stream; gzip-encoded content is unwrapped transparently (when enabled on the builder). */
  @NotNull
  InputStream getInputStream() throws IOException;

  /** Reader over {@link #getInputStream()} using the charset from the response. */
  @NotNull
  BufferedReader getReader() throws IOException;

  /** Same as {@link #getReader()}, additionally reporting progress to {@code indicator} when the length is known. */
  @NotNull
  BufferedReader getReader(@Nullable ProgressIndicator indicator) throws IOException;

  /** @deprecated Called automatically on open connection. Use {@link RequestBuilder#tryConnect()} to get response code */
  @Deprecated
  boolean isSuccessful() throws IOException;

  @NotNull
  File saveToFile(@NotNull File file, @Nullable ProgressIndicator indicator) throws IOException;

  byte @NotNull [] readBytes(@Nullable ProgressIndicator indicator) throws IOException;

  @NotNull
  String readString(@Nullable ProgressIndicator indicator) throws IOException;

  /** Reads the entire response as a string without progress reporting. */
  @NotNull
  default String readString() throws IOException {
    return readString(null);
  }

  @NotNull
  CharSequence readChars(@Nullable ProgressIndicator indicator) throws IOException;

  /** Writes the string as a UTF-8 encoded request body. */
  default void write(@NotNull String data) throws IOException {
    write(data.getBytes(StandardCharsets.UTF_8));
  }

  /** Writes the bytes as the request body using fixed-length streaming mode (sets Content-Length up front). */
  default void write(byte @NotNull [] data) throws IOException {
    HttpURLConnection connection = (HttpURLConnection)getConnection();
    connection.setFixedLengthStreamingMode(data.length);
    try (OutputStream stream = connection.getOutputStream()) {
      stream.write(data);
    }
  }
}
/** Callback that consumes an open {@link Request} and produces a result; any IOException propagates to the caller. */
public interface RequestProcessor<T> {
  T process(@NotNull Request request) throws IOException;
}
/** Hook invoked on a connection before it is used, e.g. to set request properties or the HTTP method. */
public interface ConnectionTuner {
  void tune(@NotNull URLConnection connection) throws IOException;
}
/**
 * An {@link IOException} carrying the HTTP status code and the URL of the failed request,
 * so callers can distinguish, say, a 404 from a 500 without parsing the message.
 */
public static class HttpStatusException extends IOException {
  private final String myUrl;
  private final int myStatusCode;

  public HttpStatusException(@NotNull String message, int statusCode, @NotNull String url) {
    super(message);
    myUrl = url;
    myStatusCode = statusCode;
  }

  /** The HTTP response status code (e.g. 404). */
  public int getStatusCode() {
    return myStatusCode;
  }

  /** The URL that produced the error status. */
  @NotNull
  public String getUrl() {
    return myUrl;
  }

  @Override
  public String toString() {
    return super.toString() + ". Status=" + myStatusCode + ", Url=" + myUrl;
  }
}
/** Creates a GET-style request builder for the given {@link Url}. */
@NotNull
public static RequestBuilder request(@NotNull Url url) {
  return request(url.toExternalForm());
}

/** Creates a GET-style request builder for the given URL string. */
@NotNull
public static RequestBuilder request(@NotNull String url) {
  return new RequestBuilderImpl(url, null);
}

/** Creates a HEAD request builder (no response body expected). */
@NotNull
public static RequestBuilder head(@NotNull String url) {
  return new RequestBuilderImpl(url, connection -> ((HttpURLConnection)connection).setRequestMethod("HEAD"));
}

/** Creates a DELETE request builder without a request body. */
@NotNull
public static RequestBuilder delete(@NotNull String url) {
  return new RequestBuilderImpl(url, connection -> ((HttpURLConnection)connection).setRequestMethod("DELETE"));
}

/** Creates a DELETE request builder that may carry a request body of the given content type. */
@NotNull
public static RequestBuilder delete(@NotNull String url, @Nullable String contentType) {
  return requestWithBody(url, "DELETE", contentType, null);
}

/** Creates a POST request builder with the given content type. */
@NotNull
public static RequestBuilder post(@NotNull String url, @Nullable String contentType) {
  return requestWithBody(url, "POST", contentType, null);
}

/** Creates a PUT request builder with the given content type. */
@NotNull
public static RequestBuilder put(@NotNull String url, @Nullable String contentType) {
  return requestWithBody(url, "PUT", contentType, null);
}

/**
 * Java does not support "newer" HTTP methods, so we have to rely on server-side support of `X-HTTP-Method-Override` header to invoke PATCH
 * For reasoning see {@link HttpURLConnection#setRequestMethod(String)}
 * <p>
 * TODO: either fiddle with reflection or patch JDK to avoid server reliance
 */
@NotNull
public static RequestBuilder patch(@NotNull String url, @Nullable String contentType) {
  return requestWithBody(url, "POST", contentType,
                         connection -> connection.setRequestProperty("X-HTTP-Method-Override", "PATCH"));
}
/**
 * Builds a request that carries a body: configures the HTTP method, enables output,
 * sets the Content-Type (when given) and finally applies the extra tuner (when given).
 */
@NotNull
private static RequestBuilder requestWithBody(@NotNull String url,
                                              @NotNull String requestMethod,
                                              @Nullable String contentType,
                                              @Nullable ConnectionTuner tuner) {
  ConnectionTuner bodyTuner = raw -> {
    HttpURLConnection http = (HttpURLConnection)raw;
    http.setRequestMethod(requestMethod);
    http.setDoOutput(true);
    if (contentType != null) {
      http.setRequestProperty("Content-Type", contentType);
    }
    if (tuner != null) {
      tuner.tune(http);
    }
  };
  return new RequestBuilderImpl(url, bodyTuner);
}
/**
 * Formats a human-readable error message for a failed download, best-effort enriching it
 * with response headers and the HTTP status line when the connection can still provide them.
 */
@NotNull
public static String createErrorMessage(@NotNull IOException e, @NotNull Request request, boolean includeHeaders) {
  StringBuilder sb = new StringBuilder()
    .append("Cannot download '").append(request.getURL()).append("': ").append(e.getMessage());
  try {
    URLConnection connection = request.getConnection();
    if (includeHeaders) {
      sb.append("\n, headers: ").append(connection.getHeaderFields());
    }
    if (connection instanceof HttpURLConnection) {
      HttpURLConnection http = (HttpURLConnection)connection;
      sb.append("\n, response: ").append(http.getResponseCode()).append(' ').append(http.getResponseMessage());
    }
  }
  catch (Throwable ignored) {
    // Best effort only — connection details are optional in the message.
  }
  return sb.toString();
}
// Mutable fluent builder; every setter returns this. Consumed by process()/openConnection() (outside this chunk).
private static class RequestBuilderImpl extends RequestBuilder {
  private final String myUrl;
  // Defaults mirror the class-level constants; each field is overridable via the fluent setters below.
  private int myConnectTimeout = CONNECTION_TIMEOUT;
  private int myTimeout = READ_TIMEOUT;
  private int myRedirectLimit = REDIRECT_LIMIT;
  private boolean myGzip = true;
  private boolean myForceHttps;
  private boolean myUseProxy = true;
  private boolean myIsReadResponseOnError;
  private HostnameVerifier myHostnameVerifier;
  private String myUserAgent;
  private String myAccept;
  // User-supplied tuner, applied in addition to the internal one.
  private ConnectionTuner myTuner;
  // Tuner installed by the factory methods (e.g. sets the HTTP method); not user-configurable.
  private final ConnectionTuner myInternalTuner;
  private boolean myThrowStatusCodeException = true;

  private RequestBuilderImpl(@NotNull String url, @Nullable ConnectionTuner internalTuner) {
    myUrl = url;
    myInternalTuner = internalTuner;
  }

  @Override
  public RequestBuilder connectTimeout(int value) {
    myConnectTimeout = value;
    return this;
  }

  @Override
  public RequestBuilder readTimeout(int value) {
    myTimeout = value;
    return this;
  }

  @Override
  public RequestBuilder redirectLimit(int redirectLimit) {
    myRedirectLimit = redirectLimit;
    return this;
  }

  @Override
  public RequestBuilder gzip(boolean value) {
    myGzip = value;
    return this;
  }

  @Override
  public RequestBuilder forceHttps(boolean forceHttps) {
    myForceHttps = forceHttps;
    return this;
  }

  @Override
  public RequestBuilder useProxy(boolean useProxy) {
    myUseProxy = useProxy;
    return this;
  }

  @Override
  public RequestBuilder isReadResponseOnError(boolean isReadResponseOnError) {
    myIsReadResponseOnError = isReadResponseOnError;
    return this;
  }

  @Override
  public RequestBuilder hostNameVerifier(@Nullable HostnameVerifier hostnameVerifier) {
    myHostnameVerifier = hostnameVerifier;
    return this;
  }

  @Override
  public RequestBuilder userAgent(@Nullable String userAgent) {
    myUserAgent = userAgent;
    return this;
  }

  // Uses "<ProductName>/<build>" when an application is available; falls back to "IntelliJ" otherwise
  // (e.g. in tests or during shutdown).
  @Override
  public RequestBuilder productNameAsUserAgent() {
    Application app = ApplicationManager.getApplication();
    if (app != null && !app.isDisposed()) {
      String productName = ApplicationNamesInfo.getInstance().getFullProductName();
      String version = ApplicationInfo.getInstance().getBuild().asStringWithoutProductCode();
      return userAgent(productName + '/' + version);
    }
    else {
      return userAgent("IntelliJ");
    }
  }

  @Override
  public RequestBuilder accept(@Nullable String mimeType) {
    myAccept = mimeType;
    return this;
  }

  @Override
  public RequestBuilder tuner(@Nullable ConnectionTuner tuner) {
    myTuner = tuner;
    return this;
  }

  @NotNull
  @Override
  public RequestBuilder throwStatusCodeException(boolean shouldThrow) {
    myThrowStatusCodeException = shouldThrow;
    return this;
  }

  // Terminal operation: opens the connection and hands the live request to the processor.
  @Override
  public <T> T connect(@NotNull HttpRequests.RequestProcessor<T> processor) throws IOException {
    return process(this, processor);
  }
}
/**
 * Concrete {@code Request}: lazily opens the connection and caches the derived
 * stream/reader, releasing everything in {@link #close()}.
 */
private static class RequestImpl implements Request, AutoCloseable {
  private final RequestBuilderImpl myBuilder;
  // May be rewritten by openConnection(): forced-https upgrade and redirect following.
  private String myUrl;
  // Lazily created, cached, and released in close().
  private URLConnection myConnection;
  private InputStream myInputStream;
  private BufferedReader myReader;

  private RequestImpl(RequestBuilderImpl builder) {
    myBuilder = builder;
    myUrl = myBuilder.myUrl;
  }

  @NotNull
  @Override
  public String getURL() {
    return myUrl;
  }

  /** Opens the connection on first call and caches it. */
  @NotNull
  @Override
  public URLConnection getConnection() throws IOException {
    if (myConnection == null) {
      myConnection = openConnection(myBuilder, this);
    }
    return myConnection;
  }

  /**
   * Returns the (cached) response stream, transparently decompressing it when gzip
   * was requested by the builder and the server answered with gzip content encoding.
   */
  @NotNull
  @Override
  public InputStream getInputStream() throws IOException {
    if (myInputStream == null) {
      myInputStream = getConnection().getInputStream();
      if (myBuilder.myGzip && "gzip".equalsIgnoreCase(getConnection().getContentEncoding())) {
        myInputStream = CountingGZIPInputStream.create(myInputStream);
      }
    }
    return myInputStream;
  }

  @NotNull
  @Override
  public BufferedReader getReader() throws IOException {
    return getReader(null);
  }

  /**
   * Returns a reader over the response decoded with the connection's charset.
   * When an indicator is supplied and the content length is known, reading reports progress.
   */
  @NotNull
  @Override
  public BufferedReader getReader(@Nullable ProgressIndicator indicator) throws IOException {
    if (myReader == null) {
      InputStream inputStream = getInputStream();
      if (indicator != null) {
        int contentLength = getConnection().getContentLength();
        if (contentLength > 0) {
          inputStream = new ProgressMonitorInputStream(indicator, inputStream, contentLength);
        }
      }
      myReader = new BufferedReader(new InputStreamReader(inputStream, getCharset()));
    }
    return myReader;
  }

  @NotNull
  private Charset getCharset() throws IOException {
    return HttpUrlConnectionUtil.getCharset(getConnection());
  }

  /** Non-HTTP connections count as successful; HTTP connections must answer 200. */
  @Override
  public boolean isSuccessful() throws IOException {
    URLConnection connection = getConnection();
    return !(connection instanceof HttpURLConnection) || ((HttpURLConnection)connection).getResponseCode() == 200;
  }

  @Override
  public byte @NotNull [] readBytes(@Nullable ProgressIndicator indicator) throws IOException {
    return doReadBytes(indicator).toByteArray();
  }

  @NotNull
  private BufferExposingByteArrayOutputStream doReadBytes(@Nullable ProgressIndicator indicator) throws IOException {
    return HttpUrlConnectionUtil.readBytes(getInputStream(), getConnection(), indicator);
  }

  @NotNull
  @Override
  public String readString(@Nullable ProgressIndicator indicator) throws IOException {
    return HttpUrlConnectionUtil.readString(getInputStream(), getConnection(), indicator);
  }

  @NotNull
  @Override
  public CharSequence readChars(@Nullable ProgressIndicator indicator) throws IOException {
    BufferExposingByteArrayOutputStream byteStream = doReadBytes(indicator);
    if (byteStream.size() == 0) {
      return ArrayUtil.EMPTY_CHAR_SEQUENCE;
    }
    else {
      // Decode directly from the internal buffer to avoid copying the byte array.
      return getCharset().decode(ByteBuffer.wrap(byteStream.getInternalBuffer(), 0, byteStream.size()));
    }
  }

  /**
   * Streams the response body into {@code file}. On failure the partially written file
   * is deleted; HttpStatusException is rethrown unchanged, other I/O errors are wrapped
   * with a request-describing message.
   */
  @Override
  @NotNull
  public File saveToFile(@NotNull File file, @Nullable ProgressIndicator indicator) throws IOException {
    FileUtilRt.createParentDirs(file);
    // deleteFile stays true unless the copy completed, so the finally block cleans up partial output.
    boolean deleteFile = true;
    try (OutputStream out = new BufferedOutputStream(new FileOutputStream(file))) {
      NetUtils.copyStreamContent(indicator, getInputStream(), out, getConnection().getContentLength());
      deleteFile = false;
    }
    catch (HttpStatusException e) {
      throw e;
    }
    catch (IOException e) {
      throw new IOException(createErrorMessage(e, this, false), e);
    }
    finally {
      if (deleteFile) {
        FileUtilRt.delete(file);
      }
    }
    return file;
  }

  /** Releases the cached stream/reader and disconnects HTTP connections. */
  @Override
  public void close() {
    StreamUtil.closeStream(myInputStream);
    StreamUtil.closeStream(myReader);
    if (myConnection instanceof HttpURLConnection) {
      ((HttpURLConnection)myConnection).disconnect();
    }
  }
}
/**
 * Entry point for request execution: asserts network access is not performed on the EDT
 * or under a read action, optionally swaps in a throwaway context class loader, then
 * delegates to {@link #doProcess}.
 */
private static <T> T process(RequestBuilderImpl builder, RequestProcessor<T> processor) throws IOException {
  Application app = ApplicationManager.getApplication();
  LOG.assertTrue(app == null || app.isUnitTestMode() || app.isHeadlessEnvironment() || !app.isReadAccessAllowed(),
                 "Network shouldn't be accessed in EDT or inside read action");
  ClassLoader contextLoader = Thread.currentThread().getContextClassLoader();
  if (contextLoader != null && shouldOverrideContextClassLoader()) {
    // hack-around for class loader lock in sun.net.www.protocol.http.NegotiateAuthentication (IDEA-131621)
    // The temporary loader delegates to the original one and is closed when the request finishes.
    try (URLClassLoader cl = new URLClassLoader(new URL[0], contextLoader)) {
      Thread.currentThread().setContextClassLoader(cl);
      return doProcess(builder, processor);
    }
    finally {
      // Always restore the caller's context class loader.
      Thread.currentThread().setContextClassLoader(contextLoader);
    }
  }
  else {
    return doProcess(builder, processor);
  }
}
/**
 * True when the JDK-8032832 workaround applies and has not been disabled via the
 * "http.requests.override.context.classloader" system property (enabled by default).
 */
private static boolean shouldOverrideContextClassLoader() {
  if (!Patches.JDK_BUG_ID_8032832) {
    return false;
  }
  return SystemProperties.getBooleanProperty("http.requests.override.context.classloader", true);
}
/**
 * Runs the processor against a fresh request and, for output (e.g. POST/PUT) connections,
 * checks the response code afterwards — getResponseCode is deliberately not checked on
 * connect for those, because the request body must be written before the response is read.
 */
private static <T> T doProcess(RequestBuilderImpl builder, RequestProcessor<T> processor) throws IOException {
  try (RequestImpl request = new RequestImpl(builder)) {
    T result = processor.process(request);
    if (builder.myThrowStatusCodeException) {
      URLConnection connection = request.myConnection;
      // Fix: guard the cast — a non-HTTP URLConnection (e.g. file: or jar:) can also have
      // doOutput set; the previous unconditional cast would throw ClassCastException.
      if (connection instanceof HttpURLConnection && connection.getDoOutput()) {
        HttpURLConnection urlConnection = (HttpURLConnection)connection;
        int responseCode = urlConnection.getResponseCode();
        if (responseCode >= 400) {
          throwHttpStatusError(urlConnection, request, builder, responseCode);
        }
      }
    }
    return result;
  }
}
/**
 * Opens a connection for the request, applying the builder's settings (https upgrade,
 * proxy, timeouts, headers, tuners) and following up to {@code myRedirectLimit} redirects.
 *
 * @throws IOException when the redirect limit is exhausted, on SSL handshake problems,
 *                     or (if enabled) as HttpStatusException for non-success codes
 */
private static URLConnection openConnection(RequestBuilderImpl builder, RequestImpl request) throws IOException {
  if (builder.myForceHttps && StringUtil.startsWith(request.myUrl, "http:")) {
    request.myUrl = "https:" + request.myUrl.substring(5);
  }

  for (int i = 0; i < builder.myRedirectLimit; i++) {
    String url = request.myUrl;

    final URLConnection connection;
    if (!builder.myUseProxy) {
      connection = new URL(url).openConnection(Proxy.NO_PROXY);
    }
    else if (ApplicationManager.getApplication() == null) {
      connection = new URL(url).openConnection();
    }
    else {
      connection = HttpConfigurable.getInstance().openConnection(url);
    }

    if (connection instanceof HttpsURLConnection) {
      configureSslConnection(url, (HttpsURLConnection)connection);
    }
    connection.setConnectTimeout(builder.myConnectTimeout);
    connection.setReadTimeout(builder.myTimeout);

    if (builder.myUserAgent != null) {
      connection.setRequestProperty("User-Agent", builder.myUserAgent);
    }
    if (builder.myHostnameVerifier != null && connection instanceof HttpsURLConnection) {
      ((HttpsURLConnection)connection).setHostnameVerifier(builder.myHostnameVerifier);
    }
    if (builder.myGzip) {
      connection.setRequestProperty("Accept-Encoding", "gzip");
    }
    if (builder.myAccept != null) {
      connection.setRequestProperty("Accept", builder.myAccept);
    }
    connection.setUseCaches(false);

    // Internal tuner first, then the user-supplied one (which may override it).
    if (builder.myInternalTuner != null) {
      builder.myInternalTuner.tune(connection);
    }
    if (builder.myTuner != null) {
      builder.myTuner.tune(connection);
    }
    checkRequestHeadersForNulBytes(connection);

    if (!(connection instanceof HttpURLConnection)) {
      return connection;
    }
    // Output connections are returned without reading the response code:
    // the request body must be written before the response can be read.
    if (connection.getDoOutput()) {
      return connection;
    }

    HttpURLConnection httpURLConnection = (HttpURLConnection)connection;
    String method = httpURLConnection.getRequestMethod();
    LOG.assertTrue(method.equals("GET") || method.equals("HEAD") || method.equals("DELETE"),
                   "'" + method + "' not supported; please use GET, HEAD, DELETE, PUT or POST");

    if (LOG.isDebugEnabled()) LOG.debug("connecting to " + url);

    int responseCode;
    try {
      responseCode = httpURLConnection.getResponseCode();
    }
    catch (SSLHandshakeException e) {
      throw !NetUtils.isSniEnabled() ? new SSLException("SSL error probably caused by disabled SNI", e) : e;
    }

    if (LOG.isDebugEnabled()) LOG.debug("response from " + url + ": " + responseCode);

    if (responseCode < 200 || responseCode >= 300 && responseCode != HttpURLConnection.HTTP_NOT_MODIFIED) {
      if (ArrayUtil.indexOf(REDIRECTS, responseCode) >= 0) {
        httpURLConnection.disconnect();
        // Fix: the original reassigned 'url' before logging, so the debug line printed the
        // redirect target twice; keep the source URL and log "source: target" instead.
        String location = connection.getHeaderField("Location");
        if (LOG.isDebugEnabled()) LOG.debug("redirect from " + url + ": " + location);
        if (location != null) {
          request.myUrl = location;
          continue;
        }
      }

      if (builder.myThrowStatusCodeException) {
        throwHttpStatusError(httpURLConnection, request, builder, responseCode);
      }
    }

    return connection;
  }

  throw new IOException(IdeBundle.message("error.connection.failed.redirects"));
}
/**
 * Disconnects and raises HttpStatusException for the given response code. When the
 * builder asks for it, the server's error body is read and used as the message;
 * otherwise (or when the body is empty) a generic message is used.
 */
private static void throwHttpStatusError(HttpURLConnection connection, RequestImpl request, RequestBuilderImpl builder, int responseCode) throws IOException {
  String message = null;
  if (builder.myIsReadResponseOnError) {
    message = HttpUrlConnectionUtil.readString(connection.getErrorStream(), connection);
  }
  if (StringUtil.isEmpty(message)) {
    message = "Request failed with status code " + responseCode;
  }
  connection.disconnect();
  throw new HttpStatusException(message, responseCode, StringUtil.notNullize(request.myUrl, "Empty URL"));
}
/**
 * Installs the IDE Certificate Manager's SSLSocketFactory on the connection.
 * Falls back to the JVM's default SSL configuration when the application is not
 * initialized yet, when the factory is undefined, or when configuring fails —
 * problems are logged, never propagated.
 */
private static void configureSslConnection(@NotNull String url, @NotNull HttpsURLConnection connection) {
  if (ApplicationManager.getApplication() == null) {
    LOG.info("Application is not initialized yet; Using default SSL configuration to connect to " + url);
    return;
  }
  try {
    SSLSocketFactory factory = CertificateManager.getInstance().getSslContext().getSocketFactory();
    if (factory == null) {
      LOG.info("SSLSocketFactory is not defined by the IDE Certificate Manager; Using default SSL configuration to connect to " + url);
    }
    else {
      connection.setSSLSocketFactory(factory);
    }
  }
  catch (Throwable e) {
    // Deliberately best-effort: an SSL configuration problem must not kill the request here.
    LOG.info("Problems configuring SSL connection to " + url, e);
  }
}
/*
 * Many servers refuse to process a request and just answer 400 (Bad Request) if any
 * request header contains a NUL byte. Scan the pending headers and drop each
 * offending one before the request is sent.
 */
private static void checkRequestHeadersForNulBytes(URLConnection connection) {
  for (Map.Entry<String, List<String>> header : connection.getRequestProperties().entrySet()) {
    String key = header.getKey();
    for (String value : header.getValue()) {
      if (value.indexOf('\0') < 0) {
        continue;
      }
      // Setting a null value removes the header entirely.
      connection.setRequestProperty(key, null);
      LOG.error(String.format("Problem during request to '%s'. Header's '%s' value contains NUL bytes: '%s'. Omitting this header.",
                              connection.getURL().toString(), key, value));
      break;
    }
  }
}
}
| |
package io.openhc.ohc.basestation;
import android.content.res.Resources;
import android.net.http.AndroidHttpClient;
import org.apache.http.client.HttpClient;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.ProtocolException;
import java.util.List;
import java.util.logging.Level;
import io.openhc.ohc.OHC;
import io.openhc.ohc.R;
import io.openhc.ohc.basestation.device.Device;
import io.openhc.ohc.basestation.device.Field;
import io.openhc.ohc.basestation.rpc.Base_rpc;
import io.openhc.ohc.basestation.rpc.Rpc_group;
import io.openhc.ohc.basestation.rpc.rpcs.Rpc;
import io.openhc.ohc.basestation.rpc.rpcs.Rpc_device_get_field;
import io.openhc.ohc.basestation.rpc.rpcs.Rpc_device_get_num_fields;
import io.openhc.ohc.basestation.rpc.rpcs.Rpc_device_set_field_value;
import io.openhc.ohc.basestation.rpc.rpcs.Rpc_get_device;
import io.openhc.ohc.basestation.rpc.rpcs.Rpc_get_device_id;
import io.openhc.ohc.basestation.rpc.rpcs.Rpc_get_device_ids;
import io.openhc.ohc.basestation.rpc.rpcs.Rpc_get_device_name;
import io.openhc.ohc.basestation.rpc.rpcs.Rpc_get_num_devices;
import io.openhc.ohc.basestation.rpc.rpcs.Rpc_login;
import io.openhc.ohc.basestation.rpc.rpcs.Rpc_set_device_name;
import io.openhc.ohc.skynet.Network;
import io.openhc.ohc.skynet.Sender;
import io.openhc.ohc.skynet.transaction.Transaction_generator;
import io.openhc.ohc.skynet.udp.Receiver;
/**
* OOP representation of the Basestation (Gateway, OHC-Node)
* This class implements all high-level RPCs. It also holds a dedicated network handler allowing
* for the coexistence of multiple basestation instances
*
* @author Tobias Schramm
*/
public class Basestation implements Rpc_group.Rpc_group_callback
{
	private Network network;

	public final OHC ohc;
	public final Transaction_generator transaction_gen;

	private Base_rpc rpc_interface;
	private Resources resources;

	// UDP only: receiver thread for basestation-initiated state updates.
	private Receiver rx_thread;
	// HTTP only: shared client instance; released in destroy().
	private AndroidHttpClient http_client;

	// Serializable connection/session/device state; survives instance recreation.
	private Basestation_state state;

	// JSON keys of the RPC wire protocol, loaded from string resources.
	private final String RPC_ATTRIBUTE_METHOD;
	private final String RPC_REQUEST_KEY;
	private final String RPC_RESPONSE_KEY;

	/**
	 * Constructor for recreating the basestation from a serialized state object
	 *
	 * @param ohc The linked ohc instance
	 * @param state The basestation state
	 * @throws IOException
	 */
	public Basestation(OHC ohc, Basestation_state state) throws IOException
	{
		// Bootstrap networking from the stored endpoint/protocol, then adopt the full stored state.
		this(ohc, state.get_remote_socket_address(), state.get_protocol());
		this.state = state;
	}

	/**
	 * Default constructor for constructing a new basestation
	 *
	 * @param ohc The linked ohc instance
	 * @param station_address The address of the basestation
	 * @param protocol The network protocol used to connect to physical basestation
	 * @throws IOException
	 */
	public Basestation(OHC ohc, InetSocketAddress station_address, Network.Protocol protocol) throws IOException
	{
		this.ohc = ohc;
		this.resources = ohc.get_context().getResources();
		this.network = new Network(this);
		this.rpc_interface = new Base_rpc(ohc);
		this.transaction_gen = new Transaction_generator(ohc);
		this.state = new Basestation_state();
		this.state.set_remote_socket_addr(station_address);
		this.state.set_protocol(protocol);
		switch(protocol)
		{
			case UDP:
				//Receiver for state updates initiated by the basestation
				this.rx_thread = network.setup_receiver();
				this.rx_thread.start();
				break;
			case HTTP:
				this.http_client = AndroidHttpClient.newInstance(this.resources.getString(
						R.string.ohc_network_http_user_agent));
				this.state.set_remote_port(this.resources.getInteger(R.integer.ohc_network_http_port));
		}
		this.RPC_ATTRIBUTE_METHOD = this.resources.getString(R.string.ohc_rpc_attribute_method);
		this.RPC_REQUEST_KEY = this.resources.getString(R.string.ohc_rpc_request_key);
		this.RPC_RESPONSE_KEY = this.resources.getString(R.string.ohc_rpc_response_key);
	}

	//***** Code being called from Base_rpc *****

	/**
	 * [RPC] Sets a new address for the basestation
	 *
	 * @param addr The new address, or null when the basestation became unreachable
	 */
	public void update_endpoint(InetSocketAddress addr)
	{
		this.ohc.get_context().update_network_status(addr != null);
		this.state.set_remote_socket_addr(addr);
		// Fix: addr may legitimately be null (see update_network_status(addr != null) above);
		// the previous code dereferenced it unconditionally and could throw a NullPointerException.
		if(addr != null)
		{
			this.ohc.logger.log(Level.INFO, String.format("Endpoint address updated: %s:%s",
					addr.getAddress().getHostAddress(), Integer.toString(addr.getPort())));
		}
	}

	/**
	 * [RPC] Sets the session token of this device
	 *
	 * @param token A new session token
	 * @param success Login successful
	 */
	public void set_session_token(String token, boolean success)
	{
		if(success)
		{
			this.ohc.logger.log(Level.INFO, "Session token updated");
			this.state.set_session_token(token);
			// HTTP can fetch all ids in one round trip; UDP enumerates via device count.
			Rpc_group group = new Rpc_group(this);
			if(this.get_protocol() == Network.Protocol.HTTP)
				group.add_rpcs(this.get_device_ids());
			else
				group.add_rpcs(this.get_num_devices());
			this.run_rpc_group(group);
			this.ohc.get_context().set_login_status(false);
		}
		else
		{
			this.ohc.logger.log(Level.WARNING, "Wrong username and/or password");
			this.ohc.get_context().login_wrong();
		}
	}

	/**
	 * [RPC] Sets the number of devices attached to this basestation
	 *
	 * @param num_devices Number of devices
	 */
	public void set_num_devices(int num_devices)
	{
		this.ohc.logger.log(Level.INFO, "Number of attached devices updated: " + num_devices);
		this.state.set_num_devices(num_devices);
		// Fetch the id of every device by index.
		Rpc_group group = new Rpc_group(this);
		for(int i = 0; i < this.state.get_num_devices(); i++)
		{
			group.add_rpcs(this.get_device_id(i));
		}
		this.run_rpc_group(group);
	}

	/**
	 * [RPC] Sets the internal id of a device based on its index
	 *
	 * @param index Index of device in device list
	 * @param id Internal id
	 */
	public void set_device_id(int index, String id)
	{
		this.ohc.logger.log(Level.INFO, String.format("Setting id of device [%d]: %s", index, id));
		if(index >= this.state.get_num_devices() || index < 0)
		{
			this.ohc.logger.log(Level.WARNING, String.format("Device index '%d' out of range. Max %d", index, this.state.get_num_devices() - 1));
			return;
		}
		// Register the id with an empty slot; the Device object is filled in once its name arrives.
		this.state.put_device(id, null);
		Rpc_group group = new Rpc_group(this);
		group.add_rpcs(this.get_device_name(id));
		this.run_rpc_group(group);
	}

	/**
	 * [RPC] Sets the human readable name of a device based on its internal id
	 *
	 * @param device_id Internal device id
	 * @param name Human readable name
	 */
	public void set_device_name(String device_id, String name)
	{
		this.state.put_device(device_id, new Device(name, device_id));
		Rpc_group group = new Rpc_group(this);
		group.add_rpcs(this.device_get_num_fields(device_id));
		this.run_rpc_group(group);
	}

	/**
	 * [RPC] Sets the number of fields available on the specified device
	 *
	 * @param id Internal id of the device
	 * @param num_fields Number of fields
	 */
	public void device_set_num_fields(String id, int num_fields)
	{
		Device dev = this.state.get_device(id);
		if(dev != null)
		{
			dev.set_field_num(num_fields);
			Rpc_group group = new Rpc_group(this);
			for(int i = 0; i < num_fields; i++)
			{
				group.add_rpcs(this.device_get_field(id, i));
			}
			this.run_rpc_group(group);
		}
	}

	/**
	 * [RPC] Sets a whole field on the specified device
	 *
	 * @param id_dev Internal device id
	 * @param id_field Numeric field id
	 * @param field The field
	 */
	public void device_set_field(String id_dev, int id_field, Field field)
	{
		Device dev = this.state.get_device(id_dev);
		if(dev != null)
		{
			dev.set_field(id_field, field);
			// Redraw once the last field of the last device has arrived.
			if(this.state.get_device_ids().indexOf(id_dev) == this.state.get_num_devices() - 1 && dev.get_field_num() - 1 == id_field)
				ohc.draw_device_overview();
		}
	}

	/**
	 * [RPC] Sets all device ids
	 *
	 * @param ids List of all device ids
	 */
	public void set_device_ids(List<String> ids)
	{
		this.state.set_device_ids(ids);
		Rpc_group group = new Rpc_group(this);
		for(String id : ids)
			group.add_rpcs(this.rpc_get_device(id));
		this.run_rpc_group(group);
	}

	/**
	 * [RPC] Adds a device
	 *
	 * @param dev Device
	 */
	public void add_device(Device dev)
	{
		this.state.put_device(dev.get_id(), dev);
		// Redraw once the last expected device has been added.
		if(this.state.get_device_ids().indexOf(dev.get_id()) == this.state.get_num_devices() - 1)
			ohc.draw_device_overview();
	}

	//Dynamic calls to Base_rpc depending on the received JSON data

	/**
	 * Handles incoming JSON RPC data
	 *
	 * @param rpc JSON RPC data
	 */
	private void call_rpc(JSONObject rpc)
	{
		try
		{
			String method = rpc.getString(this.RPC_ATTRIBUTE_METHOD);
			this.ohc.logger.log(Level.INFO, "Received RPC: " + method);
			/*Dynamically reflecting into the local instance of Base_rpc to dynamically call functions inside
			* Base_rpc depending on the method supplied by the main control unit / basestation (OHC-node)*/
			this.rpc_interface.getClass().getMethod(method,
					JSONObject.class).invoke(this.rpc_interface, rpc);
		}
		catch(Exception ex)
		{
			this.ohc.logger.log(Level.SEVERE, "JSON encoded data is missing valid rpc data: " +
					ex.getMessage());
		}
	}

	/**
	 * Dispatches received RPC data: a single call for UDP, an array of calls
	 * (under the response key) for HTTP.
	 *
	 * @param data Received JSON data
	 */
	public void handle_rpc(JSONObject data)
	{
		switch(this.get_protocol())
		{
			case UDP:
				this.call_rpc(data);
				break;
			case HTTP:
				try
				{
					JSONArray array = data.getJSONArray(this.RPC_RESPONSE_KEY);
					for(int i = 0; i < array.length(); i++)
						this.call_rpc(array.getJSONObject(i));
				}
				catch(Exception ex)
				{
					this.ohc.logger.log(Level.WARNING, "Failed to parse HTTP multipart JSON rpc", ex);
				}
		}
	}

	/**
	 * Wrapper method handling sending of RPCs
	 *
	 * @param group Group containing all RPCs to be called
	 * @throws JSONException
	 * @throws ProtocolException when the active protocol cannot send whole groups (UDP)
	 */
	public void make_rpc_call(Rpc_group group) throws JSONException, ProtocolException
	{
		switch(this.get_protocol())
		{
			case UDP:
				throw new ProtocolException("UDP doesn't support direct sending of RPC groups");
			case HTTP:
				group.set_session_token(this.state.get_session_token());
				InetSocketAddress endpoint = new InetSocketAddress(this.state.get_remote_ip_address(),
						this.state.get_remote_port());
				Sender s_http = new io.openhc.ohc.skynet.http.Sender(this.ohc, this.http_client,
						endpoint, group);
				// Bundle all transactions of the group into a single HTTP request.
				JSONArray rpcs = new JSONArray();
				for(Rpc rpc : group.get_rpcs())
				{
					rpcs.put(rpc.get_transaction().get_json());
				}
				JSONObject obj = new JSONObject();
				obj.put(this.RPC_REQUEST_KEY, rpcs);
				Transaction_generator.Transaction transaction_tcp = this.transaction_gen
						.generate_transaction(obj);
				s_http.execute(transaction_tcp);
		}
	}

	/**
	 * Wrapper method handling sending of RPCs
	 *
	 * @param rpc RPC to be called
	 * @throws JSONException
	 */
	public void make_rpc_call(Rpc rpc) throws JSONException
	{
		switch(this.get_protocol())
		{
			case UDP:
				Sender s_udp = new io.openhc.ohc.skynet.udp.Sender(this.ohc,
						this.state.get_remote_socket_address(), rpc);
				Transaction_generator.Transaction transaction_udp = rpc.get_transaction();
				s_udp.execute(transaction_udp);
				break;
			case HTTP:
				InetSocketAddress endpoint = new InetSocketAddress(this.state.get_remote_ip_address(),
						this.state.get_remote_port());
				Sender s_tcp = new io.openhc.ohc.skynet.http.Sender(this.ohc, this.http_client,
						endpoint, rpc);
				// Even a single RPC is wrapped in an array to match the HTTP wire format.
				JSONArray rpcs = new JSONArray();
				rpcs.put(rpc.get_transaction().get_json());
				JSONObject obj = new JSONObject();
				obj.put(this.RPC_REQUEST_KEY, rpcs);
				Transaction_generator.Transaction transaction_tcp = this.transaction_gen
						.generate_transaction(obj);
				s_tcp.execute(transaction_tcp);
		}
	}

	/**
	 * Attaches the current session token to the group and runs it.
	 *
	 * @param group Group of RPCs to run
	 */
	public void run_rpc_group(Rpc_group group)
	{
		group.set_session_token(this.state.get_session_token());
		group.run();
	}

	//***** RPC functions calling methods on the main control unit (OHC-node) *****

	/**
	 * Makes a login RPC to the basestation
	 *
	 * @param uname Username
	 * @param passwd Password
	 */
	public void login(String uname, String passwd)
	{
		Rpc_group group = new Rpc_group(this);
		Rpc_login rpc = new Rpc_login(this);
		rpc.set_uname(uname);
		rpc.set_passwd(passwd);
		group.add_rpcs(rpc);
		this.run_rpc_group(group);
	}

	/**
	 * Requests the number of attached devices from the basestation
	 *
	 * @return The rpc
	 */
	public Rpc get_num_devices()
	{
		Rpc_get_num_devices rpc = new Rpc_get_num_devices(this);
		return rpc;
	}

	/**
	 * Gets the internal id of a device by its index
	 *
	 * @param index Device index
	 * @return The rpc
	 */
	public Rpc get_device_id(int index)
	{
		Rpc_get_device_id rpc = new Rpc_get_device_id(this);
		rpc.set_index(index);
		return rpc;
	}

	/**
	 * Gets the human readable name of a device by its internal id
	 *
	 * @param id Internal device id
	 * @return The rpc
	 */
	public Rpc get_device_name(String id)
	{
		Rpc_get_device_name rpc = new Rpc_get_device_name(this);
		rpc.set_id(id);
		return rpc;
	}

	/**
	 * Gets the number of fields of an attached device by its internal id
	 *
	 * @param id Internal device id
	 * @return The rpc
	 */
	public Rpc device_get_num_fields(String id)
	{
		Rpc_device_get_num_fields rpc = new Rpc_device_get_num_fields(this);
		rpc.set_id(id);
		return rpc;
	}

	/**
	 * Get a field of an attached device by the internal device id and the field id
	 *
	 * @param id_dev Internal device id
	 * @param id_field Numeric field id
	 * @return The rpc
	 */
	public Rpc device_get_field(String id_dev, int id_field)
	{
		Rpc_device_get_field rpc = new Rpc_device_get_field(this);
		rpc.set_id(id_dev);
		rpc.set_field_id(id_field);
		return rpc;
	}

	/**
	 * Set the value of a field on an attached device
	 *
	 * @param id_dev Internal device id
	 * @param id_field Numeric field id
	 * @param value Value of the field
	 */
	public void device_set_field_value(String id_dev, int id_field, Object value)
	{
		Rpc_group group = new Rpc_group(this);
		Rpc_device_set_field_value rpc = new Rpc_device_set_field_value(this);
		rpc.set_id(id_dev);
		rpc.set_field_id(id_field);
		rpc.set_field_value(value);
		group.add_rpcs(rpc);
		this.run_rpc_group(group);
	}

	/**
	 * Set the human readable name of a device
	 *
	 * @param dev Device object
	 * @param name Human readable device name
	 */
	public void device_set_name(Device dev, String name)
	{
		this.device_set_name(dev.get_id(), name);
	}

	/**
	 * Set the human readable name of a device
	 *
	 * @param id Internal device id
	 * @param name Human readable device name
	 */
	public void device_set_name(String id, String name)
	{
		Rpc_group group = new Rpc_group(this);
		Rpc_set_device_name rpc = new Rpc_set_device_name(this);
		rpc.set_id(id);
		rpc.set_name(name);
		group.add_rpcs(rpc);
		this.run_rpc_group(group);
	}

	/**
	 * Requests all device ids from the basestation
	 *
	 * @return The rpc
	 */
	public Rpc get_device_ids()
	{
		return new Rpc_get_device_ids(this);
	}

	/**
	 * Queries a device object from the basestation
	 *
	 * @param id Internal device id
	 * @return The rpc
	 */
	public Rpc rpc_get_device(String id)
	{
		Rpc_get_device rpc = new Rpc_get_device(this);
		rpc.set_id(id);
		return rpc;
	}

	@Override
	public void on_group_finish(Rpc_group group)
	{
		// No-op: group completion currently requires no action here.
	}

	//General purpose functions

	/**
	 * Get a list of all known devices
	 *
	 * @return A list of all attached devices
	 */
	public List<Device> get_devices()
	{
		return this.state.get_devices();
	}

	/**
	 * Get resources
	 *
	 * @return Resources
	 */
	public Resources get_resources()
	{
		return this.resources;
	}

	/**
	 * Get device by id
	 *
	 * @param id Internal device id
	 * @return Device instance
	 */
	public Device get_device(String id)
	{
		return this.state.get_device(id);
	}

	/**
	 * Get serializable version of this basestation
	 *
	 * @return Serializable representation
	 */
	public Basestation_state get_state()
	{
		return this.state;
	}

	/**
	 * Returns the protocol being used
	 *
	 * @return Current protocol
	 */
	public Network.Protocol get_protocol()
	{
		return this.state.get_protocol();
	}

	/**
	 * Returns whether requests to the basestation should be bundled together or not
	 *
	 * @return Bundle requests
	 */
	public boolean do_bundle_requests()
	{
		return this.state.get_protocol() == Network.Protocol.HTTP;
	}

	/**
	 * Quits all tasks related to this basestation
	 */
	public void destroy()
	{
		if(this.rx_thread != null)
			this.rx_thread.kill();
		if(this.http_client != null)
			this.http_client.close();
	}
}
| |
/***
* ASM: a very small and fast Java bytecode manipulation framework
* Copyright (c) 2000-2011 INRIA, France Telecom
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
package de.schenk.objectweb.asm.commons;
import de.schenk.objectweb.asm.Handle;
import de.schenk.objectweb.asm.Label;
import de.schenk.objectweb.asm.MethodVisitor;
import de.schenk.objectweb.asm.Opcodes;
/**
* A {@link MethodVisitor} that can be used to approximate method size.
*
* @author Eugene Kuleshov
*/
public class CodeSizeEvaluator extends MethodVisitor implements Opcodes {

    /** Lower bound, in bytes, of the size of the visited code. */
    private int minSize;

    /** Upper bound, in bytes, of the size of the visited code. */
    private int maxSize;

    public CodeSizeEvaluator(final MethodVisitor mv) {
        this(Opcodes.ASM5, mv);
    }

    protected CodeSizeEvaluator(final int api, final MethodVisitor mv) {
        super(api, mv);
    }

    public int getMinSize() {
        return minSize;
    }

    public int getMaxSize() {
        return maxSize;
    }

    /** Accumulates the minimum and maximum encoded size of a single instruction. */
    private void addInsnSize(final int min, final int max) {
        minSize += min;
        maxSize += max;
    }

    @Override
    public void visitInsn(final int opcode) {
        addInsnSize(1, 1);
        if (mv != null) {
            mv.visitInsn(opcode);
        }
    }

    @Override
    public void visitIntInsn(final int opcode, final int operand) {
        // SIPUSH carries a two-byte operand; BIPUSH/NEWARRAY carry one byte.
        final int size = opcode == SIPUSH ? 3 : 2;
        addInsnSize(size, size);
        if (mv != null) {
            mv.visitIntInsn(opcode, operand);
        }
    }

    @Override
    public void visitVarInsn(final int opcode, final int var) {
        if (var < 4 && opcode != RET) {
            // Short forms (e.g. ALOAD_0) encode the variable in the opcode itself.
            addInsnSize(1, 1);
        } else if (var >= 256) {
            // WIDE prefix + opcode + two-byte index.
            addInsnSize(4, 4);
        } else {
            addInsnSize(2, 2);
        }
        if (mv != null) {
            mv.visitVarInsn(opcode, var);
        }
    }

    @Override
    public void visitTypeInsn(final int opcode, final String type) {
        addInsnSize(3, 3);
        if (mv != null) {
            mv.visitTypeInsn(opcode, type);
        }
    }

    @Override
    public void visitFieldInsn(final int opcode, final String owner,
            final String name, final String desc) {
        addInsnSize(3, 3);
        if (mv != null) {
            mv.visitFieldInsn(opcode, owner, name, desc);
        }
    }

    @Deprecated
    @Override
    public void visitMethodInsn(final int opcode, final String owner,
            final String name, final String desc) {
        if (api >= Opcodes.ASM5) {
            super.visitMethodInsn(opcode, owner, name, desc);
            return;
        }
        doVisitMethodInsn(opcode, owner, name, desc,
                opcode == Opcodes.INVOKEINTERFACE);
    }

    @Override
    public void visitMethodInsn(final int opcode, final String owner,
            final String name, final String desc, final boolean itf) {
        if (api < Opcodes.ASM5) {
            super.visitMethodInsn(opcode, owner, name, desc, itf);
            return;
        }
        doVisitMethodInsn(opcode, owner, name, desc, itf);
    }

    private void doVisitMethodInsn(int opcode, final String owner,
            final String name, final String desc, final boolean itf) {
        // INVOKEINTERFACE carries an extra count byte and a trailing zero byte.
        final int size = opcode == INVOKEINTERFACE ? 5 : 3;
        addInsnSize(size, size);
        if (mv != null) {
            mv.visitMethodInsn(opcode, owner, name, desc, itf);
        }
    }

    @Override
    public void visitInvokeDynamicInsn(String name, String desc, Handle bsm,
            Object... bsmArgs) {
        addInsnSize(5, 5);
        if (mv != null) {
            mv.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
        }
    }

    @Override
    public void visitJumpInsn(final int opcode, final Label label) {
        // Narrow jump: 3 bytes. GOTO/JSR may widen to GOTO_W/JSR_W (5 bytes);
        // any other jump may need an inverted jump plus GOTO_W (8 bytes).
        addInsnSize(3, opcode == GOTO || opcode == JSR ? 5 : 8);
        if (mv != null) {
            mv.visitJumpInsn(opcode, label);
        }
    }

    @Override
    public void visitLdcInsn(final Object cst) {
        if (cst instanceof Long || cst instanceof Double) {
            // LDC2_W is always 3 bytes.
            addInsnSize(3, 3);
        } else {
            // LDC is 2 bytes but may widen to LDC_W (3 bytes).
            addInsnSize(2, 3);
        }
        if (mv != null) {
            mv.visitLdcInsn(cst);
        }
    }

    @Override
    public void visitIincInsn(final int var, final int increment) {
        // WIDE IINC when either operand exceeds its narrow range.
        final boolean wide = var > 255 || increment > 127 || increment < -128;
        final int size = wide ? 6 : 3;
        addInsnSize(size, size);
        if (mv != null) {
            mv.visitIincInsn(var, increment);
        }
    }

    @Override
    public void visitTableSwitchInsn(final int min, final int max,
            final Label dflt, final Label... labels) {
        // Variable padding (0-3 bytes) accounts for the min/max spread.
        addInsnSize(13 + labels.length * 4, 16 + labels.length * 4);
        if (mv != null) {
            mv.visitTableSwitchInsn(min, max, dflt, labels);
        }
    }

    @Override
    public void visitLookupSwitchInsn(final Label dflt, final int[] keys,
            final Label[] labels) {
        // Variable padding (0-3 bytes) accounts for the min/max spread.
        addInsnSize(9 + keys.length * 8, 12 + keys.length * 8);
        if (mv != null) {
            mv.visitLookupSwitchInsn(dflt, keys, labels);
        }
    }

    @Override
    public void visitMultiANewArrayInsn(final String desc, final int dims) {
        addInsnSize(4, 4);
        if (mv != null) {
            mv.visitMultiANewArrayInsn(desc, dims);
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.exem.flamingo.shared.model.rest;
import java.io.Serializable;
import java.sql.Timestamp;
/**
* User Domain Object.
*
* @author Myeongha KIM
* @since 2.0
*/
public class User implements Serializable {

    // Explicit serialVersionUID: the class is Serializable, so without this the
    // JVM derives an id from the class shape and any recompilation can break
    // deserialization of previously stored instances.
    private static final long serialVersionUID = 1L;

    private Long id;
    private String username;
    private String password;
    private String email;
    private String name;
    private Long orgId;
    private Short authId;
    private Short level;
    private Boolean enabled;
    private Timestamp registerDate;
    private Timestamp updateDate;
    private String linuxUserHome;
    private String hdfsUserHome;
    private String userGroup;
    private String description;
    private String websocketKey;

    /** No-arg constructor, required by bean-mapping frameworks. */
    public User() {}

    /** Full constructor initializing every field. */
    public User(Long id, String username, String password, String email, String name, Long orgId, Short authId, Short level, Boolean enabled, Timestamp registerDate, Timestamp updateDate, String linuxUserHome, String hdfsUserHome, String userGroup, String description, String websocketKey) {
        this.id = id;
        this.username = username;
        this.password = password;
        this.email = email;
        this.name = name;
        this.orgId = orgId;
        this.authId = authId;
        this.level = level;
        this.enabled = enabled;
        this.registerDate = registerDate;
        this.updateDate = updateDate;
        this.linuxUserHome = linuxUserHome;
        this.hdfsUserHome = hdfsUserHome;
        this.userGroup = userGroup;
        this.description = description;
        this.websocketKey = websocketKey;
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Long getOrgId() {
        return orgId;
    }

    public void setOrgId(Long orgId) {
        this.orgId = orgId;
    }

    public Short getAuthId() {
        return authId;
    }

    public void setAuthId(Short authId) {
        this.authId = authId;
    }

    public Short getLevel() {
        return level;
    }

    public void setLevel(Short level) {
        this.level = level;
    }

    public Boolean getEnabled() {
        return enabled;
    }

    public void setEnabled(Boolean enabled) {
        this.enabled = enabled;
    }

    public Timestamp getRegisterDate() {
        return registerDate;
    }

    public void setRegisterDate(Timestamp registerDate) {
        this.registerDate = registerDate;
    }

    public Timestamp getUpdateDate() {
        return updateDate;
    }

    public void setUpdateDate(Timestamp updateDate) {
        this.updateDate = updateDate;
    }

    public String getLinuxUserHome() {
        return linuxUserHome;
    }

    public void setLinuxUserHome(String linuxUserHome) {
        this.linuxUserHome = linuxUserHome;
    }

    public String getHdfsUserHome() {
        return hdfsUserHome;
    }

    public void setHdfsUserHome(String hdfsUserHome) {
        this.hdfsUserHome = hdfsUserHome;
    }

    public String getUserGroup() {
        return userGroup;
    }

    public void setUserGroup(String userGroup) {
        this.userGroup = userGroup;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public String getWebsocketKey() {
        return websocketKey;
    }

    public void setWebsocketKey(String websocketKey) {
        this.websocketKey = websocketKey;
    }

    /**
     * Diagnostic representation. The password is deliberately masked: the
     * previous version embedded the raw password, so any log statement that
     * printed a User leaked the credential.
     */
    @Override
    public String toString() {
        return "User{" +
                "id=" + id +
                ", username='" + username + '\'' +
                ", password='*****'" +
                ", email='" + email + '\'' +
                ", name='" + name + '\'' +
                ", orgId=" + orgId +
                ", authId=" + authId +
                ", level=" + level +
                ", enabled=" + enabled +
                ", registerDate=" + registerDate +
                ", updateDate=" + updateDate +
                ", linuxUserHome='" + linuxUserHome + '\'' +
                ", hdfsUserHome='" + hdfsUserHome + '\'' +
                ", userGroup='" + userGroup + '\'' +
                ", description='" + description + '\'' +
                ", websocketKey='" + websocketKey + '\'' +
                '}';
    }
}
| |
/*
*Copyright (c) 2005-2013, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
*WSO2 Inc. licenses this file to you under the Apache License,
*Version 2.0 (the "License"); you may not use this file except
*in compliance with the License.
*You may obtain a copy of the License at
*
*http://www.apache.org/licenses/LICENSE-2.0
*
*Unless required by applicable law or agreed to in writing,
*software distributed under the License is distributed on an
*"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
*KIND, either express or implied. See the License for the
*specific language governing permissions and limitations
*under the License.
*/
package org.wso2.carbon.identity.oauth2.authz.handlers;
import org.apache.amber.oauth2.common.exception.OAuthSystemException;
import org.apache.axiom.util.base64.Base64Utils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.identity.oauth.cache.OAuthCacheKey;
import org.wso2.carbon.identity.oauth.common.OAuthConstants;
import org.wso2.carbon.identity.oauth.config.OAuthServerConfiguration;
import org.wso2.carbon.identity.oauth2.IdentityOAuth2Exception;
import org.wso2.carbon.identity.oauth2.authz.OAuthAuthzReqMessageContext;
import org.wso2.carbon.identity.oauth2.dto.OAuth2AuthorizeReqDTO;
import org.wso2.carbon.identity.oauth2.dto.OAuth2AuthorizeRespDTO;
import org.wso2.carbon.identity.oauth2.model.AccessTokenDO;
import org.wso2.carbon.identity.oauth2.model.RefreshTokenValidationDataDO;
import org.wso2.carbon.identity.oauth2.util.OAuth2Util;
import java.sql.Timestamp;
import java.util.Date;
import java.util.UUID;
/**
 * Issues access tokens for the OAuth2 authorization flow handled by this
 * response type. Reuses a still-valid token from the cache or the database
 * when one exists for the same (client, user, scope) triple; otherwise
 * generates and persists a new access/refresh token pair.
 *
 * NOTE: the fields {@code cacheEnabled}, {@code oauthCache},
 * {@code tokenMgtDAO} and {@code oauthIssuerImpl} used below are inherited
 * from {@link AbstractResponseTypeHandler}.
 */
public class TokenResponseTypeHandler extends AbstractResponseTypeHandler {

    private static Log log = LogFactory.getLog(TokenResponseTypeHandler.class);

    /**
     * Builds the authorization response for the request in
     * {@code oauthAuthzMsgCtx}, returning a DTO carrying the access token,
     * its validity period and the approved scope.
     *
     * @throws IdentityOAuth2Exception if token generation fails
     */
    @Override
    public OAuth2AuthorizeRespDTO issue(OAuthAuthzReqMessageContext oauthAuthzMsgCtx)
            throws IdentityOAuth2Exception {

        OAuth2AuthorizeRespDTO respDTO = new OAuth2AuthorizeRespDTO();
        OAuth2AuthorizeReqDTO authorizationReqDTO = oauthAuthzMsgCtx.getAuthorizationReqDTO();

        String scope = OAuth2Util.buildScopeString(oauthAuthzMsgCtx.getApprovedScope());
        respDTO.setCallbackURI(authorizationReqDTO.getCallbackUrl());

        String consumerKey = authorizationReqDTO.getConsumerKey();
        String authorizedUser = authorizationReqDTO.getUsername();

        // Cache entries are keyed by the (client, user, scope) triple.
        OAuthCacheKey cacheKey = new OAuthCacheKey(consumerKey + ":" + authorizedUser + ":" + scope);
        String userStoreDomain = null;

        //select the user store domain when multiple user stores are configured.
        if (OAuth2Util.checkAccessTokenPartitioningEnabled() &&
                OAuth2Util.checkUserNameAssertionEnabled()) {
            userStoreDomain = OAuth2Util.getUserStoreDomainFromUserId(authorizedUser);
        }

        // Serialize concurrent issuance for the same triple. NOTE(review):
        // locking on an interned String works per-JVM only — confirm this is
        // acceptable in clustered deployments.
        synchronized ((consumerKey + ":" + authorizedUser + ":" + scope).intern()) {

            // check if valid access token exists in cache
            if (cacheEnabled) {
                AccessTokenDO accessTokenDO = (AccessTokenDO) oauthCache.getValueFromCache(cacheKey);
                if (accessTokenDO != null) {
                    if(log.isDebugEnabled()) {
                        log.debug("Retrieved active Access Token : " + accessTokenDO.getAccessToken() +
                                " for Client Id : " + consumerKey + ", User ID :" + authorizedUser +
                                " and Scope : " + scope + " from cache");
                    }
                    long expireTime = OAuth2Util.getTokenExpireTimeMillis(accessTokenDO);
                    if (expireTime > 0) {
                        // Cached token still valid — return it as-is.
                        if (log.isDebugEnabled()) {
                            log.debug("Access Token " + accessTokenDO.getAccessToken() + " is still valid");
                        }
                        respDTO.setAccessToken(accessTokenDO.getAccessToken());
                        respDTO.setValidityPeriod(accessTokenDO.getValidityPeriod());
                        respDTO.setScope(oauthAuthzMsgCtx.getApprovedScope());
                        return respDTO;
                    } else {
                        //Token is expired. Clear it from cache and mark it as expired on database
                        oauthCache.clearCacheEntry(cacheKey);
                        tokenMgtDAO.setAccessTokenState(accessTokenDO.getAccessToken(),
                                OAuthConstants.TokenStates.TOKEN_STATE_EXPIRED,
                                UUID.randomUUID().toString(), userStoreDomain);
                        if(log.isDebugEnabled()){
                            log.debug("Access Token " + accessTokenDO.getAccessToken() +
                                    " is expired. Therefore cleared it from cache and marked it" +
                                    " as expired in database");
                        }
                    }
                } else {
                    if(log.isDebugEnabled()) {
                        log.debug("No active access token found in cache for Client ID : " + consumerKey +
                                ", User ID : " + authorizedUser + " and Scope : " + scope);
                    }
                }
            }

            // check if the last issued access token is still active and valid in the database
            AccessTokenDO accessTokenDO = tokenMgtDAO.retrieveLatestAccessToken(
                    consumerKey, authorizedUser, userStoreDomain, scope, false);
            if (accessTokenDO != null) {
                if(log.isDebugEnabled()) {
                    log.debug("Retrieved latest Access Token : " + accessTokenDO.getAccessToken() +
                            " for Client ID : " + consumerKey + ", User ID :" + authorizedUser +
                            " and Scope : " + scope + " from database");
                }
                if (OAuthConstants.TokenStates.TOKEN_STATE_ACTIVE.equals(accessTokenDO.getTokenState()) &&
                        OAuth2Util.getTokenExpireTimeMillis(accessTokenDO) > 0) {
                    // token is active and valid
                    if (log.isDebugEnabled()) {
                        log.debug("Access token : " + accessTokenDO.getAccessToken() + " is still valid");
                    }
                    if (cacheEnabled) {
                        // Repopulate the cache so the next request hits it.
                        oauthCache.addToCache(cacheKey, accessTokenDO);
                        if (log.isDebugEnabled()) {
                            log.debug("Access Token : " + accessTokenDO.getAccessToken() +
                                    " was added to cache for cache key : " + cacheKey.getCacheKeyString());
                        }
                    }
                    respDTO.setAccessToken(accessTokenDO.getAccessToken());
                    // Remaining lifetime is reported in seconds (millis / 1000).
                    respDTO.setValidityPeriod(OAuth2Util.getTokenExpireTimeMillis(accessTokenDO)/1000);
                    respDTO.setScope(oauthAuthzMsgCtx.getApprovedScope());
                    return respDTO;
                } else {
                    if(log.isDebugEnabled()) {
                        log.debug("Access token + " + accessTokenDO.getAccessToken() + " is not valid anymore");
                    }
                    String tokenState = accessTokenDO.getTokenState();
                    if(OAuthConstants.TokenStates.TOKEN_STATE_ACTIVE.equals(tokenState)){
                        // Token is expired. Mark it as expired on database
                        tokenMgtDAO.setAccessTokenState(accessTokenDO.getAccessToken(),
                                OAuthConstants.TokenStates.TOKEN_STATE_EXPIRED,
                                UUID.randomUUID().toString(), userStoreDomain);
                        if (log.isDebugEnabled()) {
                            log.debug("Marked Access Token " + accessTokenDO.getAccessToken() + " as expired");
                        }
                    } else {
                        //Token is revoked or inactive
                        if (log.isDebugEnabled()) {
                            log.debug("Access Token " + accessTokenDO.getAccessToken() + " is " + accessTokenDO.getTokenState());
                        }
                    }
                }
            } else {
                if(log.isDebugEnabled()) {
                    log.debug("No access token found in database for Client ID : " + consumerKey +
                            ", User ID : " + authorizedUser + " and Scope : " + scope +
                            ". Therefore issuing new access token");
                }
            }

            // issue a new access token
            String accessToken;
            String refreshToken;
            try {
                accessToken = oauthIssuerImpl.accessToken();
                refreshToken = oauthIssuerImpl.refreshToken();
            } catch (OAuthSystemException e) {
                throw new IdentityOAuth2Exception(
                        "Error occurred while generating access token and refresh token", e);
            }

            // Reuse the previous refresh token if it is expired-but-still-
            // within its refresh validity window (with at least 1s margin
            // after clock-skew compensation).
            accessTokenDO = tokenMgtDAO.retrieveLatestAccessToken(
                    consumerKey, authorizedUser, userStoreDomain, scope, true);
            if(accessTokenDO != null){
                RefreshTokenValidationDataDO refreshTokenValidationDataDO =
                        tokenMgtDAO.validateRefreshToken(consumerKey, accessTokenDO.getRefreshToken());
                String state = refreshTokenValidationDataDO.getRefreshTokenState();
                long createdTime = refreshTokenValidationDataDO.getIssuedAt();
                long refreshValidity = OAuthServerConfiguration.getInstance().
                        getRefreshTokenValidityPeriodInSeconds() * 1000;
                long currentTime = System.currentTimeMillis();
                long skew = OAuthServerConfiguration.getInstance().getTimeStampSkewInSeconds() * 1000;
                if(OAuthConstants.TokenStates.TOKEN_STATE_EXPIRED.equals(state) &&
                        createdTime + refreshValidity - (currentTime + skew) > 1000){
                    refreshToken = accessTokenDO.getRefreshToken();
                }
            }

            if(OAuth2Util.checkUserNameAssertionEnabled()) {
                String userName = oauthAuthzMsgCtx.getAuthorizationReqDTO().getUsername();
                //use ':' for token & userStoreDomain separation
                String accessTokenStrToEncode = accessToken + ":" + userName;
                accessToken = Base64Utils.encode(accessTokenStrToEncode.getBytes());
                String refreshTokenStrToEncode = refreshToken + ":" + userName;
                refreshToken = Base64Utils.encode(refreshTokenStrToEncode.getBytes());
            }

            Timestamp timestamp = new Timestamp(new Date().getTime());

            // Default Validity Period
            long validityPeriod = OAuthServerConfiguration.getInstance().
                    getUserAccessTokenValidityPeriodInSeconds();

            // if a VALID validity period is set through the callback, then use it
            long callbackValidityPeriod = oauthAuthzMsgCtx.getValidityPeriod();
            if ((callbackValidityPeriod != OAuthConstants.UNASSIGNED_VALIDITY_PERIOD)
                    && callbackValidityPeriod > 0) {
                validityPeriod = callbackValidityPeriod;
            }

            accessTokenDO = new AccessTokenDO(consumerKey, authorizationReqDTO.getUsername(),
                    oauthAuthzMsgCtx.getApprovedScope(), timestamp, validityPeriod,
                    OAuthConstants.USER_TYPE_FOR_USER_TOKEN);
            accessTokenDO.setAccessToken(accessToken);
            accessTokenDO.setRefreshToken(refreshToken);
            accessTokenDO.setTokenState(OAuthConstants.TokenStates.TOKEN_STATE_ACTIVE);

            // Persist the access token in database
            tokenMgtDAO.storeAccessToken(accessToken, authorizationReqDTO.getConsumerKey(),
                    accessTokenDO, userStoreDomain);
            if (log.isDebugEnabled()) {
                log.debug("Persisted Access Token : " + accessToken + " for " +
                        "Client ID : " + authorizationReqDTO.getConsumerKey() +
                        ", Authorized User : " + authorizationReqDTO.getUsername() +
                        ", Timestamp : " + timestamp +
                        ", Validity period : " + validityPeriod +
                        ", Scope : " + OAuth2Util.buildScopeString(oauthAuthzMsgCtx.getApprovedScope()) +
                        ", Callback URL : " + authorizationReqDTO.getCallbackUrl() +
                        ", Token State : " + OAuthConstants.TokenStates.TOKEN_STATE_ACTIVE +
                        " and User Type : " + OAuthConstants.USER_TYPE_FOR_USER_TOKEN);
            }

            // Add the access token to the cache.
            if(cacheEnabled){
                oauthCache.addToCache(cacheKey, accessTokenDO);
                if(log.isDebugEnabled()){
                    log.debug("Access Token : " + accessToken + " was added to OAuthCache for " +
                            "cache key : " + cacheKey.getCacheKeyString());
                }
            }

            respDTO.setAccessToken(accessToken);
            respDTO.setValidityPeriod(validityPeriod);
            respDTO.setScope(accessTokenDO.getScope());
            return respDTO;
        }
    }
}
| |
package ch.unifr.pai.twice.widgets.client;
/*
* Copyright 2013 Oliver Schmid
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.util.HashMap;
import java.util.Map;
import ch.unifr.pai.twice.comm.serverPush.client.RemoteWidget;
import ch.unifr.pai.twice.utils.device.client.UUID;
import com.google.gwt.canvas.client.Canvas;
import com.google.gwt.canvas.dom.client.Context2d;
import com.google.gwt.canvas.dom.client.Context2d.TextAlign;
import com.google.gwt.canvas.dom.client.Context2d.TextBaseline;
import com.google.gwt.dom.client.Element;
import com.google.gwt.dom.client.Node;
import com.google.gwt.dom.client.Style.BorderStyle;
import com.google.gwt.dom.client.Style.Display;
import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.user.client.Event;
import com.google.gwt.user.client.Event.NativePreviewEvent;
import com.google.gwt.user.client.Event.NativePreviewHandler;
import com.google.gwt.user.client.Timer;
import com.google.gwt.user.client.ui.AbsolutePanel;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.HTML;
import com.google.gwt.user.client.ui.HasValue;
import com.google.gwt.user.client.ui.TextBox;
/**
 * A canvas-based text box that can display multiple independent cursors
 * (one per device/user), each identified by a UUID and drawn as a thin
 * colored HTML element over the rendered text. Cursors blink on a shared
 * timer. The widget implements HasValue&lt;String&gt; for its text content.
 */
public class MultiFocusTextBox extends Composite implements HasValue<String>{

    // Active cursors keyed by device/user UUID.
    private final Map<String, Cursor> cursors = new HashMap<String, Cursor>();
    FlowPanel p = new FlowPanel();
    // Current text content, also mirrored into the hidden TextBox.
    private String value;
    AbsolutePanel multiFocus = new AbsolutePanel();
    // Blink interval in milliseconds.
    private final int cursorSpeed = 700;
    String[] colors = new String[]{"red", "blue"};
    Cursor blueCursor = new Cursor("blue", UUID.createNew());
    Cursor redCursor = new Cursor("red", UUID.createNew());
    // 2D drawing context of the canvas the text is painted on.
    private final Context2d context;
    private TextBox textBox = new TextBox();
    private final Canvas c;
    private Timer blinkTimer;
    // Toggled on every blink tick; all cursors share this visibility phase.
    private boolean cursorsVisible;

    public MultiFocusTextBox() {
        // Shared blink timer: toggles visibility of every registered cursor.
        blinkTimer = new Timer(){
            @Override
            public void run() {
                for(Cursor c : cursors.values()){
                    c.setVisible(cursorsVisible);
                }
                cursorsVisible = !cursorsVisible;
            }
        };
        blinkTimer.scheduleRepeating(cursorSpeed);
        p.getElement().getStyle().setDisplay(Display.INLINE_BLOCK);
        // NOTE(review): Canvas.createIfSupported() returns null on browsers
        // without canvas support — the constructor would then NPE; confirm
        // canvas support is guaranteed for the target platforms.
        c = Canvas.createIfSupported();
        c.getElement().getStyle().setBorderWidth(0, Unit.PX);
        c.getElement().getStyle().setProperty("outline", "none");
        c.addClickHandler(new ClickHandler() {
            @Override
            public void onClick(ClickEvent event) {
                //TODO if it is a new device, create a new cursor with the
                repositionCursor(null, event.getRelativeX(c.getCanvasElement()), event.getRelativeY(c.getCanvasElement()));
            }
        });
        multiFocus.insert(c, 0, 0, 0);
        initWidget(multiFocus);
        getElement().getStyle().setBorderStyle(BorderStyle.SOLID);
        getElement().getStyle().setBorderWidth(1, Unit.PX);
        c.getElement().getStyle().setMargin(5, Unit.PX);
        context = c.getContext2d();
        context.setTextAlign(TextAlign.LEFT);
        context.setTextBaseline(TextBaseline.TOP);
        context.setFont("13px sans-serif;");
        // TODO Auto-generated constructor stub
        // multiFocus.setVisible(false);
        multiFocus.setWidth("161px");
        multiFocus.setHeight("28px");
    }

    // Moves a cursor to the character nearest to pixel x.
    // NOTE(review): deviceId and y are currently ignored — only the blue
    // cursor is ever repositioned (see TODO in the click handler).
    private void repositionCursor(String deviceId, int x, int y){
        blueCursor.setPosition(findChar(x));
    }

    /**
     * Maps a pixel x-offset to a character index in {@code value} by
     * accumulating measured text widths. Clicks on the left half of a glyph
     * resolve to the index before it, on the right half to the index after.
     * Falls through to the full length when x is beyond the text.
     */
    private int findChar(int x){
        StringBuilder b = new StringBuilder();
        for(char c : value.toCharArray()){
            b.append(c);
            double textWidth = context.measureText(b.toString()).getWidth();
            if(x<textWidth){
                double charWidth = context.measureText(String.valueOf(c)).getWidth();
                // Left half of the glyph -> place cursor before it.
                if(textWidth-(charWidth/2.0)>x)
                    return Math.max(b.length()-1, 0);
                else
                    return b.length();
            }
        }
        return b.length();
    }

    private void registerCursor(String uuid, Cursor c){
        cursors.put(uuid, c);
    }

    private void unregisterCursor(String uuid){
        cursors.remove(uuid);
    }

    /**
     * A single on-screen cursor: a 1x18 px colored HTML element positioned
     * absolutely over the canvas, tied to a device UUID. A native preview
     * handler is (re)attached while the cursor is shown.
     */
    protected class Cursor extends HTML{
        int x;
        int y;
        final String uuid;
        // Character index within the text value.
        int position;
        HandlerRegistration reg;

        private Cursor(String color, String uuid){
            this.uuid = uuid;
            getElement().getStyle().setBackgroundColor(color);
            setWidth("1px");
            setHeight("18px");
        }

        // Detaches the cursor from the panel and releases its event handler.
        private void hide(){
            multiFocus.remove(this);
            unregisterCursor(uuid);
            if(reg!=null){
                reg.removeHandler();
                reg = null;
            }
        }

        // Attaches the cursor at (x+5, y+5) and listens for mouse-ups
        // outside the canvas (intended to auto-hide; currently disabled).
        private void show(){
            registerCursor(uuid, this);
            multiFocus.add(this);
            multiFocus.setWidgetPosition(this, x+5, y+5);
            reg = Event.addNativePreviewHandler(new NativePreviewHandler() {
                @Override
                public void onPreviewNativeEvent(NativePreviewEvent event) {
                    if(event.getTypeInt() == Event.ONMOUSEUP && !c.getElement().isOrHasChild(Element.as(event.getNativeEvent().getEventTarget()))){
                        //TODO enable after testing
                        // hide();
                    }
                }
            });
        }

        /**
         * Moves the cursor to a character index; the pixel x is derived by
         * measuring the text up to that index. Implemented as hide + show so
         * the handler registration is refreshed.
         */
        public void setPosition(int position){
            hide();
            this.position = position;
            this.x = (int)Math.max(0, context.measureText(position<value.length() ? value.substring(0, position) : value).getWidth());
            show();
        }

        public int getPosition(){
            return position;
        }
    }

    @Override
    public HandlerRegistration addValueChangeHandler(
            ValueChangeHandler<String> handler) {
        return addHandler(handler, ValueChangeEvent.getType());
    }

    @Override
    public String getValue() {
        return value;
    }

    // Looks up the cursor for a UUID, creating-and-registering one if absent.
    // NOTE(review): newly created cursors are always blue despite the colors
    // array above — confirm whether per-device colors were intended.
    protected Cursor getOrCreateCursor(String uuid){
        Cursor c = cursors.get(uuid);
        if(c==null){
            c = new Cursor("blue", uuid);
            registerCursor(uuid, c);
        }
        return c;
    }

    protected Map<String, Cursor> getCursors(){
        return cursors;
    }

    // Inserts character c at index pos and shifts every cursor at or after
    // pos one position to the right.
    private void processInput(String uuid, int pos, char c){
        if(pos<=value.length()){
            setValue(value.substring(0, pos)+c+((pos==value.length())? "" : value.substring(pos)));
        }
        for(Cursor cursor : cursors.values()){
            if(pos<=cursor.getPosition()){
                cursor.setPosition(cursor.getPosition()+1);
            }
        }
    }

    // Repaints the canvas with the new value and mirrors it into the TextBox.
    @Override
    public void setValue(String value) {
        this.value = value;
        textBox.setValue(value);
        context.clearRect(0, 0, c.getOffsetWidth(), c.getOffsetHeight());
        context.fillText(value, 0, 0);
    }

    // NOTE(review): fireEvents is ignored — ValueChangeEvent is never fired.
    @Override
    public void setValue(String value, boolean fireEvents) {
        setValue(value);
        //TODO
    }
}
| |
package com.wordsaretoys.quencher.data;
import java.util.ArrayList;
import android.content.ContentValues;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.util.SparseArray;
import com.wordsaretoys.quencher.common.Storable;
import com.wordsaretoys.quencher.common.Storage;
/**
* represents a single track
* a track is a timeline of notes tied to a voice,
* a tone scale, and a time signature
*/
public class Track extends Storable {

    /*
     * default values and constants
     */
    static final String TAG = "Track";

    // initial track volume
    static final float DefaultVolume = 0.5f;

    // initial timing parameters
    static final int DefaultBeats = 4;
    static final int DefaultSlots = 1;

    /*
     * data labels for database representation
     */
    public static final String L_TABLE = "track";
    public static final String L_SCORE = "score";
    public static final String L_VOICE = "voice";
    public static final String L_SCALE = "scale";
    public static final String L_VOL = "vol";
    public static final String L_PAN = "pan";
    public static final String L_SLOTS = "slots";
    public static final String L_BEATS = "beats";
    // column name is "index" reversed — presumably to dodge the SQL INDEX
    // keyword; do not "fix" the spelling, it must match the schema.
    public static final String L_INDEX = "xedni";
    public static final String L_MUTED = "muted";
    public static final String L_LOCKED = "locked";

    public static final String[] L_FIELDS = {
        L_ID, L_SCORE, L_VOICE, L_SCALE, L_VOL, L_PAN,
        L_SLOTS, L_BEATS, L_INDEX, L_MUTED, L_LOCKED
    };

    public static final String L_ORDER = L_INDEX;

    /*
     * data variables
     */
    // reference to parent
    private Score score;
    // collection of notes, indexed by positions
    private SparseArray<Note> notes;
    // voice assigned to this track
    private Voice voice;
    // scale/key assigned to this track
    private Scale scale;
    // beats per bar
    private int beats;
    // note slots per beat
    private int slots;
    // relative volume
    private float volume;
    // pan l/r setting
    private float pan;
    // track ordering within score
    private int index;
    // muting state
    private boolean muted;
    // locked state
    private boolean locked;
    // trash pile for deleted notes
    private ArrayList<Note> trash;

    /**
     * ctor, creates new track
     * @param s score to join
     * @param k scale assigned to track
     * @param v voice assigned to track
     */
    public Track(Score s, Scale k, Voice v) {
        super();
        score = s;
        notes = new SparseArray<Note>();
        voice = v;
        scale = k;
        beats = DefaultBeats;
        slots = DefaultSlots;
        volume = DefaultVolume;
        pan = 0;
        index = s.getTrackCount();
        trash = new ArrayList<Note>();
    }

    /**
     * default ctor
     * @param s score to join
     */
    public Track(Score s) {
        this(s, Scale.getDefault(), Voice.getDefault());
    }

    /**
     * copies data from existing track (deep-copies the notes)
     * @param t track to copy from
     */
    public synchronized void copy(Track t) {
        voice = t.voice;
        scale = t.scale;
        volume = t.volume;
        pan = t.pan;
        slots = t.slots;
        beats = t.beats;
        notes = new SparseArray<Note>();
        for (int i = 0, il = t.notes.size(); i < il; i++) {
            Note note = new Note(this);
            note.copy(t.notes.valueAt(i));
            notes.put(note.getIndex(), note);
        }
        index = t.index;
        muted = t.muted;
        locked = t.locked;
    }

    /**
     * return the note at a given position
     * @param i position within track
     * @return note at that position, or null
     */
    public Note getNote(int i) {
        return notes.get(i);
    }

    /**
     * add a note to a given position
     * pitch MUST correspond to current scale
     *
     * @param i position to add to
     * @param p pitch number
     */
    public void setNote(int i, int p) {
        Note note = notes.get(i);
        if (note == null) {
            note = new Note(this);
            notes.put(i, note);
        }
        note.setIndex(i);
        note.setPitchNumber(p);
        onChange();
    }

    /**
     * remove note from a given position
     * the note is kept in the trash pile so its row
     * can be deleted from the database on the next write()
     * @param i position to remove from
     */
    public void clearNote(int i) {
        Note note = notes.get(i);
        if (note != null) {
            trash.add(note);
            notes.delete(i);
        }
        onChange();
    }

    /**
     * get number of notes in track
     * @return note count
     */
    public int getNoteCount() {
        return notes.size();
    }

    /**
     * get note at array index
     * this is the collection version of getNote()
     *
     * @param i array index of note
     * @return note object
     */
    public Note getNoteAt(int i) {
        return notes.valueAt(i);
    }

    /**
     * get the voice assigned to the track
     * @return voice object
     */
    public Voice getVoice() {
        return voice;
    }

    /**
     * assign a voice to this track
     * @param v voice object
     */
    public synchronized void setVoice(Voice v) {
        voice = v;
        onChange();
    }

    /**
     * get the scale assigned to this track
     * @return scale object
     */
    public Scale getScale() {
        return scale;
    }

    /**
     * assign a scale to this track
     *
     * if notes are assigned to the track, the
     * new scale MUST have the same tone count
     *
     * @param s scale object
     */
    public synchronized void setScale(Scale s) {
        scale = s;
        onChange();
    }

    /**
     * get relative volume
     * @return volume
     */
    public float getVolume() {
        return volume;
    }

    /**
     * get pan l/r (-1..1)
     * @return pan setting
     */
    public float getPan() {
        return pan;
    }

    /**
     * set relative volume
     * @param v volume
     */
    public synchronized void setVolume(float v) {
        volume = v;
        onChange();
    }

    /**
     * set pan l/r (-1..1)
     * @param p pan setting
     */
    public synchronized void setPan(float p) {
        pan = p;
        onChange();
    }

    /**
     * get timing slots
     * @return slots count
     */
    public int getSlots() {
        return slots;
    }

    /**
     * set timing slots
     * @param s slots count
     */
    public synchronized void setSlots(int s) {
        slots = s;
        onChange();
    }

    /**
     * get timing beats
     * @return beat count
     */
    public int getBeats() {
        return beats;
    }

    /**
     * set timing beats
     * @param b beat count
     */
    public synchronized void setBeats(int b) {
        beats = b;
        onChange();
    }

    /**
     * get the timing ratio for the track
     * NOTE(review): beats cancels out of this expression, so it always
     * equals 1/slots — confirm the intended formula before changing it.
     * @return track timing ratio
     */
    public float getTiming() {
        return (float)beats / (float)(beats * slots);
    }

    /**
     * get track index within score
     * @return track index
     */
    public int getIndex() {
        return index;
    }

    /**
     * set track index within score
     * @param i track index
     */
    public synchronized void setIndex(int i) {
        index = i;
        onChange();
    }

    /**
     * get mute status
     * @return true if track is muted
     */
    public boolean isMuted() {
        return muted;
    }

    /**
     * set muted state
     * @param m true if track is to be muted
     */
    public synchronized void setMuted(boolean m) {
        muted = m;
        onChange();
    }

    /**
     * get lock status
     * @return true if track is locked against edits
     */
    public boolean isLocked() {
        return locked;
    }

    /**
     * set lock status
     * @param l true if track is locked against edits
     */
    public synchronized void setLocked(boolean l) {
        locked = l;
        onChange();
    }

    /**
     * get the score containing this track
     * @return parent object
     */
    public Score getScore() { return score; }

    /**
     * get time offset in decimal seconds for a given note position
     * @param i note position
     * @return decimal seconds
     */
    public float positionToTime(int i) {
        return 60f * (float) i * getTiming() / (float) score.getTempo();
    }

    /**
     * get note position for a given time offset in decimal seconds
     * @param time decimal seconds
     * @return note position
     */
    public int timeToPosition(float time) {
        return (int)((time * (float) score.getTempo()) / (60f * getTiming()));
    }

    @Override
    public String getTableName() {
        return L_TABLE;
    }

    @Override
    public String[] getFieldNames() {
        return L_FIELDS;
    }

    @Override
    public String getOrderingField() {
        return L_ORDER;
    }

    @Override
    public void readFields(Cursor tc) {
        super.readFields(tc);
        long voiceId = tc.getLong(tc.getColumnIndex(L_VOICE));
        voice = Voice.fromId(voiceId);
        // crash protection
        if (voice == null) {
            voice = Voice.getDefault();
        }
        long scaleId = tc.getLong(tc.getColumnIndex(L_SCALE));
        scale = Scale.fromId(scaleId);
        // crash protection
        if (scale == null) {
            scale = Scale.getDefault();
        }
        volume = tc.getFloat(tc.getColumnIndex(L_VOL));
        pan = tc.getFloat(tc.getColumnIndex(L_PAN));
        slots = tc.getInt(tc.getColumnIndex(L_SLOTS));
        beats = tc.getInt(tc.getColumnIndex(L_BEATS));
        index = tc.getInt(tc.getColumnIndex(L_INDEX));
        muted = (tc.getInt(tc.getColumnIndex(L_MUTED)) == 1);
        locked = (tc.getInt(tc.getColumnIndex(L_LOCKED)) == 1);
        Cursor nc = Note.selectByTrack(id);
        // try/finally so the cursor is released even if note loading throws
        // (previously it leaked on any exception in the loop)
        try {
            if (nc.moveToFirst()) {
                do {
                    Note note = new Note(this);
                    note.readFields(nc);
                    notes.put(note.getIndex(), note);
                } while (nc.moveToNext());
            }
        } finally {
            nc.close();
        }
    }

    @Override
    public synchronized void writeFields(ContentValues values) {
        super.writeFields(values);
        values.put(L_SCORE, score.getId());
        values.put(L_VOICE, voice.getId());
        values.put(L_SCALE, scale.getId());
        values.put(L_VOL, volume);
        values.put(L_PAN, pan);
        values.put(L_SLOTS, slots);
        values.put(L_BEATS, beats);
        values.put(L_INDEX, index);
        values.put(L_MUTED, muted ? 1 : 0);
        values.put(L_LOCKED, locked ? 1 : 0);
    }

    @Override
    protected void onChange() {
        super.onChange();
        // parent onChange fires ScoreChange event
        score.onChange();
    }

    @Override
    public void write(SQLiteDatabase db) {
        super.write(db);
        for (int i = 0, il = notes.size(); i < il; i++) {
            Note note = notes.valueAt(i);
            note.write(db);
        }
        // take out the trash as well
        for (Note note : trash) {
            note.delete(db);
        }
        trash.clear();
    }

    @Override
    public void delete(SQLiteDatabase db) {
        super.delete(db);
        // a mass delete by key is much faster
        // than calling delete() for each note
        Note.deleteByTrack(db, id);
    }

    /**
     * select all tracks for a given score
     * @param id database id to filter by
     * @return cursor containing selected rows
     */
    public static Cursor selectByScore(long id) {
        return Storage.INSTANCE.select(
            L_TABLE, L_FIELDS, L_ORDER, L_SCORE, id);
    }

    /**
     * determines if an object is used by ANY track
     * @param field column to match against
     * @param id database id of object
     * @return true if object is in use
     */
    private static boolean usesObject(String field, long id) {
        SQLiteDatabase db = Storage.INSTANCE.getReadableDatabase();
        String[] args = { String.valueOf(id) };
        String[] cols = { L_ID };
        Cursor tc = db.query(
            L_TABLE, cols,
            field + " = ?", args,
            null, null, null);
        // try/finally so the cursor is released even if getCount() throws
        try {
            return tc.getCount() > 0;
        } finally {
            tc.close();
        }
    }

    /**
     * determines if a scale is used by ANY track
     * @param id database id of scale
     * @return true if scale is in use
     */
    public static boolean usesScale(long id) {
        return usesObject(L_SCALE, id);
    }

    /**
     * determines if a voice is used by ANY track
     * @param id database id of voice
     * @return true if voice is in use
     */
    public static boolean usesVoice(long id) {
        return usesObject(L_VOICE, id);
    }
}
| |
package org.ethereum.util;
import org.ethereum.db.ByteArrayWrapper;
import org.spongycastle.util.encoders.Hex;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
public class ByteUtil {
public static final byte[] EMPTY_BYTE_ARRAY = new byte[0];
public static final byte[] ZERO_BYTE_ARRAY = new byte[]{0};
/**
* Creates a copy of bytes and appends b to the end of it
*/
/**
 * Creates a copy of bytes and appends b to the end of it
 */
public static byte[] appendByte(byte[] bytes, byte b) {
    final byte[] extended = new byte[bytes.length + 1];
    System.arraycopy(bytes, 0, extended, 0, bytes.length);
    extended[bytes.length] = b;
    return extended;
}
/**
* The regular {@link java.math.BigInteger#toByteArray()} method isn't quite what we often need:
* it appends a leading zero to indicate that the number is positive and may need padding.
*
* @param b the integer to format into a byte array
* @param numBytes the desired size of the resulting byte array
* @return numBytes byte long array.
*/
/**
 * The regular {@link java.math.BigInteger#toByteArray()} method isn't quite what we often need:
 * it appends a leading zero to indicate that the number is positive and may need padding.
 *
 * @param b the integer to format into a byte array
 * @param numBytes the desired size of the resulting byte array
 * @return numBytes byte long array, left-padded with zeroes;
 *         <code>null</code> when b is <code>null</code>.
 */
public static byte[] bigIntegerToBytes(BigInteger b, int numBytes) {
    if (b == null) {
        return null;
    }
    final byte[] padded = new byte[numBytes];
    final byte[] raw = b.toByteArray();
    // Skip the sign byte when the raw encoding is exactly one byte too long.
    final int srcOffset = (raw.length == numBytes + 1) ? 1 : 0;
    final int copyLen = Math.min(raw.length, numBytes);
    System.arraycopy(raw, srcOffset, padded, numBytes - copyLen, copyLen);
    return padded;
}
/**
* Omitting sign indication byte.
* <br><br>
* Instead of {@link org.spongycastle.util.BigIntegers#asUnsignedByteArray(BigInteger)}
* <br>we use this custom method to avoid an empty array in case of BigInteger.ZERO
*
* @param value - any big integer number. A <code>null</code>-value will return <code>null</code>
* @return A byte array without a leading zero byte if present in the signed encoding.
* BigInteger.ZERO will return an array with length 1 and byte-value 0.
*/
public static byte[] bigIntegerToBytes(BigInteger value) {
if (value == null)
return null;
byte[] data = value.toByteArray();
if (data.length != 1 && data[0] == 0) {
byte[] tmp = new byte[data.length - 1];
System.arraycopy(data, 1, tmp, 0, tmp.length);
data = tmp;
}
return data;
}
/**
* Returns the amount of nibbles that match each other from 0 ...
* amount will never be larger than smallest input
*
* @param a - first input
* @param b - second input
* @return Number of bytes that match
*/
public static int matchingNibbleLength(byte[] a, byte[] b) {
int i = 0;
int length = a.length < b.length ? a.length : b.length;
while (i < length) {
if (a[i] != b[i])
return i;
i++;
}
return i;
}
/**
* Converts a long value into a byte array.
*
* @param val - long value to convert
* @return <code>byte[]</code> of length 8, representing the long value
*/
public static byte[] longToBytes(long val) {
return ByteBuffer.allocate(8).putLong(val).array();
}
/**
* Converts a long value into a byte array.
*
* @param val - long value to convert
* @return decimal value with leading byte that are zeroes striped
*/
public static byte[] longToBytesNoLeadZeroes(long val) {
// todo: improve performance by while strip numbers until (long >> 8 == 0)
byte[] data = ByteBuffer.allocate(8).putLong(val).array();
return stripLeadingZeroes(data);
}
public static byte[] intToBytes(int val){
if (val == 0) return EMPTY_BYTE_ARRAY;
int lenght = 0;
int tmpVal = val;
while (tmpVal != 0){
tmpVal = tmpVal >> 8;
++lenght;
}
byte[] result = new byte[lenght];
int index = result.length - 1;
while(val != 0){
result[index] = (byte)(val & 0xFF);
val = val >> 8;
index -= 1;
}
return result;
}
/**
* Convert a byte-array into a hex String.<br>
* Works similar to {@link Hex#toHexString}
* but allows for <code>null</code>
*
* @param data - byte-array to convert to a hex-string
* @return hex representation of the data.<br>
* Returns an empty String if the input is <code>null</code>
*
* @see Hex#toHexString
*/
public static String toHexString(byte[] data) {
return data == null ? "" : Hex.toHexString(data);
}
/**
* Calculate packet length
*
* @param msg byte[]
* @return byte-array with 4 elements
*/
public static byte[] calcPacketLength(byte[] msg) {
int msgLen = msg.length;
return new byte[]{
(byte) ((msgLen >> 24) & 0xFF),
(byte) ((msgLen >> 16) & 0xFF),
(byte) ((msgLen >> 8) & 0xFF),
(byte) ((msgLen) & 0xFF)};
}
/**
* Cast hex encoded value from byte[] to int
*
* Limited to Integer.MAX_VALUE: 2^32-1 (4 bytes)
*
* @param b array contains the values
* @return unsigned positive int value.
*/
public static int byteArrayToInt(byte[] b) {
if (b == null || b.length == 0)
return 0;
return new BigInteger(1, b).intValue();
}
/**
* Cast hex encoded value from byte[] to int
*
* Limited to Integer.MAX_VALUE: 2^32-1 (4 bytes)
*
* @param b array contains the values
* @return unsigned positive long value.
*/
public static long byteArrayToLong(byte[] b) {
if (b == null || b.length == 0)
return 0;
return new BigInteger(1, b).longValue();
}
/**
* Turn nibbles to a pretty looking output string
*
* Example. [ 1, 2, 3, 4, 5 ] becomes '\x11\x23\x45'
*
* @param nibbles - getting byte of data [ 04 ] and turning
* it to a '\x04' representation
* @return pretty string of nibbles
*/
public static String nibblesToPrettyString(byte[] nibbles) {
StringBuilder builder = new StringBuilder();
for (byte nibble : nibbles) {
final String nibbleString = oneByteToHexString(nibble);
builder.append("\\x").append(nibbleString);
}
return builder.toString();
}
public static String oneByteToHexString(byte value) {
String retVal = Integer.toString(value & 0xFF, 16);
if (retVal.length() == 1) retVal = "0" + retVal;
return retVal;
}
/**
* Calculate the number of bytes need
* to encode the number
*
* @param val - number
* @return number of min bytes used to encode the number
*/
public static int numBytes(String val) {
BigInteger bInt = new BigInteger(val);
int bytes = 0;
while (!bInt.equals(BigInteger.ZERO)) {
bInt = bInt.shiftRight(8);
++bytes;
}
if (bytes == 0) ++bytes;
return bytes;
}
/**
* @param arg - not more that 32 bits
* @return - bytes of the value pad with complete to 32 zeroes
*/
public static byte[] encodeValFor32Bits(Object arg) {
byte[] data;
// check if the string is numeric
if (arg.toString().trim().matches("-?\\d+(\\.\\d+)?"))
data = new BigInteger(arg.toString().trim()).toByteArray();
// check if it's hex number
else if (arg.toString().trim().matches("0[xX][0-9a-fA-F]+"))
data = new BigInteger(arg.toString().trim().substring(2), 16).toByteArray();
else
data = arg.toString().trim().getBytes();
if (data.length > 32)
throw new RuntimeException("values can't be more than 32 byte");
byte[] val = new byte[32];
int j = 0;
for (int i = data.length; i > 0; --i) {
val[31 - j] = data[i - 1];
++j;
}
return val;
}
/**
* encode the values and concatenate together
*
* @param args Object
* @return byte[]
*/
public static byte[] encodeDataList(Object... args) {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
for (Object arg : args) {
byte[] val = encodeValFor32Bits(arg);
try {
baos.write(val);
} catch (IOException e) {
throw new Error("Happen something that should never happen ", e);
}
}
return baos.toByteArray();
}
public static int firstNonZeroByte(byte[] data) {
for (int i = 0; i < data.length; ++i) {
if (data[i] != 0) {
return i;
}
}
return -1;
}
public static byte[] stripLeadingZeroes(byte[] data) {
if (data == null)
return null;
final int firstNonZero = firstNonZeroByte(data);
switch (firstNonZero) {
case -1:
return ZERO_BYTE_ARRAY;
case 0:
return data;
default:
byte[] result = new byte[data.length - firstNonZero];
System.arraycopy(data, firstNonZero, result, 0, data.length - firstNonZero);
return result;
}
}
/**
* increment byte array as a number until max is reached
*
* @param bytes byte[]
* @return boolean
*/
public static boolean increment(byte[] bytes) {
final int startIndex = 0;
int i;
for (i = bytes.length - 1; i >= startIndex; i--) {
bytes[i]++;
if (bytes[i] != 0)
break;
}
// we return false when all bytes are 0 again
return (i >= startIndex || bytes[startIndex] != 0);
}
/**
* Utility function to copy a byte array into a new byte array with given size.
* If the src length is smaller than the given size, the result will be left-padded
* with zeros.
*
* @param value - a BigInteger with a maximum value of 2^256-1
* @return Byte array of given size with a copy of the <code>src</code>
*/
public static byte[] copyToArray(BigInteger value) {
byte[] src = ByteUtil.bigIntegerToBytes(value);
byte[] dest = ByteBuffer.allocate(32).array();
System.arraycopy(src, 0, dest, dest.length - src.length, src.length);
return dest;
}
public static ByteArrayWrapper wrap(byte[] data) {
return new ByteArrayWrapper(data);
}
public static byte[] setBit(byte[] data, int pos, int val) {
if ((data.length * 8) - 1 < pos)
throw new Error("outside byte array limit, pos: " + pos);
int posByte = data.length - 1 - (pos) / 8;
int posBit = (pos) % 8;
byte setter = (byte) (1 << (posBit));
byte toBeSet = data[posByte];
byte result;
if (val == 1)
result = (byte) (toBeSet | setter);
else
result = (byte) (toBeSet & ~setter);
data[posByte] = result;
return data;
}
public static int getBit(byte[] data, int pos) {
if ((data.length * 8) - 1 < pos)
throw new Error("outside byte array limit, pos: " + pos);
int posByte = data.length - 1 - pos / 8;
int posBit = pos % 8;
byte dataByte = data[posByte];
return Math.min(1, (dataByte & (1 << (posBit))));
}
/**
* @param arrays - arrays to merge
* @return - merged array
*/
public static byte[] merge(byte[]... arrays)
{
int arrCount = 0;
int count = 0;
for (byte[] array: arrays)
{
arrCount++;
count += array.length;
}
// Create new array and copy all array contents
byte[] mergedArray = new byte[count];
int start = 0;
for (byte[] array: arrays) {
System.arraycopy(array, 0, mergedArray, start, array.length);
start += array.length;
}
return mergedArray;
}
public static boolean isNullOrZeroArray(byte[] array){
return (array == null) || (array.length == 0);
}
public static boolean isSingleZero(byte[] array){
return (array.length == 1 && array[0] == 0);
}
public static Set<byte[]> difference(Set<byte[]> setA, Set<byte[]> setB){
Set<byte[]> result = new HashSet<>();
for (byte[] elementA : setA){
boolean found = false;
for (byte[] elementB : setB){
if (Arrays.equals(elementA, elementB)){
found = true;
break;
}
}
if (!found) result.add(elementA);
}
return result;
}
public static int length(byte[]... bytes) {
int result = 0;
for (byte[] array : bytes) {
result += (array == null) ? 0 : array.length;
}
return result;
}
}
| |
package schaugenau.input;
import com.jme3.math.Vector2f;
import com.jme3.math.Vector3f;
import com.jme3.math.Vector4f;
import schaugenau.app.App;
/**
* Distributed under the MIT License. (See accompanying file LICENSE or copy at
* https://github.com/raphaelmenges/schaugenau/blob/master/src/LICENSE)
*
* Input using EyeX.
*
* @author Raphael Menges
*
*/
public class EyeXInput extends TrackerInput {

    /** Interpolation factor applied when smoothing the track-box eye positions. */
    protected final float trackBoxFilterSpeed = 0.1f;

    /** Smoothed 3D position of the left eye inside the track box. */
    protected Vector3f filteredLeftTrackBox;
    /** Smoothed 3D position of the right eye inside the track box. */
    protected Vector3f filteredRightTrackBox;

    /**
     * Creates the EyeX input backend.
     *
     * @param app owning application
     */
    public EyeXInput(App app) {
        super(app);
        this.filteredLeftTrackBox = new Vector3f();
        this.filteredRightTrackBox = new Vector3f();
    }

    /**
     * Fetches one raw gaze sample from the tracker.
     * Currently returns a zero vector because the native EyeXUtil binding is
     * disabled (TODO). Intended mapping once it is wired up:
     * x = left eye x, y = right eye x, z = left eye y, w = right eye y.
     */
    @Override
    protected Vector4f aquireTrackerData() {
        // data.x = (float) schaugenau.eyecontrol.EyeXUtil.getLeft_x();
        // data.y = (float) schaugenau.eyecontrol.EyeXUtil.getRight_x();
        // data.z = (float) schaugenau.eyecontrol.EyeXUtil.getLeft_y();
        // data.w = (float) schaugenau.eyecontrol.EyeXUtil.getRight_y();
        return new Vector4f();
    }

    /**
     * Converts one raw gaze sample into a 2D pixel position on the window.
     *
     * An eye reporting (0, 0) counts as "not tracked". When both eyes are
     * available their positions are averaged; with one eye that eye is used
     * directly; with none the result is (0, 0) and tracking is flagged as
     * not working.
     */
    @Override
    protected Vector2f processTrackingData(Vector4f data) {
        final float leftRawX = data.x;
        final float rightRawX = data.y;
        // flip y to match the internal coordinate system
        final float leftFlippedY = 1.0f - data.z;
        final float rightFlippedY = 1.0f - data.w;

        // an eye at exactly (0, 0) in raw coordinates could not be tracked
        final boolean leftAvailable = !(data.x == 0 && data.z == 0);
        final boolean rightAvailable = !(data.y == 0 && data.w == 0);

        Vector2f gaze = new Vector2f();
        if (leftAvailable && rightAvailable) {
            // both eyes tracked: average them
            isTrackingCurrentlyWorking = true;
            gaze.x = (leftRawX + rightRawX) / 2.0f;
            gaze.y = (leftFlippedY + rightFlippedY) / 2.0f;
        } else if (leftAvailable) {
            // only the left eye was tracked
            isTrackingCurrentlyWorking = true;
            gaze.x = leftRawX;
            gaze.y = leftFlippedY;
        } else if (rightAvailable) {
            // only the right eye was tracked
            isTrackingCurrentlyWorking = true;
            gaze.x = rightRawX;
            gaze.y = rightFlippedY;
        } else {
            // nothing was tracked
            isTrackingCurrentlyWorking = false;
            gaze.x = 0;
            gaze.y = 0;
        }

        // scale normalized coordinates to window pixels
        gaze.x = gaze.x * app.getWindowResolution().x;
        gaze.y = gaze.y * app.getWindowResolution().y;

        // tracking only counts as working while the head position is OK
        isTrackingCurrentlyWorking = (isTrackingCurrentlyWorking && (this.getHeadState() == HeadState.OK));

        // Track-box filtering is disabled until the EyeXUtil binding exists (TODO):
        // if (this.isLeftEyePositionAvailable()) {
        //     filteredLeftTrackBox.interpolate(new Vector3f(
        //         (float) EyeXUtil.getTrackboxLeft_x() - 0.5f,
        //         (float) EyeXUtil.getTrackboxLeft_y() - 0.5f,
        //         (float) EyeXUtil.getTrackboxLeft_z()), this.trackBoxFilterSpeed);
        // }
        // if (this.isRightEyePositionAvailable()) {
        //     filteredRightTrackBox.interpolate(new Vector3f(
        //         (float) EyeXUtil.getTrackboxRight_x() - 0.5f,
        //         (float) EyeXUtil.getTrackboxRight_y() - 0.5f,
        //         (float) EyeXUtil.getTrackboxRight_z()), this.trackBoxFilterSpeed);
        // }

        return gaze;
    }

    /** Starts tracking if the base class agrees. Native start is still disabled. */
    @Override
    public boolean start() {
        if (!super.start()) {
            return false;
        }
        // schaugenau.eyecontrol.EyeXUtil.startTracking(); // TODO
        return true;
    }

    /** Stops tracking if the base class agrees. Native stop is still disabled. */
    @Override
    public boolean stop() {
        if (!super.stop()) {
            return false;
        }
        // schaugenau.eyecontrol.EyeXUtil.stopTracking(); // TODO
        return true;
    }

    /** Unconditionally starts the native tracker (stubbed out). */
    @Override
    public void forceStart() {
        // schaugenau.eyecontrol.EyeXUtil.startTracking(); // TODO
    }

    /** Unconditionally stops the native tracker (stubbed out). */
    @Override
    public void forceStop() {
        // schaugenau.eyecontrol.EyeXUtil.stopTracking(); // TODO
    }

    /** Forwards calibration points to the tracker (stubbed out). */
    @Override
    public void setCalibrationPoints(double[] points) {
        // schaugenau.eyecontrol.EyeXUtil.setCalibrationPoints(points); // TODO
    }

    /** Begins asynchronous collection of calibration samples (stubbed out). */
    @Override
    public void collectCalibrationDataAsync() {
        // schaugenau.eyecontrol.EyeXUtil.collectCalibrationDataAsync(); // TODO
    }

    /** Begins asynchronous computation of the calibration (stubbed out). */
    @Override
    public void computeCalibrationDataAsync() {
        // schaugenau.eyecontrol.EyeXUtil.computeCalibrationDataAsync(); // TODO
    }

    /** Aborts an asynchronous calibration (stubbed out). */
    @Override
    public void stopCalibrationAsync() {
        // schaugenau.eyecontrol.EyeXUtil.stopCalibrationAsync(); // TODO
    }

    /**
     * Reports the current head state. Always ERROR while the native binding
     * is disabled (TODO). Intended mapping:
     * -1 = NOT_DETECTED, 0 = OK, 1 = CLOSE, 2 = FAR, otherwise ERROR.
     */
    @Override
    public HeadState getHeadState() {
        // switch (schaugenau.eyecontrol.EyeXUtil.getHeadPosition()) { ... } // TODO
        return HeadState.ERROR;
    }

    /** @return defensive copy of the filtered left-eye track-box position */
    @Override
    public Vector3f getLeftEyePosition() {
        return filteredLeftTrackBox.clone();
    }

    /** @return defensive copy of the filtered right-eye track-box position */
    @Override
    public Vector3f getRightEyePosition() {
        return filteredRightTrackBox.clone();
    }

    /** Always false while the native binding is disabled (TODO). */
    @Override
    public boolean isLeftEyePositionAvailable() {
        // return (EyeXUtil.getTrackboxLeft_z() != 0) && (getHeadState() != HeadState.NOT_DETECTED); // TODO
        return false;
    }

    /** Always false while the native binding is disabled (TODO). */
    @Override
    public boolean isRightEyePositionAvailable() {
        // return (EyeXUtil.getTrackboxRight_z() != 0) && (getHeadState() != HeadState.NOT_DETECTED); // TODO
        return false;
    }

    /** Always false while the native binding is disabled (TODO). */
    @Override
    public boolean isConnected() {
        // return (0 != schaugenau.eyecontrol.EyeXUtil.isTrackerConnected()); // TODO
        return false;
    }
}
| |
package fimEntityResolution;
import dnl.utils.text.table.TextTable;
import fimEntityResolution.entityResulution.EntityResolutionFactory;
import fimEntityResolution.entityResulution.EntityResulutionComparisonType;
import fimEntityResolution.entityResulution.IComparison;
import fimEntityResolution.statistics.*;
import fimEntityResolution.statistics.Timer;
import il.ac.technion.ie.context.MfiContext;
import il.ac.technion.ie.data.structure.BitMatrix;
import il.ac.technion.ie.model.*;
import il.ac.technion.ie.output.writers.Writer;
import il.ac.technion.ie.potential.model.AdjustedMatrix;
import il.ac.technion.ie.potential.model.BlockPotential;
import il.ac.technion.ie.potential.model.SharedMatrix;
import il.ac.technion.ie.potential.service.PotentialService;
import il.ac.technion.ie.potential.service.iPotentialService;
import il.ac.technion.ie.search.core.SearchEngine;
import il.ac.technion.ie.search.module.ComparisonInteraction;
import il.ac.technion.ie.service.BlockService;
import il.ac.technion.ie.service.iBlockService;
import il.ac.technion.ie.types.Alg;
import il.ac.technion.ie.types.MFISetsCheckConfiguration;
import il.ac.technion.ie.utils.FrequentItemsetContext;
import il.ac.technion.ie.utils.Utilities;
import org.apache.log4j.Logger;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.*;
import java.util.Map.Entry;
public class BottomUp {
static final Logger logger = Logger.getLogger(BottomUp.class);
// directory the frequent-itemset (MFI) runs write their output files into
private final static String FI_DIR = "FIs";
// scratch directory for the temporary record files fed to the MFI algorithm
private final static String TEMP_RECORD_DIR = "TEMP_RECORD_DIR";
private final static File TempDir = new File(TEMP_RECORD_DIR);
// maximum support constant; previous experimental value kept in the trailing comment
private final static double MAX_SUPP_CONST = 1.0;//0.005;
// bit i is set once record i has been covered by some block (bit 0 unused — ids are 1-based)
public static BitSet coveredRecords= null;
public static String srcFile = null;
// neighborhood-growth limit currently in effect; overwritten per NG input value
private static double NG_LIMIT = 3;
// blocking threshold produced by the most recent readFIs run
private static double lastUsedBlockingThreshold;
// run configuration parsed from the command line in readArguments()
private static MfiContext context;
/**
 * The Main of the MFIBlocking Algorithm.
 *
 * Positional parameters:
 * 0. Configuration - SPARK or DEFAULT
 * 1. Path to the lexicon file created in the previous stage (out parameter 6).
 * 2. The dataset of item ids created in the previous stage (out parameter 2).
 * 3. The set of min_th parameters to run on; providing 0 as the only value effectively eliminates the threshold.
 * 4. Path to the generated match file (out parameter 3).
 * 5. Path to the debug file containing the items themselves (out parameter 7).
 * 6. The set of min supports to use.
 * 7. Must be set to MFI.
 * 8. The set of p parameters to use as the Neighborhood Growth constraints.
 * 9. Blocks output format [S,Path], [B], [N] (S-statistics+blocks with a path of original CSV, B-only blocks, N-no print)
 * 10. Size of the first dataset (optional, only for 2 and more dataset files)
 *
 * @param args positional arguments, consumed by {@link #readArguments(String[])}
 */
public static void main(String[] args) {
    context = readArguments(args);
    //StringSimToolsLocal.init(context);
    enterPerformanceModeIfNeeded(context.isInPerformanceMode());
    createSparkContext(context.getConfig());

    System.out.println("Entered Main");
    System.out.println("Working dir: " + new File(".").getAbsolutePath());
    System.out.println("args.length : " + args.length);
    System.out.println("Main srcFile : " + srcFile);

    // phase 1: load the record set
    long stopwatch = System.currentTimeMillis();
    RecordSet.readRecords(context);
    int recordCount = RecordSet.DB_SIZE;
    System.out.println("After reading records numOfRecords=" + recordCount);
    System.out.println("Time to read records " + (System.currentTimeMillis() - stopwatch) / 1000.0 + " seconds");

    // phase 2: load the lexicon
    stopwatch = System.currentTimeMillis();
    Utilities.parseLexiconFile(context.getLexiconFile(), context.getPrntFormat());
    System.out.println("Time to read items (lexicon) " + (System.currentTimeMillis() - stopwatch) / 1000.0 + " seconds");

    // phase 3: run the blocking algorithm itself
    stopwatch = System.currentTimeMillis();
    mfiBlocksCore();
    System.out.println("Total time for algorithm " + (System.currentTimeMillis() - stopwatch) / 1000.0 + " seconds");
}
/**
 * Applies the JVM system properties needed when running under Spark.
 * No-op for the default (non-Spark) configuration.
 *
 * @param config selected execution configuration
 */
private static void createSparkContext(MFISetsCheckConfiguration config) {
    if (config.equals(MFISetsCheckConfiguration.SPARK)) {
        System.setProperty("spark.executor.memory", "2g");
        System.setProperty("hadoop.home.dir", "C:\\workspace\\winutils\\");
        // request a collection before the heavy lifting starts
        Runtime.getRuntime().gc();
    }
}
/**
 * When profiling mode is requested, blocks on stdin so the user can attach
 * a profiler before the run continues.
 *
 * @param inPerformanceMode whether the profiling pause was requested
 */
private static void enterPerformanceModeIfNeeded(boolean inPerformanceMode) {
    if (!inPerformanceMode) {
        return;
    }
    // fixed typo in the user-facing message ("performce" -> "performance")
    System.out.println("You have started the application in profiling mode for performance");
    System.out.println("Start your profiler and then hit any key on the console");
    try {
        System.in.read();
    } catch (IOException e) {
        // best-effort pause: a failed stdin read should not abort the run
        e.printStackTrace();
    }
}
/**
 * Parses the positional command-line arguments into an {@link MfiContext}.
 *
 * Expected layout (example values):
 * <pre>
 * 0  SPARK                        configuration (SPARK or DEFAULT)
 * 1  datasets/lexicon.txt         lexicon file
 * 2  datasets/ids.txt             records file (item ids)
 * 3  0,0.05,0.1,...,1             min blocking thresholds
 * 4  datasets/matching.txt        match file
 * 5  datasets/NoSW.txt            original records without commas
 * 6  .../originalRecordsFile.csv  original records path
 * 7  datasetName
 * 8  2                            min supports
 * 9  1.5,2,3,5,10,20              neighborhood growths (NG)
 * 10 N                            print format
 * </pre>
 *
 * @param args raw command-line arguments
 * @return populated context; the algorithm is fixed to MFI
 */
private static MfiContext readArguments(String[] args) {
    MfiContext ctx = new MfiContext();
    ctx.setConfiguration(args[0]);
    ctx.setLexiconFile(args[1]);
    ctx.setRecordsFile(args[2]);
    ctx.setMinBlockingThresholds(args[3]);
    ctx.setMatchFile(args[4]);
    ctx.setOrigRecordsFileWithoutCommas(args[5]);
    ctx.setOriginalRecordsPath(args[6]);
    ctx.setDatasetName(args[7]);
    ctx.setMinSup(args[8]);
    ctx.setAlgorithm(Alg.MFI);
    ctx.setNGs(args[9]);
    ctx.setFirstDbSize(args);     // optional value parsed from the whole array
    ctx.setPerformanceFlag(args); // optional flag parsed from the whole array
    ctx.setPrintFormat(args[10]);
    return ctx;
}
/**
 * Core of the MFIBlocks algorithm.
 *
 * For every neighborhood-growth limit and every minimum blocking threshold
 * from the context: runs the blocking pipeline, compares its output against
 * the ground-truth match file, and collects one {@link BlockingRunResult}
 * per combination. All results are printed at the end.
 */
public static void mfiBlocksCore() {
    int recordsSize = RecordSet.size;
    System.out.println("order of minsups used: " + Arrays.toString(context.getMinSup()));
    List<BlockingRunResult> blockingRunResults = new ArrayList<>();
    //iterate for each neighborhood grow value that was set in input
    double[] neighborhoodGrowth = context.getNeighborhoodGrowth();
    SearchEngine engine = createAndInitSearchEngine(context.getRecordsFile());
    IComparison comparison = EntityResolutionFactory.createComparison(EntityResulutionComparisonType.Jaccard, engine);
    for (double neighborhoodGrow : neighborhoodGrowth) {
        NG_LIMIT = neighborhoodGrow;
        double[] minBlockingThresholds = context.getMinBlockingThresholds();
        for (double minBlockingThreshold : minBlockingThresholds) { // test for each minimum blocking threshold
            // reset coverage for this run; bit 0 is set because record ids start at 1
            coveredRecords = new BitSet(recordsSize + 1);
            coveredRecords.set(0, true); // no such record
            logger.info("running iterative " + context.getAlgName() + "s with minimum blocking threshold " + minBlockingThreshold +
                    " and NGLimit: " + NG_LIMIT);
            System.out.println("running iterative " + context.getAlgName() + "s with minimum blocking threshold " + minBlockingThreshold +
                    " and NGLimit: " + NG_LIMIT);
            Timer timer = new Timer();
            //obtain all the clusters that has the minimum score
            CandidatePairs algorithmObtainedPairs = getClustersToUse(context, minBlockingThreshold);
            // measure how long writing the blocks and their potentials takes
            timer.startActionTimeMeassurment();
            List<Block> algorithmBlocks = findBlocks(algorithmObtainedPairs, true, recordsSize);
            Writer.printNeighborsAndBlocks(algorithmObtainedPairs, context, algorithmBlocks);
            Map<Integer, List<BlockDescriptor>> blocksAmbiguousRepresentatives = findBlocksAmbiguousRepresentatives(algorithmBlocks, context);
            Writer.printAmbiguousRepresentatives(blocksAmbiguousRepresentatives, context);
            //Fetching and printing local potential from algorithmBlocks
            iPotentialService potentialService = new PotentialService();
            List<BlockPotential> localPotential = potentialService.getLocalPotential(algorithmBlocks);
            AdjustedMatrix adjustedMatrix = potentialService.getAdjustedMatrix(algorithmBlocks);
            List<SharedMatrix> sharedMatrices = potentialService.getSharedMatrices(algorithmBlocks);
            Writer.printBlockPotential(localPotential, adjustedMatrix, sharedMatrices, context);
            long writeBlocksDuration = timer.getActionTimeDuration();
            // measure how long the ground-truth comparison (max recall) takes
            timer.startActionTimeMeassurment();
            TrueClusters trueClusters = new TrueClusters();
            trueClusters.findClustersAssingments(context.getMatchFile());
            List<Block> trueBlocks = findBlocks(trueClusters.getGroundTruthCandidatePairs(), false, recordsSize);
            NonBinaryResults nonBinaryResults = new NonBinaryResults(algorithmBlocks, trueBlocks);
            ExperimentResult experimentResult = new ExperimentResult(trueClusters, algorithmObtainedPairs, recordsSize);
            StatisticMeasurements results = experimentResult.calculate();
            long totalMaxRecallCalculationDuration = timer.getActionTimeDuration();
            long timeOfERComparison = comparison.measureComparisonExecution(algorithmObtainedPairs);
            // net execution time excludes the two measurement-only phases above
            double executionTime = calcExecutionTime(timer.getStartTime(), totalMaxRecallCalculationDuration, writeBlocksDuration);
            BlockingResultContext resultContext = new BlockingResultContext(results, nonBinaryResults,
                    minBlockingThreshold, lastUsedBlockingThreshold, NG_LIMIT,
                    executionTime, Utilities.convertToSeconds(timeOfERComparison));
            BlockingRunResult blockingRR = new BlockingRunResult(resultContext);
            blockingRunResults.add(blockingRR);
            System.out.println("");
            System.out.println("");
        }
    }
    if (!blockingRunResults.isEmpty()) {
        printExperimentMeasurments(blockingRunResults);
        String resultsString = writeBlockingRR(blockingRunResults);
        System.out.println();
        System.out.println(resultsString);
    }
    else {
        System.out.println("Under current configuration, no clustering were achieved!!");
    }
}
/**
 * Finds, per block id, the representatives that are ambiguous
 * (appear as representative in more than one block).
 *
 * @param algorithmBlocks blocks produced by the algorithm
 * @param context         run configuration
 * @return mapping from block id to its ambiguous representatives
 */
private static Map<Integer, List<BlockDescriptor>> findBlocksAmbiguousRepresentatives(List<Block> algorithmBlocks, MfiContext context) {
    iBlockService service = new BlockService();
    return service.findAmbiguousRepresentatives(algorithmBlocks, context);
}
/**
 * Groups candidate pairs into blocks.
 *
 * @param candidatePairs     pairs to group
 * @param isAlgorithmResults true for algorithm output (match probabilities are
 *                           computed on the blocks), false for ground truth
 *                           (blocks are marked as true matches instead)
 * @param recordsSize        total number of records
 * @return the resulting blocks
 */
private static List<Block> findBlocks(CandidatePairs candidatePairs, boolean isAlgorithmResults, int recordsSize) {
    iBlockService service = new BlockService();
    List<Block> resultBlocks = service.getBlocks(candidatePairs, recordsSize);
    if (isAlgorithmResults) {
        service.calcProbOnBlocks(resultBlocks, context);
    } else {
        service.setTrueMatch(resultBlocks);
    }
    return resultBlocks;
}
/**
 * Computes the run's net execution time in seconds: total elapsed time since
 * {@code start}, minus the measurement-only durations.
 *
 * @param start                             run start (millis epoch)
 * @param totalMaxRecallCalculationDuration time spent on the ground-truth comparison (ms)
 * @param writeBlocksDuration               time spent writing blocks (ms)
 * @return net execution time in seconds
 */
private static double calcExecutionTime(long start,
        long totalMaxRecallCalculationDuration, long writeBlocksDuration) {
    long elapsed = System.currentTimeMillis() - start;
    long netRunTime = reduceIrrelevantTimes(elapsed, totalMaxRecallCalculationDuration, writeBlocksDuration);
    return Utilities.convertToSeconds(netRunTime);
}
/**
 * Subtracts the measurement-only durations from the total run time.
 *
 * @return total run time minus the two excluded durations (ms)
 */
private static long reduceIrrelevantTimes(long totalRunTime,
        long totalMaxRecallCalculationDuration, long writeBlocksDuration) {
    long excluded = totalMaxRecallCalculationDuration + writeBlocksDuration;
    return totalRunTime - excluded;
}
/**
 * Builds a {@link SearchEngine} and loads all records from the given file.
 *
 * @param recordsFile path of the records file to index
 * @return initialized search engine
 */
private static SearchEngine createAndInitSearchEngine(String recordsFile) {
    SearchEngine searchEngine = new SearchEngine(new ComparisonInteraction());
    searchEngine.addRecords(recordsFile);
    return searchEngine;
}
/**
 * Renders the collected run results as a numbered text table on stdout.
 *
 * @param blockingRunResults results of all executed blocking runs
 */
private static void printExperimentMeasurments(List<BlockingRunResult> blockingRunResults) {
    // with no results there are no column headers either
    String[] header = blockingRunResults.isEmpty()
            ? new String[]{}
            : BlockingRunResult.getColumnsNames();
    Object[][] tableRows = new Object[blockingRunResults.size()][header.length];
    for (int row = 0; row < blockingRunResults.size(); row++) {
        tableRows[row] = blockingRunResults.get(row).getValues();
    }
    TextTable textTable = new TextTable(header, tableRows);
    textTable.setAddRowNumbering(true);
    textTable.printTable();
}
/**
 * Runs the MFI algorithm once per minimum support (largest first) and
 * accumulates all candidate pairs until every record is covered or the
 * supports are exhausted. In multi-dataset mode, pairs whose two records
 * come from the same dataset are dropped at the end.
 *
 * @param context              run configuration
 * @param minBlockingThreshold minimum blocking threshold for this run
 * @return all candidate pairs collected across the minimum-support iterations
 */
private static CandidatePairs getClustersToUse(MfiContext context, double minBlockingThreshold) {
    coveredRecords.set(0, true); // no such record (ids are 1-based)
    int[] minimumSupports = context.getMinSup();
    double[] usedThresholds = new double[minimumSupports.length];
    File mfiDir = new File(FI_DIR);
    if (!mfiDir.exists()) {
        if (!mfiDir.mkdir()) {
            logger.error("Failed to create directory " + mfiDir.getAbsolutePath());
            System.out.println("Failed to create directory " + mfiDir.getAbsolutePath());
        }
    }
    CandidatePairs allResults = new CandidatePairs(); //unlimited
    /*
     * minimumSupports is sorted ascending: begin with the largest minSup and
     * continue until all records have been covered OR all minSups were used.
     */
    for (int i = (minimumSupports.length - 1); i >= 0 && coveredRecords.cardinality() < RecordSet.size; i--) {
        long start = System.currentTimeMillis();
        //TODO: check content of file
        File uncoveredRecordsFile = createRecordFileFromRecords(coveredRecords, minimumSupports[i]);
        logProgress("Time to createRecordFileFromRecords" + Double.toString((double) (System.currentTimeMillis() - start) / 1000.0));
        start = System.currentTimeMillis();
        File mfiFile = Utilities.RunMFIAlg(minimumSupports[i], uncoveredRecordsFile.getAbsolutePath(), mfiDir);
        logProgress("Time to run MFI with minsup=" + minimumSupports[i] +
                " on table of size " + (RecordSet.size - coveredRecords.cardinality()) +
                " is " + Double.toString((double) (System.currentTimeMillis() - start) / 1000.0));
        start = System.currentTimeMillis();
        FrequentItemsetContext itemsetContext = createFrequentItemsetContext(mfiFile.getAbsolutePath(), minBlockingThreshold, minimumSupports[i], context);
        CandidatePairs candidatePairs;
        if (MFISetsCheckConfiguration.SPARK.equals(context.getConfig())) {
            candidatePairs = SparkBlocksReader.readFIs(itemsetContext);
        } else {
            candidatePairs = Utilities.readFIs(itemsetContext);
        }
        logProgress("Time to read MFIs: " + Double.toString((double) (System.currentTimeMillis() - start) / 1000.0) + " seconds");
        start = System.currentTimeMillis();
        BitMatrix coverageMatrix = candidatePairs.exportToBitMatrix();
        updateCoveredRecords(coveredRecords, coverageMatrix.getCoveredRows());
        logProgress("Time to updateCoveredRecords " + Double.toString((double) (System.currentTimeMillis() - start) / 1000.0) + " seconds");
        start = System.currentTimeMillis();
        updateCandidatePairs(allResults, candidatePairs);
        logProgress("Time to updateBlockingEfficiency " + Double.toString((double) (System.currentTimeMillis() - start) / 1000.0) + " seconds");
        usedThresholds[i] = candidatePairs.getMinThresh();
        lastUsedBlockingThreshold = candidatePairs.getMinThresh();
        logProgress("lastUsedBlockingThreshold: " + lastUsedBlockingThreshold);
        logProgress("Number of covered records after running with Minsup=" +
                minimumSupports[i] + " is " + coveredRecords.cardinality() + " out of " + RecordSet.size);
    }
    logProgress("Minsups used " + Arrays.toString(minimumSupports));
    // FIX: the percentage below previously used integer division
    // (cardinality() / RecordSet.size), which always printed 0 (or 100);
    // compute it in floating point instead.
    logProgress("Total number of covered records under minimum blocking threshold " + minBlockingThreshold +
            " and minsups " + Arrays.toString(minimumSupports) + " is: " + coveredRecords.cardinality() + " out of " + RecordSet.size +
            " which are: " + (100.0 * coveredRecords.cardinality() / RecordSet.size) + "%");
    logProgress("After adding uncovered records: Total number of covered records under blocking threshold " + minBlockingThreshold +
            " and minsups " + Arrays.toString(minimumSupports) + " is: " + coveredRecords.cardinality() + " out of " + RecordSet.size +
            " which are: " + (100.0 * coveredRecords.cardinality() / RecordSet.size) + "%");
    int firstDbSize = context.getFirstDbSize();
    if (firstDbSize > 0) {
        // multi-dataset mode: exclude pairs whose records come from the same dataset
        allResults = removePairsSameSet(allResults, firstDbSize);
    }
    return allResults;
}
/**
 * Logs a progress message to both log4j and stdout.
 * The stdout duplication stays until logging is fully adopted in this code base.
 *
 * @param message text to log
 */
private static void logProgress(String message) {
    logger.info(message);
    System.out.println(message);
}
/**
 * Assembles the context object handed to the frequent-itemset reader.
 *
 * @param absolutePath         path of the MFI output file to read
 * @param minBlockingThreshold minimum blocking threshold for this run
 * @param minimumSupport       minimum support the MFI run was executed with
 * @param mfiContext           overall run configuration
 * @return populated frequent-itemset context (also carries the global items
 *         map and the current neighborhood-growth limit)
 */
private static FrequentItemsetContext createFrequentItemsetContext(
        String absolutePath, double minBlockingThreshold, int minimumSupport,
        MfiContext mfiContext) {
    FrequentItemsetContext fiContext = new FrequentItemsetContext();
    fiContext.setAbsolutePath(absolutePath);
    fiContext.setMinBlockingThreshold(minBlockingThreshold);
    fiContext.setMinimumSupport(minimumSupport);
    fiContext.setMfiContext(mfiContext);
    fiContext.setGolbalItemsMap(Utilities.globalItemsMap);
    fiContext.setNeighborhoodGrowthLimit(NG_LIMIT);
    return fiContext;
}
/**
 * Filters out candidate pairs whose two records come from the same dataset.
 * Records with id below {@code firstDbSize} belong to the first dataset,
 * above it to the second; a record exactly at the boundary is never excluded.
 *
 * @param actualCPs   all candidate pairs found by the algorithm
 * @param firstDbSize number of records in the first dataset
 * @return a new {@link CandidatePairs} holding only cross-dataset pairs
 */
private static CandidatePairs removePairsSameSet(CandidatePairs actualCPs, int firstDbSize) {
    System.out.println("Excluding pairs if records from the same set..");
    CandidatePairs updatedPairs = new CandidatePairs();
    long start = System.currentTimeMillis();
    for (Entry<Integer, RecordMatches> entry : actualCPs.getAllMatches().entrySet()) { // every record
        int recordId = entry.getKey();
        for (CandidateMatch cm : entry.getValue().getCandidateMatches()) { // its candidate matches
            int matchId = cm.getRecordId();
            boolean bothInFirst = recordId < firstDbSize && matchId < firstDbSize;
            boolean bothInSecond = recordId > firstDbSize && matchId > firstDbSize;
            // keep only pairs that span the two datasets
            if (!bothInFirst && !bothInSecond) {
                updatedPairs.setPair(recordId, matchId, actualCPs.getMinThresh());
            }
        }
    }
    System.out.println("Time exclude pairs : " + Double.toString((double) (System.currentTimeMillis() - start) / 1000.0) + " seconds");
    return updatedPairs;
}
/**
 * Marks every record flagged in {@code coveredRows} as covered,
 * merging it into the running coverage set in place.
 *
 * @param coveredRecords running coverage set (mutated)
 * @param coveredRows    records covered by the latest iteration
 */
private static void updateCoveredRecords(BitSet coveredRecords, BitSet coveredRows){
    coveredRecords.or(coveredRows);
}
/**
 * Writes every record not yet covered (clear bits in {@code coveredRecords})
 * to a temporary file, one numeric line per record, restricted to the items
 * that survive support-based pruning (see {@code appitems}).
 *
 * Fixes over the previous version: if the temp file cannot be created we now
 * return null instead of falling through to an NPE on {@code deleteOnExit()},
 * and the writer is managed with try-with-resources instead of a manual
 * flush/close that NPE'd when the writer failed to open.
 *
 * @param coveredRecords bit i set means record i is already covered and skipped
 * @param minSup         minimum support used when pruning overly frequent items
 * @return the temp file containing the remaining records, or null on failure
 */
private static File createRecordFileFromRecords(BitSet coveredRecords, int minSup) {
    System.out.println("Directory TempDir= " + TempDir + " TempDir.getAbsolutePath()" + TempDir.getAbsolutePath());
    if (!TempDir.exists() && !TempDir.mkdir()) {
        System.out.println("failed to create directory " + TempDir.getAbsolutePath());
    }
    File outputFle;
    try {
        outputFle = File.createTempFile("records", null, TempDir);
        System.out.println("retVal= " + outputFle + " retVal.getAbsolutePath()=" + outputFle.getAbsolutePath());
    } catch (IOException e1) {
        e1.printStackTrace();
        return null; // previously fell through and NPE'd on the null file
    }
    outputFle.deleteOnExit();
    if (outputFle.exists()) {
        System.out.println("File " + outputFle.getAbsolutePath() + " exists right after deleteOnExit");
    }
    Map<Integer, Integer> appItems = appitems(coveredRecords, minSup);
    int numOfWrittenLines = 0;
    try {
        // Recreate the file empty, then stream the uncovered records into it.
        outputFle.delete();
        outputFle.createNewFile();
        try (BufferedWriter writer = new BufferedWriter(new FileWriter(outputFle))) {
            // NOTE(review): the bound "i <= RecordSet.size" looks off by one if
            // RecordSet.size is a record count — confirm RecordSet indexing.
            for (int i = coveredRecords.nextClearBit(0); i >= 0 && i <= RecordSet.size; i = coveredRecords.nextClearBit(i + 1)) {
                MfiRecord currMfiRecord = RecordSet.values.get(i);
                writer.write(currMfiRecord.getNumericline(appItems.keySet()));
                writer.newLine();
                numOfWrittenLines++;
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        System.out.println("Number of records written: " + numOfWrittenLines);
    }
    return outputFle;
}
/**
 * Counts, over all not-yet-covered records, how many records contain each item,
 * then prunes items that are too frequent (support above
 * {@code minSup * dbSize * MAX_SUPP_CONST}) when the remaining DB is large
 * (more than 10000 uncovered records).
 *
 * @param coveredRecords bit i set means record i is covered and is not counted
 * @param minSup         minimum support parameter of the current run
 * @return map from item id to its support among uncovered records, pruned
 */
private static Map<Integer, Integer> appitems(BitSet coveredRecords, int minSup) {
    Map<Integer, Integer> itemSupport = new HashMap<>();
    // Accumulate per-item support across all uncovered records.
    for (int rec = coveredRecords.nextClearBit(0); rec >= 0 && rec <= RecordSet.size; rec = coveredRecords.nextClearBit(rec + 1)) {
        MfiRecord record = RecordSet.values.get(rec);
        for (Integer item : record.getItemsToFrequency().keySet()) {
            Integer seen = itemSupport.get(item);
            itemSupport.put(item, seen == null ? 1 : seen + 1);
        }
    }
    int origSize = itemSupport.size();
    System.out.println("Number of items before pruning too frequent items: " + origSize);
    double DBSize = RecordSet.size - coveredRecords.cardinality();
    if (DBSize > 10000) {
        double removal = ((double) minSup) * DBSize * MAX_SUPP_CONST;
        Iterator<Entry<Integer, Integer>> it = itemSupport.entrySet().iterator();
        while (it.hasNext()) {
            if (it.next().getValue() > removal) {
                it.remove();
            }
        }
    }
    System.out.println("Number of items AFTER pruning too frequent items: " + itemSupport.size());
    System.out.println("A total of : " + (origSize - itemSupport.size()) + " items were pruned");
    return itemSupport;
}
/** Folds the coverage pairs of the current iteration into the overall result set. */
private static void updateCandidatePairs(CandidatePairs allResults, final CandidatePairs coveragePairs) {
    allResults.addAll(coveragePairs);
}
/**
 * Renders the blocking run results as text: the average/min/max summary across
 * all runs, padded with a blank line before and two after.
 *
 * @param runResults results of the individual blocking runs
 * @return the formatted summary string
 */
public static String writeBlockingRR(Collection<BlockingRunResult> runResults) {
    // Summary aggregates average, min and max over all runs.
    BlockingResultsSummary summary = new BlockingResultsSummary(runResults);
    return Utilities.NEW_LINE
            + summary.getSummary() + Utilities.NEW_LINE
            + Utilities.NEW_LINE + Utilities.NEW_LINE;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal;
import java.util.BitSet;
import java.util.Collection;
import org.apache.ignite.IgniteEncryption;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.cluster.ClusterState;
import org.apache.ignite.internal.managers.discovery.IgniteDiscoverySpi;
import org.apache.ignite.internal.managers.encryption.GridEncryptionManager;
import org.apache.ignite.spi.communication.tcp.TcpCommunicationSpi;
import org.apache.ignite.spi.communication.tcp.messages.HandshakeWaitMessage;
import org.apache.ignite.spi.discovery.DiscoverySpi;
import static org.apache.ignite.IgniteSystemProperties.IGNITE_PME_FREE_SWITCH_DISABLED;
import static org.apache.ignite.IgniteSystemProperties.getBoolean;
import static org.apache.ignite.internal.IgniteNodeAttributes.ATTR_IGNITE_FEATURES;
/**
 * Defines the set of features supported by the local node and provides helpers
 * to check whether remote nodes declare support for a given feature.
 *
 * Feature ids are bit positions in the {@code ATTR_IGNITE_FEATURES} node
 * attribute (a {@link BitSet} serialized to a byte array).
 */
public enum IgniteFeatures {
    /**
     * Support of {@link HandshakeWaitMessage} by {@link TcpCommunicationSpi}.
     */
    TCP_COMMUNICATION_SPI_HANDSHAKE_WAIT_MESSAGE(0),
    /** Cache metrics v2 support. */
    CACHE_METRICS_V2(1),
    /** Data packet compression. */
    DATA_PACKET_COMPRESSION(3),
    /** Support of different rebalance size for nodes. */
    DIFFERENT_REBALANCE_POOL_SIZE(4),
    /** Support of split cache configurations to avoid broken deserialization on non-affinity nodes. */
    SPLITTED_CACHE_CONFIGURATIONS(5),
    /**
     * Support of providing thread dump of thread that started transaction. Used for dumping
     * long running transactions.
     */
    TRANSACTION_OWNER_THREAD_DUMP_PROVIDING(6),
    /** Displaying verbose transaction information: --info option of --tx control script command. */
    TX_INFO_COMMAND(7),
    /** Command which allows to detect and clean up garbage that could be left after destroying caches in shared groups. */
    FIND_AND_DELETE_GARBAGE_COMMAND(8),
    /** Support of cluster read-only mode. */
    CLUSTER_READ_ONLY_MODE(9),
    /** Support of suspend/resume operations for pessimistic transactions. */
    SUSPEND_RESUME_PESSIMISTIC_TX(10),
    /** Distributed metastorage. */
    DISTRIBUTED_METASTORAGE(11),
    /** The node can communicate with others via socket channel. */
    CHANNEL_COMMUNICATION(12),
    /** Replacing TcpDiscoveryNode field with nodeId field in discovery messages. */
    TCP_DISCOVERY_MESSAGE_NODE_COMPACT_REPRESENTATION(14),
    /** LRT system and user time dump settings. */
    LRT_SYSTEM_USER_TIME_DUMP_SETTINGS(18),
    /** Partition Map Exchange-free switch on baseline node left at fully rebalanced cluster. */
    PME_FREE_SWITCH(19),
    /** Master key change. See {@link GridEncryptionManager#changeMasterKey(String)}. */
    MASTER_KEY_CHANGE(20),
    /** ContinuousQuery with security subject id support. */
    CONT_QRY_SECURITY_AWARE(21),
    /**
     * Preventing loss of in-memory data when deactivating the cluster.
     *
     * @see ClusterState#INACTIVE
     */
    SAFE_CLUSTER_DEACTIVATION(22),
    /** Persistent caches can be snapshotted. */
    PERSISTENCE_CACHE_SNAPSHOT(23),
    /** Tracing. */
    TRACING(26),
    /** Distributed change timeout for dump long operations. */
    DISTRIBUTED_CHANGE_LONG_OPERATIONS_DUMP_TIMEOUT(30),
    /** New region for volatile data. */
    VOLATILE_DATA_STRUCTURES_REGION(33),
    /** Check secondary indexes inline size on join/by control utility request. */
    CHECK_INDEX_INLINE_SIZES(36),
    /** Distributed propagation of tx collisions dump interval. */
    DISTRIBUTED_TX_COLLISIONS_DUMP(37),
    /** Remove metadata from cluster for specified type. */
    REMOVE_METADATA(39),
    /** Support policy of shutdown. */
    SHUTDOWN_POLICY(40),
    /** Force rebuild, list or request indexes rebuild status from control script. */
    INDEXES_MANIPULATIONS_FROM_CONTROL_SCRIPT(42),
    /** Optimization of recovery protocol for cluster which doesn't contain MVCC caches. */
    MVCC_TX_RECOVERY_PROTOCOL_V2(44),
    /** Pk index keys are applied in correct order. */
    SPECIFIED_SEQ_PK_KEYS(45),
    /** Compatibility support for new fields which are configured split. */
    SPLITTED_CACHE_CONFIGURATIONS_V2(46),
    /** Cache encryption key change. See {@link IgniteEncryption#changeCacheGroupKey(Collection)}. */
    CACHE_GROUP_KEY_CHANGE(47),
    /** Collecting performance statistics. */
    PERFORMANCE_STATISTICS(48),
    /** Restore cache group from the snapshot. */
    SNAPSHOT_RESTORE_CACHE_GROUP(49);
    /**
     * Unique feature identifier (bit index in the serialized features attribute).
     */
    private final int featureId;
    /**
     * @param featureId Feature ID.
     */
    IgniteFeatures(int featureId) {
        this.featureId = featureId;
    }
    /**
     * @return Feature ID.
     */
    public int getFeatureId() {
        return featureId;
    }
    /**
     * Checks that the feature is supported by the given node.
     *
     * @param clusterNode Cluster node to check.
     * @param feature Feature to check.
     * @return {@code True} if feature is declared to be supported by remote node.
     */
    public static boolean nodeSupports(ClusterNode clusterNode, IgniteFeatures feature) {
        final byte[] features = clusterNode.attribute(ATTR_IGNITE_FEATURES);
        // Nodes that predate the features attribute are treated as supporting nothing.
        if (features == null)
            return false;
        return nodeSupports(features, feature);
    }
    /**
     * Checks that the feature is supported, given a node's serialized features attribute.
     *
     * @param featuresAttrBytes Byte array value of supported features node attribute.
     * @param feature Feature to check.
     * @return {@code True} if feature is declared to be supported by remote node.
     */
    public static boolean nodeSupports(byte[] featuresAttrBytes, IgniteFeatures feature) {
        int featureId = feature.getFeatureId();
        // Same as "BitSet.valueOf(features).get(featureId)" but without allocating a BitSet.
        int byteIdx = featureId >>> 3;
        // BitSet.toByteArray() drops trailing zero bytes, so out-of-range means "not set".
        if (byteIdx >= featuresAttrBytes.length)
            return false;
        int bitIdx = featureId & 0x7;
        return (featuresAttrBytes[byteIdx] & (1 << bitIdx)) != 0;
    }
    /**
     * Checks that the feature is supported by all given nodes.
     *
     * @param nodes cluster nodes to check their feature support.
     * @return if feature is declared to be supported by all nodes
     */
    public static boolean allNodesSupports(Iterable<ClusterNode> nodes, IgniteFeatures feature) {
        for (ClusterNode next : nodes) {
            if (!nodeSupports(next, feature))
                return false;
        }
        return true;
    }
    /**
     * Check that feature is supported by all remote nodes.
     *
     * @param discoSpi Discovery SPI implementation.
     * @param feature Feature to check.
     * @return {@code True} if all remote nodes support the feature.
     */
    public static boolean allNodesSupport(
        DiscoverySpi discoSpi,
        IgniteFeatures feature
    ) {
        // Prefer the SPI's own (ring-wide) check when available.
        if (discoSpi instanceof IgniteDiscoverySpi)
            return ((IgniteDiscoverySpi)discoSpi).allNodesSupport(feature);
        else
            return allNodesSupports(discoSpi.getRemoteNodes(), feature);
    }
    /**
     * Features supported by the current node.
     *
     * @return Byte array representing all supported features by current node.
     */
    public static byte[] allFeatures() {
        final BitSet set = new BitSet();
        for (IgniteFeatures value : IgniteFeatures.values()) {
            // PME-free switch can be disabled explicitly via system property.
            if (value == PME_FREE_SWITCH && getBoolean(IGNITE_PME_FREE_SWITCH_DISABLED))
                continue;
            final int featureId = value.getFeatureId();
            assert !set.get(featureId) : "Duplicate feature ID found for [" + value + "] having same ID ["
                + featureId + "]";
            set.set(featureId);
        }
        return set.toByteArray();
    }
}
| |
/*
* Copyright (C) 2010 The MobileSecurePay Project
* All right reserved.
* author: shiqun.shi@alipay.com
*/
package sivl.platform.pay.sdk.alipay.common.sign;
/**
 * Minimal standard-alphabet Base64 codec with no external dependencies.
 *
 * Contract kept identical to the original MobileSecurePay helper:
 * {@link #encode(byte[])} returns null for null input and "" for empty input;
 * {@link #decode(String)} returns null for null or any malformed input
 * (wrong length, non-alphabet characters, invalid padding) instead of throwing.
 *
 * Cleanups over the original: removed dead debug code behind a constant-false
 * flag, C-style array declarations, and redundant null-then-assign locals.
 */
public final class Base64 {
    /** Size of the decode table (covers 7-bit ASCII). */
    private static final int BASELENGTH = 128;
    /** Size of the encode table (one entry per 6-bit value). */
    private static final int LOOKUPLENGTH = 64;
    /** Bits per full input group of three bytes. */
    private static final int TWENTYFOURBITGROUP = 24;
    private static final int EIGHTBIT = 8;
    private static final int SIXTEENBIT = 16;
    /** Characters per output group. */
    private static final int FOURBYTE = 4;
    /** Sign bit of a byte; used to emulate an unsigned right shift on bytes. */
    private static final int SIGN = -128;
    private static final char PAD = '=';
    /** Maps an ASCII char to its 6-bit value, or -1 if not a Base64 character. */
    private static final byte[] base64Alphabet = new byte[BASELENGTH];
    /** Maps a 6-bit value to its Base64 character. */
    private static final char[] lookUpBase64Alphabet = new char[LOOKUPLENGTH];
    static {
        for (int i = 0; i < BASELENGTH; ++i) {
            base64Alphabet[i] = -1;
        }
        for (int i = 'Z'; i >= 'A'; i--) {
            base64Alphabet[i] = (byte) (i - 'A');
        }
        for (int i = 'z'; i >= 'a'; i--) {
            base64Alphabet[i] = (byte) (i - 'a' + 26);
        }
        for (int i = '9'; i >= '0'; i--) {
            base64Alphabet[i] = (byte) (i - '0' + 52);
        }
        base64Alphabet['+'] = 62;
        base64Alphabet['/'] = 63;
        for (int i = 0; i <= 25; i++) {
            lookUpBase64Alphabet[i] = (char) ('A' + i);
        }
        for (int i = 26, j = 0; i <= 51; i++, j++) {
            lookUpBase64Alphabet[i] = (char) ('a' + j);
        }
        for (int i = 52, j = 0; i <= 61; i++, j++) {
            lookUpBase64Alphabet[i] = (char) ('0' + j);
        }
        lookUpBase64Alphabet[62] = '+';
        lookUpBase64Alphabet[63] = '/';
    }
    /** @return true for space, CR, LF and TAB. */
    private static boolean isWhiteSpace(char octect) {
        return (octect == 0x20 || octect == 0xd || octect == 0xa || octect == 0x9);
    }
    /** @return true if the character is the padding character '='. */
    private static boolean isPad(char octect) {
        return (octect == PAD);
    }
    /** @return true if the character is a valid Base64 alphabet character. */
    private static boolean isData(char octect) {
        return (octect < BASELENGTH && base64Alphabet[octect] != -1);
    }
    /**
     * Encodes binary data into a Base64 string.
     *
     * @param binaryData bytes to encode; may be null
     * @return Base64 text, "" for an empty array, or null if input is null
     */
    public static String encode(byte[] binaryData) {
        if (binaryData == null) {
            return null;
        }
        int lengthDataBits = binaryData.length * EIGHTBIT;
        if (lengthDataBits == 0) {
            return "";
        }
        int fewerThan24bits = lengthDataBits % TWENTYFOURBITGROUP;
        int numberTriplets = lengthDataBits / TWENTYFOURBITGROUP;
        int numberQuartet = fewerThan24bits != 0 ? numberTriplets + 1 : numberTriplets;
        char[] encodedData = new char[numberQuartet * 4];
        byte k, l, b1, b2, b3;
        int encodedIndex = 0;
        int dataIndex = 0;
        for (int i = 0; i < numberTriplets; i++) {
            b1 = binaryData[dataIndex++];
            b2 = binaryData[dataIndex++];
            b3 = binaryData[dataIndex++];
            l = (byte) (b2 & 0x0f);
            k = (byte) (b1 & 0x03);
            // Java's >> on bytes is arithmetic (sign-extending); the XOR masks
            // the smeared sign bits back out for negative byte values.
            byte val1 = ((b1 & SIGN) == 0) ? (byte) (b1 >> 2) : (byte) ((b1) >> 2 ^ 0xc0);
            byte val2 = ((b2 & SIGN) == 0) ? (byte) (b2 >> 4) : (byte) ((b2) >> 4 ^ 0xf0);
            byte val3 = ((b3 & SIGN) == 0) ? (byte) (b3 >> 6) : (byte) ((b3) >> 6 ^ 0xfc);
            encodedData[encodedIndex++] = lookUpBase64Alphabet[val1];
            encodedData[encodedIndex++] = lookUpBase64Alphabet[val2 | (k << 4)];
            encodedData[encodedIndex++] = lookUpBase64Alphabet[(l << 2) | val3];
            encodedData[encodedIndex++] = lookUpBase64Alphabet[b3 & 0x3f];
        }
        // Encode the 1- or 2-byte tail and pad the final quartet.
        if (fewerThan24bits == EIGHTBIT) {
            b1 = binaryData[dataIndex];
            k = (byte) (b1 & 0x03);
            byte val1 = ((b1 & SIGN) == 0) ? (byte) (b1 >> 2) : (byte) ((b1) >> 2 ^ 0xc0);
            encodedData[encodedIndex++] = lookUpBase64Alphabet[val1];
            encodedData[encodedIndex++] = lookUpBase64Alphabet[k << 4];
            encodedData[encodedIndex++] = PAD;
            encodedData[encodedIndex++] = PAD;
        } else if (fewerThan24bits == SIXTEENBIT) {
            b1 = binaryData[dataIndex];
            b2 = binaryData[dataIndex + 1];
            l = (byte) (b2 & 0x0f);
            k = (byte) (b1 & 0x03);
            byte val1 = ((b1 & SIGN) == 0) ? (byte) (b1 >> 2) : (byte) ((b1) >> 2 ^ 0xc0);
            byte val2 = ((b2 & SIGN) == 0) ? (byte) (b2 >> 4) : (byte) ((b2) >> 4 ^ 0xf0);
            encodedData[encodedIndex++] = lookUpBase64Alphabet[val1];
            encodedData[encodedIndex++] = lookUpBase64Alphabet[val2 | (k << 4)];
            encodedData[encodedIndex++] = lookUpBase64Alphabet[l << 2];
            encodedData[encodedIndex++] = PAD;
        }
        return new String(encodedData);
    }
    /**
     * Decodes Base64 text into bytes. Whitespace is stripped first.
     *
     * @param encoded string containing Base64 data; may be null
     * @return decoded bytes, or null if input is null or malformed
     */
    public static byte[] decode(String encoded) {
        if (encoded == null) {
            return null;
        }
        char[] base64Data = encoded.toCharArray();
        int len = removeWhiteSpace(base64Data);
        if (len % FOURBYTE != 0) {
            return null; // length must be divisible by four
        }
        int numberQuadruple = (len / FOURBYTE);
        if (numberQuadruple == 0) {
            return new byte[0];
        }
        byte[] decodedData = new byte[(numberQuadruple) * 3];
        byte b1, b2, b3, b4;
        char d1, d2, d3, d4;
        int i = 0;
        int encodedIndex = 0;
        int dataIndex = 0;
        // All quadruples except the last must be plain data (no padding).
        for (; i < numberQuadruple - 1; i++) {
            if (!isData((d1 = base64Data[dataIndex++])) || !isData((d2 = base64Data[dataIndex++]))
                    || !isData((d3 = base64Data[dataIndex++]))
                    || !isData((d4 = base64Data[dataIndex++]))) {
                return null; // non-alphabet character
            }
            b1 = base64Alphabet[d1];
            b2 = base64Alphabet[d2];
            b3 = base64Alphabet[d3];
            b4 = base64Alphabet[d4];
            decodedData[encodedIndex++] = (byte) (b1 << 2 | b2 >> 4);
            decodedData[encodedIndex++] = (byte) (((b2 & 0xf) << 4) | ((b3 >> 2) & 0xf));
            decodedData[encodedIndex++] = (byte) (b3 << 6 | b4);
        }
        // The last quadruple: first two chars must be data, last two may be PAD.
        if (!isData((d1 = base64Data[dataIndex++])) || !isData((d2 = base64Data[dataIndex++]))) {
            return null;
        }
        b1 = base64Alphabet[d1];
        b2 = base64Alphabet[d2];
        d3 = base64Data[dataIndex++];
        d4 = base64Data[dataIndex++];
        if (!isData((d3)) || !isData((d4))) { // at least one PAD candidate
            if (isPad(d3) && isPad(d4)) {
                // "xx==": one decoded byte; low 4 bits of b2 must be zero.
                if ((b2 & 0xf) != 0) {
                    return null;
                }
                byte[] tmp = new byte[i * 3 + 1];
                System.arraycopy(decodedData, 0, tmp, 0, i * 3);
                tmp[encodedIndex] = (byte) (b1 << 2 | b2 >> 4);
                return tmp;
            } else if (!isPad(d3) && isPad(d4)) {
                // "xxx=": two decoded bytes; low 2 bits of b3 must be zero.
                b3 = base64Alphabet[d3];
                if ((b3 & 0x3) != 0) {
                    return null;
                }
                byte[] tmp = new byte[i * 3 + 2];
                System.arraycopy(decodedData, 0, tmp, 0, i * 3);
                tmp[encodedIndex++] = (byte) (b1 << 2 | b2 >> 4);
                tmp[encodedIndex] = (byte) (((b2 & 0xf) << 4) | ((b3 >> 2) & 0xf));
                return tmp;
            } else {
                return null; // PAD in third position but not fourth is invalid
            }
        } else { // no padding, e.g. "3cQl"
            b3 = base64Alphabet[d3];
            b4 = base64Alphabet[d4];
            decodedData[encodedIndex++] = (byte) (b1 << 2 | b2 >> 4);
            decodedData[encodedIndex++] = (byte) (((b2 & 0xf) << 4) | ((b3 >> 2) & 0xf));
            decodedData[encodedIndex++] = (byte) (b3 << 6 | b4);
        }
        return decodedData;
    }
    /**
     * Compacts the array by removing whitespace in place.
     *
     * @param data characters of the Base64 text (may contain whitespace)
     * @return the new logical length after whitespace removal
     */
    private static int removeWhiteSpace(char[] data) {
        if (data == null) {
            return 0;
        }
        int newSize = 0;
        int len = data.length;
        for (int i = 0; i < len; i++) {
            if (!isWhiteSpace(data[i])) {
                data[newSize++] = data[i];
            }
        }
        return newSize;
    }
}
| |
/*************************GO-LICENSE-START*********************************
* Copyright 2014 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*************************GO-LICENSE-END***********************************/
package com.thoughtworks.go.config.materials;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import com.thoughtworks.go.config.*;
import com.thoughtworks.go.config.materials.dependency.DependencyMaterialConfig;
import com.thoughtworks.go.config.materials.git.GitMaterialConfig;
import com.thoughtworks.go.config.materials.mercurial.HgMaterialConfig;
import com.thoughtworks.go.config.materials.perforce.P4MaterialConfig;
import com.thoughtworks.go.config.materials.svn.SvnMaterialConfig;
import com.thoughtworks.go.config.materials.tfs.TfsMaterialConfig;
import com.thoughtworks.go.config.remote.ConfigRepoConfig;
import com.thoughtworks.go.config.remote.FileConfigOrigin;
import com.thoughtworks.go.config.remote.RepoConfigOrigin;
import com.thoughtworks.go.domain.ConfigErrors;
import com.thoughtworks.go.domain.materials.MaterialConfig;
import com.thoughtworks.go.domain.scm.SCMMother;
import com.thoughtworks.go.helper.GoConfigMother;
import com.thoughtworks.go.helper.MaterialConfigsMother;
import com.thoughtworks.go.security.GoCipher;
import com.thoughtworks.go.util.command.UrlArgument;
import org.junit.Before;
import org.junit.Test;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.startsWith;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsNull.nullValue;
import static org.junit.Assert.assertThat;
public class MaterialConfigsTest {
// Factory for building CruiseConfig/pipeline fixtures; recreated before each test.
private GoConfigMother goConfigMother;
@Before
public void setUp() throws Exception {
    goConfigMother = new GoConfigMother();
}
/** Two dependency materials sharing one (case-insensitive) name must both be flagged as duplicates. */
@Test
public void shouldNotAllowMoreThanOneDependencyWithSameName() throws Exception {
    CruiseConfig config = GoConfigMother.configWithPipelines("pipeline1", "pipeline2", "pipeline3", "go");
    DependencyMaterialConfig one = new DependencyMaterialConfig(new CaseInsensitiveString("sameName"), new CaseInsensitiveString("pipeline2"), new CaseInsensitiveString("stage"));
    DependencyMaterialConfig another = new DependencyMaterialConfig(new CaseInsensitiveString("sameName"), new CaseInsensitiveString("pipeline3"), new CaseInsensitiveString("stage"));
    MaterialConfigs materialConfigs = new MaterialConfigs(one, another);
    ValidationContext validationContext = ValidationContext.forChain(config);
    materialConfigs.validate(validationContext);
    // Both offending materials carry the duplicate-name error, not just the second one.
    assertThat(one.errors().isEmpty(), is(false));
    assertThat(one.errors().on("materialName"), containsString("You have defined multiple materials called 'sameName'. Material names are case-insensitive and must be unique."));
    assertThat(another.errors().isEmpty(), is(false));
    assertThat(another.errors().on("materialName"), containsString("You have defined multiple materials called 'sameName'. Material names are case-insensitive and must be unique."));
}
/*
    Name
    Pipeline X - Material1 - pipeline1
               - Material2 - someSvn
               - DepMaterial1 - "" dependent on a pipeline named "pipeline1"
    The scenario above is allowed.
*/
/**
 * An unnamed dependency material defaults its name to the upstream pipeline; if
 * another material already uses that name, the dependency must be rejected with
 * a hint explaining the default-naming rule.
 */
@Test
public void shouldNotAllowAnEmptyDepMaterialWhenOtherMaterialsUseThatPipelineName() throws Exception {
    CruiseConfig config = GoConfigMother.configWithPipelines("pipeline1", "pipeline2", "pipeline3", "go");
    SvnMaterialConfig one = new SvnMaterialConfig("svn://abc", "", "", false);
    one.setName(new CaseInsensitiveString("pipeline2"));
    DependencyMaterialConfig invalidOne = new DependencyMaterialConfig(new CaseInsensitiveString("pipeline2"), new CaseInsensitiveString("stage"));
    MaterialConfigs materials = new MaterialConfigs(one, invalidOne);
    ValidationContext validationContext = ValidationContext.forChain(config);
    materials.validate(validationContext);
    assertThat(invalidOne.errors().isEmpty(), is(false));
    assertThat(invalidOne.errors().on("materialName"), is("You have defined multiple materials called 'pipeline2'."
            + " Material names are case-insensitive and must be unique. Note that for dependency materials the default materialName is the name of the upstream pipeline. "
            + "You can override this by setting the materialName explicitly for the upstream pipeline."));
}
/** A simple linear upstream/downstream dependency contains no cycle, so both pipelines validate cleanly. */
@Test
public void shouldReturnValidWhenThereIsNoCycle() throws Exception {
    CruiseConfig cruiseConfig = new BasicCruiseConfig();
    PipelineConfig upstream = goConfigMother.addPipeline(cruiseConfig, "pipeline1", "stage", "build");
    PipelineConfig downstream = goConfigMother.addPipeline(cruiseConfig, "pipeline2", "stage", "build");
    goConfigMother.setDependencyOn(cruiseConfig, downstream, "pipeline1", "stage");
    for (PipelineConfig pipeline : new PipelineConfig[]{upstream, downstream}) {
        pipeline.materialConfigs().validate(ValidationContext.forChain(cruiseConfig));
        assertThat(pipeline.materialConfigs().errors().isEmpty(), is(true));
    }
}
/**
 * A file-defined pipeline may not depend on a pipeline that lives in a config
 * repository: the dependency material must carry an ORIGIN error.
 */
@Test
public void shouldNotAllowToDependOnPipelineDefinedInConfigRepository_WhenDownstreamInFile() throws Exception {
    CruiseConfig cruiseConfig = new BasicCruiseConfig();
    PipelineConfig pipeline1 = goConfigMother.addPipeline(cruiseConfig, "pipeline1", "stage", "build");
    PipelineConfig pipeline2 = goConfigMother.addPipeline(cruiseConfig, "pipeline2", "stage", "build");
    goConfigMother.setDependencyOn(cruiseConfig, pipeline2, "pipeline1", "stage");
    // upstream comes from a config repo, downstream from the main config file
    pipeline1.setOrigin(new RepoConfigOrigin());
    pipeline2.setOrigin(new FileConfigOrigin());
    pipeline1.materialConfigs().validate(ValidationContext.forChain(cruiseConfig,new BasicPipelineConfigs(),pipeline1));
    assertThat(pipeline1.materialConfigs().errors().isEmpty(), is(true));
    pipeline2.materialConfigs().validate(ValidationContext.forChain(cruiseConfig,new BasicPipelineConfigs(),pipeline2));
    DependencyMaterialConfig invalidDependency = pipeline2.materialConfigs().findDependencyMaterial(new CaseInsensitiveString("pipeline1"));
    assertThat(invalidDependency.errors().isEmpty(), is(false));
    assertThat(invalidDependency.errors().on(DependencyMaterialConfig.ORIGIN),startsWith("Dependency from pipeline defined in"));
}
/**
 * Pipelines defined in (different) config repositories may depend on each
 * other: no ORIGIN error is expected on the dependency material.
 */
@Test
public void shouldAllowToDependOnPipelineDefinedInConfigRepository_WhenInConfigRepository() throws Exception {
    CruiseConfig cruiseConfig = new BasicCruiseConfig();
    PipelineConfig pipeline1 = goConfigMother.addPipeline(cruiseConfig, "pipeline1", "stage", "build");
    PipelineConfig pipeline2 = goConfigMother.addPipeline(cruiseConfig, "pipeline2", "stage", "build");
    goConfigMother.setDependencyOn(cruiseConfig, pipeline2, "pipeline1", "stage");
    // both pipelines originate from config repositories
    pipeline1.setOrigin(new RepoConfigOrigin(new ConfigRepoConfig(new SvnMaterialConfig("http://mysvn", false), "myplugin"), "123"));
    pipeline2.setOrigin(new RepoConfigOrigin(new ConfigRepoConfig(new SvnMaterialConfig("http://othersvn", false), "myplugin"), "2222"));
    pipeline1.materialConfigs().validate(ValidationContext.forChain(cruiseConfig,new BasicPipelineConfigs(),pipeline1));
    assertThat(pipeline1.materialConfigs().errors().isEmpty(), is(true));
    pipeline2.materialConfigs().validate(ValidationContext.forChain(cruiseConfig,new BasicPipelineConfigs(),pipeline2));
    DependencyMaterialConfig dep = pipeline2.materialConfigs().findDependencyMaterial(new CaseInsensitiveString("pipeline1"));
    assertThat(dep.errors().isEmpty(), is(true));
}
/** Two file-defined pipelines may depend on each other without origin errors. */
@Test
public void shouldAllowToDependOnPipelineDefinedInFile_WhenInFile() throws Exception {
    CruiseConfig cruiseConfig = new BasicCruiseConfig();
    PipelineConfig pipeline1 = goConfigMother.addPipeline(cruiseConfig, "pipeline1", "stage", "build");
    PipelineConfig pipeline2 = goConfigMother.addPipeline(cruiseConfig, "pipeline2", "stage", "build");
    goConfigMother.setDependencyOn(cruiseConfig, pipeline2, "pipeline1", "stage");
    // both pipelines originate from the main config file
    pipeline1.setOrigin(new FileConfigOrigin());
    pipeline2.setOrigin(new FileConfigOrigin());
    pipeline1.materialConfigs().validate(ValidationContext.forChain(cruiseConfig,new BasicPipelineConfigs(),pipeline1));
    assertThat(pipeline1.materialConfigs().errors().isEmpty(), is(true));
    pipeline2.materialConfigs().validate(ValidationContext.forChain(cruiseConfig,new BasicPipelineConfigs(),pipeline2));
    DependencyMaterialConfig dep = pipeline2.materialConfigs().findDependencyMaterial(new CaseInsensitiveString("pipeline1"));
    assertThat(dep.errors().isEmpty(), is(true));
}
/** A pipeline may depend on a given upstream pipeline only once; the duplicate gets a pipelineStageName error. */
@Test
public void shouldNotAllowMultipleDependenciesForTheSamePipelines() throws Exception {
    CruiseConfig config = GoConfigMother.configWithPipelines("pipeline1", "pipeline2", "pipeline3", "go");
    DependencyMaterialConfig dependencyMaterial = new DependencyMaterialConfig(new CaseInsensitiveString("pipeline2"), new CaseInsensitiveString("stage"));
    DependencyMaterialConfig duplicateDependencyMaterial = new DependencyMaterialConfig(new CaseInsensitiveString("pipeline2"), new CaseInsensitiveString("stage"));
    MaterialConfigs materialConfigs = new MaterialConfigs(dependencyMaterial, duplicateDependencyMaterial);
    ValidationContext validationContext = ValidationContext.forChain(config);
    materialConfigs.validate(validationContext);
    ConfigErrors errors = duplicateDependencyMaterial.errors();
    assertThat(errors.isEmpty(), is(false));
    assertThat(errors.on("pipelineStageName"),
            is("A pipeline can depend on each upstream pipeline only once. Remove one of the occurrences of 'pipeline2' from the current pipeline dependencies."));
}
/** Materials with an explicitly empty name are excluded from the duplicate-name check. */
@Test
public void shouldIgnorePipelineWithEmptyNameInUniquenessCheck() throws Exception {
    CruiseConfig config = GoConfigMother.configWithPipelines("pipeline1", "pipeline2", "pipeline3", "go");
    DependencyMaterialConfig one = new DependencyMaterialConfig(new CaseInsensitiveString(""), new CaseInsensitiveString("pipeline2"), new CaseInsensitiveString("stage"));
    DependencyMaterialConfig another = new DependencyMaterialConfig(new CaseInsensitiveString(""), new CaseInsensitiveString("pipeline3"), new CaseInsensitiveString("stage"));
    MaterialConfigs materials = new MaterialConfigs(one, another);
    ValidationContext validationContext = ValidationContext.forChain(config);
    materials.validate(validationContext);
    assertThat(one.errors().isEmpty(), is(true));
    assertThat(another.errors().isEmpty(), is(true));
}
/**
 * Depending on a pipeline missing from the config produces no material-level
 * errors here.
 * NOTE(review): despite the method name, this only asserts the absence of
 * errors — the missing upstream is presumably validated elsewhere; confirm.
 */
@Test
public void shouldReturnTrueWhenDependencyPipelineDoesNotExist() throws Exception {
    CruiseConfig cruiseConfig = new BasicCruiseConfig();
    PipelineConfig pipelineConfig = goConfigMother.addPipeline(cruiseConfig, "pipeline1", "stage", "build");
    goConfigMother.setDependencyOn(cruiseConfig,pipelineConfig, "pipeline2", "stage");
    pipelineConfig.materialConfigs().validate(ValidationContext.forChain(cruiseConfig, new BasicPipelineConfigs(),pipelineConfig));
    assertThat(pipelineConfig.materialConfigs().errors().isEmpty(), is(true));
}
/** Every SCM/pluggable material sharing one destination folder must get the conflicting-directory error. */
@Test
public void shouldFailIfAllScmMaterialsInAPipelineHaveSameFolders() throws IOException {
    HgMaterialConfig materialOne = new HgMaterialConfig("http://url1", null);
    materialOne.setConfigAttributes(Collections.singletonMap(ScmMaterialConfig.FOLDER, "folder1"));
    HgMaterialConfig materialTwo = new HgMaterialConfig("http://url2", null);
    materialTwo.setConfigAttributes(Collections.singletonMap(ScmMaterialConfig.FOLDER, "folder1"));
    PluggableSCMMaterialConfig materialThree = new PluggableSCMMaterialConfig(null, SCMMother.create("scm-id"), "folder1", null);
    CruiseConfig config = GoConfigMother.configWithPipelines("one");
    PipelineConfig pipelineOne = config.pipelineConfigByName(new CaseInsensitiveString("one"));
    pipelineOne.setMaterialConfigs(new MaterialConfigs(materialOne, materialTwo, materialThree));
    pipelineOne.materialConfigs().validate(ValidationContext.forChain(config));
    String conflictingDirMessage = "Invalid Destination Directory. Every material needs a different destination directory and the directories should not be nested.";
    // all three materials share "folder1", so all three are flagged
    assertThat(pipelineOne.materialConfigs().get(0).errors().on(ScmMaterialConfig.FOLDER), is(conflictingDirMessage));
    assertThat(pipelineOne.materialConfigs().get(1).errors().on(ScmMaterialConfig.FOLDER), is(conflictingDirMessage));
    assertThat(pipelineOne.materialConfigs().get(2).errors().on(PluggableSCMMaterialConfig.FOLDER), is(conflictingDirMessage));
}
/** Materials with distinct destination folders validate without errors. */
@Test
public void shouldNotFailIfAllScmMaterialsInAPipelineHaveDifferentFolders() {
    HgMaterialConfig first = new HgMaterialConfig("http://url1", null);
    first.setConfigAttributes(Collections.singletonMap(ScmMaterialConfig.FOLDER, "folder1"));
    HgMaterialConfig second = new HgMaterialConfig("http://url2", null);
    second.setConfigAttributes(Collections.singletonMap(ScmMaterialConfig.FOLDER, "folder2"));
    CruiseConfig config = GoConfigMother.configWithPipelines("one");
    PipelineConfig pipeline = config.pipelineConfigByName(new CaseInsensitiveString("one"));
    pipeline.setMaterialConfigs(new MaterialConfigs(first, second));
    pipeline.materialConfigs().validate(ValidationContext.forChain(config));
    for (int i = 0; i < 2; i++) {
        assertThat(pipeline.materialConfigs().get(i).errors().isEmpty(), is(true));
    }
}
/** Looking up a material by an unknown fingerprint yields null rather than throwing. */
@Test
public void shouldReturnNullWhenMaterialNotFoundForTheGivenFingerPrint() {
    CruiseConfig cruiseConfig = new BasicCruiseConfig();
    PipelineConfig pipeline = goConfigMother.addPipeline(cruiseConfig, "pipeline1", "stage", "build");
    MaterialConfig found = pipeline.materialConfigs().getByFingerPrint("invalid");
    assertThat(found, is(nullValue()));
}
/**
 * With multiple SCM materials, every material lacking an explicit destination
 * folder must be flagged; the one with a folder stays clean.
 */
@Test
public void shouldFailIfMultipleMaterialsDoNotHaveDestinationFolderSet() {
    HgMaterialConfig materialConfigOne = new HgMaterialConfig("http://url1", null);
    materialConfigOne.setConfigAttributes(Collections.singletonMap(ScmMaterialConfig.FOLDER, "folder"));
    HgMaterialConfig materialConfigTwo = new HgMaterialConfig("http://url2", null);
    PluggableSCMMaterialConfig materialConfigThree = new PluggableSCMMaterialConfig(null, SCMMother.create("scm-id"), null, null);
    CruiseConfig config = GoConfigMother.configWithPipelines("one");
    PipelineConfig pipelineOne = config.pipelineConfigByName(new CaseInsensitiveString("one"));
    pipelineOne.setMaterialConfigs((new MaterialConfigs(materialConfigOne, materialConfigTwo, materialConfigThree)));
    pipelineOne.materialConfigs().validate(ValidationContext.forChain(config));
    assertThat(pipelineOne.materialConfigs().get(0).errors().isEmpty(), is(true));
    assertThat(pipelineOne.materialConfigs().get(1).errors().on(ScmMaterialConfig.FOLDER), is("Destination directory is required when specifying multiple scm materials"));
    assertThat(pipelineOne.materialConfigs().get(2).errors().on(PluggableSCMMaterialConfig.FOLDER), is("Destination directory is required when specifying multiple scm materials"));
}
@Test
public void shouldNotFailIfMultipleMaterialsHaveUniqueDestinationFolderSet() {
    // Distinct destination folders on every material: no validation errors expected.
    HgMaterialConfig hgOne = new HgMaterialConfig("http://url1", null);
    hgOne.setConfigAttributes(Collections.singletonMap(ScmMaterialConfig.FOLDER, "folder"));
    HgMaterialConfig hgTwo = new HgMaterialConfig("http://url2", null);
    hgTwo.setConfigAttributes(Collections.singletonMap(ScmMaterialConfig.FOLDER, "folder2"));

    CruiseConfig cruiseConfig = GoConfigMother.configWithPipelines("one");
    PipelineConfig pipeline = cruiseConfig.pipelineConfigByName(new CaseInsensitiveString("one"));
    pipeline.setMaterialConfigs(new MaterialConfigs(hgOne, hgTwo));

    pipeline.materialConfigs().validate(ValidationContext.forChain(cruiseConfig));

    assertThat(pipeline.materialConfigs().get(0).errors().isEmpty(), is(true));
    assertThat(pipeline.materialConfigs().get(1).errors().isEmpty(), is(true));
}
@Test
public void shouldCheckSCMMaterialsHaveDestinationCorrectly() {
    // A single SCM material with a folder set: scmMaterialsHaveDestination() reports true.
    HgMaterialConfig materialConfigOne = new HgMaterialConfig("http://url1", null);
    materialConfigOne.setConfigAttributes(Collections.singletonMap(ScmMaterialConfig.FOLDER, "folder"));
    CruiseConfig config = GoConfigMother.configWithPipelines("one");
    PipelineConfig pipelineOne = config.pipelineConfigByName(new CaseInsensitiveString("one"));
    pipelineOne.setMaterialConfigs(new MaterialConfigs(materialConfigOne));
    assertThat(pipelineOne.materialConfigs().scmMaterialsHaveDestination(), is(true));

    // Adding a pluggable SCM material with no destination folder flips the check to false.
    PluggableSCMMaterialConfig materialConfigTwo = new PluggableSCMMaterialConfig(null, SCMMother.create("scm-id"), null, null);
    pipelineOne.materialConfigs().add(materialConfigTwo);
    assertThat(pipelineOne.materialConfigs().scmMaterialsHaveDestination(), is(false));
}
@Test
public void shouldShowAutoUpdateMismatchErrorTwiceWhenMaterialIsAddedToSamePipeline() throws Exception {
    // The same repository URL (hence the same fingerprint) twice in one pipeline, with
    // conflicting autoUpdate flags: each copy must carry exactly one error of its own.
    HgMaterialConfig autoUpdating = new HgMaterialConfig("http://url1", null);
    autoUpdating.setConfigAttributes(Collections.singletonMap(ScmMaterialConfig.FOLDER, "some-folder"));
    autoUpdating.setAutoUpdate(true);
    HgMaterialConfig nonAutoUpdating = new HgMaterialConfig("http://url1", null);
    nonAutoUpdating.setConfigAttributes(Collections.singletonMap(ScmMaterialConfig.FOLDER, "some-folder-2"));
    nonAutoUpdating.setAutoUpdate(false);

    CruiseConfig cruiseConfig = GoConfigMother.configWithPipelines("one");
    PipelineConfig pipeline = cruiseConfig.pipelineConfigByName(new CaseInsensitiveString("one"));
    pipeline.setMaterialConfigs(new MaterialConfigs(autoUpdating, nonAutoUpdating));

    pipeline.materialConfigs().validate(ValidationContext.forChain(cruiseConfig));

    assertThat(pipeline.materialConfigs().get(0).errors().getAll().size(), is(1));
    assertThat(pipeline.materialConfigs().get(1).errors().getAll().size(), is(1));
}
@Test
public void shouldNotThrowUpOnMaterialIfAutoUpdateValuesAreCorrect() throws Exception {
    // The same material URL in two different pipelines with MATCHING autoUpdate flags is valid.
    HgMaterialConfig materialInPipelineOne = new HgMaterialConfig("http://url1", null);
    materialInPipelineOne.setAutoUpdate(true);
    HgMaterialConfig materialInPipelineTwo = new HgMaterialConfig("http://url1", null);
    materialInPipelineTwo.setAutoUpdate(true);

    CruiseConfig cruiseConfig = GoConfigMother.configWithPipelines("one", "two", "three");
    PipelineConfig pipelineOne = cruiseConfig.pipelineConfigByName(new CaseInsensitiveString("one"));
    pipelineOne.setMaterialConfigs(new MaterialConfigs(materialInPipelineOne));
    PipelineConfig pipelineTwo = cruiseConfig.pipelineConfigByName(new CaseInsensitiveString("two"));
    pipelineTwo.setMaterialConfigs(new MaterialConfigs(materialInPipelineTwo));

    pipelineOne.materialConfigs().validate(ValidationContext.forChain(cruiseConfig));

    assertThat(pipelineOne.materialConfigs().get(0).errors().isEmpty(), is(true));
    assertThat(pipelineTwo.materialConfigs().get(0).errors().isEmpty(), is(true));
}
@Test
public void shouldReturnMaterialBasedOnPiplineUniqueFingerPrint() {
    // NOTE(review): "Pipline" typo is in the original test name; kept so reported names stay stable.
    CruiseConfig cruiseConfig = new BasicCruiseConfig();
    PipelineConfig pipeline = goConfigMother.addPipeline(cruiseConfig, "pipeline1", "stage", "build");
    HgMaterialConfig expectedMaterial = MaterialConfigsMother.hgMaterialConfig();
    pipeline.addMaterialConfig(expectedMaterial);
    pipeline.addMaterialConfig(MaterialConfigsMother.gitMaterialConfig("url"));
    pipeline.addMaterialConfig(MaterialConfigsMother.svnMaterialConfig("url", "folder"));

    // Lookup by pipeline-unique fingerprint must find exactly the hg material among the three.
    MaterialConfig found = pipeline.materialConfigs().getByFingerPrint(expectedMaterial.getPipelineUniqueFingerprint());
    assertThat((HgMaterialConfig) found, is(expectedMaterial));
}
@Test
public void shouldReturnTrueWhenNoDependencyDefined() throws Exception {
// A pipeline whose materials reference no other pipeline should validate without errors.
CruiseConfig cruiseConfig = new BasicCruiseConfig();
PipelineConfig pipelineConfig = goConfigMother.addPipeline(cruiseConfig, "pipeline1", "stage", "build");
goConfigMother.addPipeline(cruiseConfig, "pipeline2", "stage", "build");
pipelineConfig.materialConfigs().validate(ValidationContext.forChain(cruiseConfig));
assertThat(pipelineConfig.materialConfigs().errors().isEmpty(), is(true));
}
@Test
public void shouldAddErrorOnMaterialIfAutoUpdateDoesNotMatchAcrossFingerPrint() throws Exception {
// Same material (identical fingerprint: same hg URL) in two pipelines with conflicting autoUpdate values.
HgMaterialConfig materialOne = new HgMaterialConfig("http://url1", null);
materialOne.setAutoUpdate(false);
HgMaterialConfig materialTwo = new HgMaterialConfig("http://url1", null);
materialTwo.setAutoUpdate(true);
CruiseConfig config = GoConfigMother.configWithPipelines("one", "two");
PipelineConfig pipelineOne = config.pipelineConfigByName(new CaseInsensitiveString("one"));
pipelineOne.setMaterialConfigs(new MaterialConfigs(materialOne));
PipelineConfig pipelineTwo = config.pipelineConfigByName(new CaseInsensitiveString("two"));
pipelineTwo.setMaterialConfigs(new MaterialConfigs(materialTwo));
pipelineOne.materialConfigs().validate(ValidationContext.forChain(config));
// Expected text mirrors the production message verbatim (including its "the a material" wording).
assertThat(pipelineOne.materialConfigs().get(0).errors().on(ScmMaterialConfig.AUTO_UPDATE),
is("Material of type Mercurial (http://url1) is specified more than once in the configuration with different values for the autoUpdate attribute. All copies of the a material should have the same value for this attribute."));
}
@Test
public void shouldNotRunMultipleMaterialsValidationIfPipelineContainsOnlyOneMaterial() {
    // A lone material needs no destination folder, so a null folder must not be flagged.
    CruiseConfig cruiseConfig = GoConfigMother.configWithPipelines("one");
    PipelineConfig pipeline = cruiseConfig.pipelineConfigByName(new CaseInsensitiveString("one"));
    SvnMaterialConfig svn = MaterialConfigsMother.svnMaterialConfig();
    svn.setFolder(null);
    pipeline.setMaterialConfigs(new MaterialConfigs(svn));

    pipeline.materialConfigs().validate(ValidationContext.forChain(cruiseConfig));

    assertThat(svn.errors().toString(), svn.errors().getAll().size(), is(0));
}
@Test
public void shouldSetSvnConfigAttributesForMaterial() {
    MaterialConfigs materialConfigs = new MaterialConfigs();

    // Nested attribute map as submitted by the UI: material-type key -> type-specific attributes.
    Map<String, Object> svnAttributes = new HashMap<String, Object>();
    svnAttributes.put(SvnMaterialConfig.URL, "foo");
    svnAttributes.put(SvnMaterialConfig.USERNAME, "bar");
    svnAttributes.put(SvnMaterialConfig.PASSWORD, "baz");
    svnAttributes.put(SvnMaterialConfig.CHECK_EXTERNALS, false);
    Map<String, Object> attributes = new HashMap<String, Object>();
    attributes.put(AbstractMaterialConfig.MATERIAL_TYPE, SvnMaterialConfig.TYPE);
    attributes.put(SvnMaterialConfig.TYPE, svnAttributes);

    materialConfigs.setConfigAttributes(attributes);

    assertThat((SvnMaterialConfig) materialConfigs.first(), is(new SvnMaterialConfig("foo", "bar", "baz", false)));
}
@Test
public void shouldSetTfsConfigAttributesForMaterial() {
    MaterialConfigs materialConfigs = new MaterialConfigs();

    Map<String, String> tfsAttributes = new HashMap<String, String>();
    tfsAttributes.put(TfsMaterialConfig.URL, "foo");
    tfsAttributes.put(TfsMaterialConfig.USERNAME, "bar");
    tfsAttributes.put(TfsMaterialConfig.PASSWORD, "baz");
    tfsAttributes.put(TfsMaterialConfig.PROJECT_PATH, "to_hell");
    tfsAttributes.put(TfsMaterialConfig.MATERIAL_NAME, "crapy_material");
    tfsAttributes.put(TfsMaterialConfig.DOMAIN, "CORPORATE");
    Map<String, Object> attributes = new HashMap<String, Object>();
    attributes.put(AbstractMaterialConfig.MATERIAL_TYPE, TfsMaterialConfig.TYPE);
    attributes.put(TfsMaterialConfig.TYPE, tfsAttributes);

    materialConfigs.setConfigAttributes(attributes);

    // The expected material mirrors every attribute fed in above, including the display name.
    TfsMaterialConfig expected = new TfsMaterialConfig(new GoCipher(), new UrlArgument("foo"), "bar", "CORPORATE", "baz", "to_hell");
    expected.setName(new CaseInsensitiveString("crapy_material"));
    assertThat((TfsMaterialConfig) materialConfigs.first(), is(expected));
    assertThat(expected.getPassword(), is("baz"));
}
@Test
public void shouldClearExistingAndSetHgConfigAttributesForMaterial() {
    // Start with two pre-existing materials; setConfigAttributes must replace them wholesale.
    MaterialConfigs materialConfigs = new MaterialConfigs();
    materialConfigs.add(new HgMaterialConfig("", null));
    materialConfigs.add(new SvnMaterialConfig("", "", "", false));

    Map<String, String> hgAttributes = new HashMap<String, String>();
    hgAttributes.put(HgMaterialConfig.URL, "foo");
    Map<String, Object> attributes = new HashMap<String, Object>();
    attributes.put(AbstractMaterialConfig.MATERIAL_TYPE, HgMaterialConfig.TYPE);
    attributes.put(HgMaterialConfig.TYPE, hgAttributes);

    materialConfigs.setConfigAttributes(attributes);

    assertThat(materialConfigs.size(), is(1));
    assertThat((HgMaterialConfig) materialConfigs.first(), is(new HgMaterialConfig("foo", null)));
}
@Test
public void shouldSetGitConfigAttributesForMaterial() {
    MaterialConfigs materialConfigs = new MaterialConfigs();
    Map<String, String> hashMap = new HashMap<String, String>();
    hashMap.put(GitMaterialConfig.URL, "foo");
    hashMap.put(GitMaterialConfig.BRANCH, "master");
    // Declared as Map (not concrete HashMap), consistent with the sibling shouldSet* tests.
    Map<String, Object> attributeMap = new HashMap<String, Object>();
    attributeMap.put(AbstractMaterialConfig.MATERIAL_TYPE, GitMaterialConfig.TYPE);
    attributeMap.put(GitMaterialConfig.TYPE, hashMap);

    materialConfigs.setConfigAttributes(attributeMap);

    assertThat(materialConfigs.size(), is(1));
    // Expected material carries the same URL and branch that were submitted.
    GitMaterialConfig expected = new GitMaterialConfig("foo");
    expected.setConfigAttributes(Collections.singletonMap(GitMaterialConfig.BRANCH, "master"));
    assertThat((GitMaterialConfig) materialConfigs.first(), is(expected));
}
@Test
public void shouldSetP4ConfigAttributesForMaterial() {
    MaterialConfigs materialConfigs = new MaterialConfigs();

    Map<String, String> p4Attributes = new HashMap<String, String>();
    p4Attributes.put(P4MaterialConfig.SERVER_AND_PORT, "localhost:1666");
    p4Attributes.put(P4MaterialConfig.USERNAME, "username");
    p4Attributes.put(P4MaterialConfig.PASSWORD, "password");
    p4Attributes.put(P4MaterialConfig.VIEW, "foo");
    Map<String, Object> attributes = new HashMap<String, Object>();
    attributes.put(AbstractMaterialConfig.MATERIAL_TYPE, P4MaterialConfig.TYPE);
    attributes.put(P4MaterialConfig.TYPE, p4Attributes);

    materialConfigs.setConfigAttributes(attributes);

    assertThat(materialConfigs.size(), is(1));
    P4MaterialConfig expected = new P4MaterialConfig("localhost:1666", "foo", "username");
    expected.setPassword("password");
    assertThat((P4MaterialConfig) materialConfigs.first(), is(expected));
}
@Test
public void shouldSetDependencyMaterialConfigAttributesForMaterial() {
    MaterialConfigs materialConfigs = new MaterialConfigs();

    // "pipeline [stage]" is the combined form submitted for an upstream dependency.
    Map<String, String> dependencyAttributes = new HashMap<String, String>();
    dependencyAttributes.put(DependencyMaterialConfig.PIPELINE_STAGE_NAME, "blah [foo]");
    Map<String, Object> attributes = new HashMap<String, Object>();
    attributes.put(AbstractMaterialConfig.MATERIAL_TYPE, DependencyMaterialConfig.TYPE);
    attributes.put(DependencyMaterialConfig.TYPE, dependencyAttributes);

    materialConfigs.setConfigAttributes(attributes);

    assertThat(materialConfigs.size(), is(1));
    DependencyMaterialConfig expected = new DependencyMaterialConfig(new CaseInsensitiveString("blah"), new CaseInsensitiveString("foo"));
    assertThat((DependencyMaterialConfig) materialConfigs.first(), is(expected));
}
@Test
public void shouldSetPackageMaterialConfigAttributesForMaterial() {
    String packageId = "some-id";
    Map<String, String> packageAttributes = new HashMap<String, String>();
    packageAttributes.put(PackageMaterialConfig.PACKAGE_ID, packageId);
    Map<String, Object> attributes = new HashMap<String, Object>();
    attributes.put(AbstractMaterialConfig.MATERIAL_TYPE, PackageMaterialConfig.TYPE);
    attributes.put(PackageMaterialConfig.TYPE, packageAttributes);

    MaterialConfigs materialConfigs = new MaterialConfigs();
    materialConfigs.setConfigAttributes(attributes);

    assertThat(materialConfigs.size(), is(1));
    assertThat(((PackageMaterialConfig) materialConfigs.first()).getPackageId(), is(packageId));
}
@Test
public void shouldGetExistingOrDefaultMaterialCorrectly() {
    SvnMaterialConfig svn = new SvnMaterialConfig("http://test.com", false);
    PackageMaterialConfig requested = new PackageMaterialConfig("p1");
    PackageMaterialConfig existing = new PackageMaterialConfig("p2");

    // A package material already present in the list wins over the requested one ...
    assertThat(new MaterialConfigs(svn, existing).getExistingOrDefaultMaterial(requested).getPackageId(), is("p2"));
    // ... but with no package material present, the requested one is returned as the default.
    assertThat(new MaterialConfigs(svn).getExistingOrDefaultMaterial(requested).getPackageId(), is("p1"));
}
@Test
public void shouldSetPluggableSCMMaterialConfigAttributesForMaterial() {
    String scmId = "scm-id";
    Map<String, String> scmAttributes = new HashMap<String, String>();
    scmAttributes.put(PluggableSCMMaterialConfig.SCM_ID, scmId);
    Map<String, Object> attributes = new HashMap<String, Object>();
    attributes.put(AbstractMaterialConfig.MATERIAL_TYPE, PluggableSCMMaterialConfig.TYPE);
    attributes.put(PluggableSCMMaterialConfig.TYPE, scmAttributes);

    MaterialConfigs materialConfigs = new MaterialConfigs();
    materialConfigs.setConfigAttributes(attributes);

    assertThat(materialConfigs.size(), is(1));
    assertThat(((PluggableSCMMaterialConfig) materialConfigs.first()).getScmId(), is(scmId));
}
@Test
public void shouldGetExistingOrDefaultPluggableSCMMaterialCorrectly() {
    SvnMaterialConfig svn = new SvnMaterialConfig("http://test.com", false);
    PluggableSCMMaterialConfig requested = new PluggableSCMMaterialConfig("scm-id-1");
    PluggableSCMMaterialConfig existing = new PluggableSCMMaterialConfig("scm-id-2");

    // An existing pluggable SCM material takes precedence; otherwise the requested one is used.
    assertThat(new MaterialConfigs(svn, existing).getExistingOrDefaultMaterial(requested).getScmId(), is("scm-id-2"));
    assertThat(new MaterialConfigs(svn).getExistingOrDefaultMaterial(requested).getScmId(), is("scm-id-1"));
}
}
| |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.collection.primitive.hopscotch;
import org.junit.experimental.theories.DataPoint;
import org.junit.experimental.theories.Theories;
import org.junit.experimental.theories.Theory;
import org.junit.runner.RunWith;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;
import org.neo4j.collection.primitive.Primitive;
import org.neo4j.collection.primitive.PrimitiveCollection;
import org.neo4j.collection.primitive.PrimitiveIntLongMap;
import org.neo4j.collection.primitive.PrimitiveIntObjectMap;
import org.neo4j.collection.primitive.PrimitiveIntSet;
import org.neo4j.collection.primitive.PrimitiveLongIntMap;
import org.neo4j.collection.primitive.PrimitiveLongLongMap;
import org.neo4j.collection.primitive.PrimitiveLongObjectMap;
import org.neo4j.collection.primitive.PrimitiveLongSet;
import org.neo4j.function.Factory;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.isOneOf;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assume.assumeTrue;
@SuppressWarnings( "unchecked" )
@RunWith( Theories.class )
public class PrimitiveCollectionEqualityTest
{
private static interface Value<T extends PrimitiveCollection>
{
void add( T coll );
/**
* @return 'true' if what was removed was exactly the value that was put in.
*/
boolean remove( T coll );
}
/**
 * Produces random Value instances targeting one particular PrimitiveCollection subtype, and
 * can report whether a given collection factory builds instances of that subtype.
 */
private static abstract class ValueProducer<T extends PrimitiveCollection>
{
private final Class<T> applicableType;
public ValueProducer( Class<T> applicableType )
{
this.applicableType = applicableType;
}
public boolean isApplicable( Factory<? extends PrimitiveCollection> factory )
{
// Builds a throwaway instance (closed immediately) just to inspect its runtime type.
try ( PrimitiveCollection coll = factory.newInstance() )
{
return applicableType.isInstance( coll );
}
}
public abstract Value<T> randomValue();
}
// ==== Test Value Producers ====
// Each producer below builds Values holding fresh, never-before-seen random keys/values
// (see randomInt()/randomLong()) for one collection subtype.
@DataPoint
public static ValueProducer<PrimitiveIntSet> intV = new ValueProducer<PrimitiveIntSet>( PrimitiveIntSet.class )
{
@Override
public Value<PrimitiveIntSet> randomValue()
{
final int x = randomInt();
return new Value<PrimitiveIntSet>()
{
@Override
public void add( PrimitiveIntSet coll )
{
coll.add( x );
}
@Override
public boolean remove( PrimitiveIntSet coll )
{
return coll.remove( x );
}
};
}
};
@DataPoint
public static ValueProducer<PrimitiveLongSet> longV =
new ValueProducer<PrimitiveLongSet>( PrimitiveLongSet.class )
{
@Override
public Value<PrimitiveLongSet> randomValue()
{
final long x = randomLong();
return new Value<PrimitiveLongSet>()
{
@Override
public void add( PrimitiveLongSet coll )
{
coll.add( x );
}
@Override
public boolean remove( PrimitiveLongSet coll )
{
return coll.remove( x );
}
};
}
};
@DataPoint
public static ValueProducer<PrimitiveIntLongMap> intLongV = new ValueProducer<PrimitiveIntLongMap>(
PrimitiveIntLongMap.class )
{
@Override
public Value<PrimitiveIntLongMap> randomValue()
{
final int x = randomInt();
final long y = randomLong();
return new Value<PrimitiveIntLongMap>()
{
@Override
public void add( PrimitiveIntLongMap coll )
{
coll.put( x, y );
}
@Override
public boolean remove( PrimitiveIntLongMap coll )
{
// remove() returns the previous value; compare with y to confirm it was our entry.
return coll.remove( x ) == y;
}
};
}
};
@DataPoint
public static ValueProducer<PrimitiveLongIntMap> longIntV = new ValueProducer<PrimitiveLongIntMap>(
PrimitiveLongIntMap.class )
{
@Override
public Value<PrimitiveLongIntMap> randomValue()
{
final long x = randomLong();
final int y = randomInt();
return new Value<PrimitiveLongIntMap>()
{
@Override
public void add( PrimitiveLongIntMap coll )
{
coll.put( x, y );
}
@Override
public boolean remove( PrimitiveLongIntMap coll )
{
return coll.remove( x ) == y;
}
};
}
};
@DataPoint
public static ValueProducer<PrimitiveLongLongMap> longLongV = new ValueProducer<PrimitiveLongLongMap>(
PrimitiveLongLongMap.class )
{
@Override
public Value<PrimitiveLongLongMap> randomValue()
{
final long x = randomLong();
final long y = randomLong();
return new Value<PrimitiveLongLongMap>()
{
@Override
public void add( PrimitiveLongLongMap coll )
{
coll.put( x, y );
}
@Override
public boolean remove( PrimitiveLongLongMap coll )
{
return coll.remove( x ) == y;
}
};
}
};
@DataPoint
public static ValueProducer<PrimitiveIntObjectMap> intObjV =
new ValueProducer<PrimitiveIntObjectMap>( PrimitiveIntObjectMap.class )
{
@Override
public Value<PrimitiveIntObjectMap> randomValue()
{
final int x = randomInt();
final Object y = new Object();
return new Value<PrimitiveIntObjectMap>()
{
@Override
public void add( PrimitiveIntObjectMap coll )
{
coll.put( x, y );
}
@Override
public boolean remove( PrimitiveIntObjectMap coll )
{
// Identity comparison: the mapped value must be the exact Object we inserted.
return coll.remove( x ) == y;
}
};
}
};
@DataPoint
public static ValueProducer<PrimitiveLongObjectMap> longObjV =
new ValueProducer<PrimitiveLongObjectMap>( PrimitiveLongObjectMap.class )
{
@Override
public Value<PrimitiveLongObjectMap> randomValue()
{
final long x = randomLong();
final Object y = new Object();
return new Value<PrimitiveLongObjectMap>()
{
@Override
public void add( PrimitiveLongObjectMap coll )
{
coll.put( x, y );
}
@Override
public boolean remove( PrimitiveLongObjectMap coll )
{
return coll.remove( x ) == y;
}
};
}
};
// ==== Primitive Collection Implementations ====
// Factories covering on-heap and off-heap variants, each with default and randomised
// initial capacities, so equality is exercised across differing internal table sizes.
@DataPoint
public static Factory<PrimitiveIntSet> intSet = new Factory<PrimitiveIntSet>()
{
@Override
public PrimitiveIntSet newInstance()
{
return Primitive.intSet();
}
};
@DataPoint
public static Factory<PrimitiveIntSet> intSetWithCapacity = new Factory<PrimitiveIntSet>()
{
@Override
public PrimitiveIntSet newInstance()
{
return Primitive.intSet( randomCapacity() );
}
};
@DataPoint
public static Factory<PrimitiveIntSet> offheapIntSet = new Factory<PrimitiveIntSet>()
{
@Override
public PrimitiveIntSet newInstance()
{
return Primitive.offHeapIntSet();
}
};
@DataPoint
public static Factory<PrimitiveIntSet> offheapIntSetWithCapacity = new Factory<PrimitiveIntSet>()
{
@Override
public PrimitiveIntSet newInstance()
{
return Primitive.offHeapIntSet( randomCapacity() );
}
};
@DataPoint
public static Factory<PrimitiveLongSet> longSet = new Factory<PrimitiveLongSet>()
{
@Override
public PrimitiveLongSet newInstance()
{
return Primitive.longSet();
}
};
@DataPoint
public static Factory<PrimitiveLongSet> longSetWithCapacity = new Factory<PrimitiveLongSet>()
{
@Override
public PrimitiveLongSet newInstance()
{
return Primitive.longSet( randomCapacity() );
}
};
@DataPoint
public static Factory<PrimitiveLongSet> offheapLongSet = new Factory<PrimitiveLongSet>()
{
@Override
public PrimitiveLongSet newInstance()
{
return Primitive.offHeapLongSet();
}
};
@DataPoint
public static Factory<PrimitiveLongSet> offheapLongSetWithCapacity = new Factory<PrimitiveLongSet>()
{
@Override
public PrimitiveLongSet newInstance()
{
return Primitive.offHeapLongSet( randomCapacity() );
}
};
@DataPoint
public static Factory<PrimitiveIntLongMap> intLongMap = new Factory<PrimitiveIntLongMap>()
{
@Override
public PrimitiveIntLongMap newInstance()
{
return Primitive.intLongMap();
}
};
@DataPoint
public static Factory<PrimitiveIntLongMap> intLongMapWithCapacity = new Factory<PrimitiveIntLongMap>()
{
@Override
public PrimitiveIntLongMap newInstance()
{
return Primitive.intLongMap( randomCapacity() );
}
};
@DataPoint
public static Factory<PrimitiveLongIntMap> longIntMap = new Factory<PrimitiveLongIntMap>()
{
@Override
public PrimitiveLongIntMap newInstance()
{
return Primitive.longIntMap();
}
};
@DataPoint
public static Factory<PrimitiveLongIntMap> longIntMapWithCapacity = new Factory<PrimitiveLongIntMap>()
{
@Override
public PrimitiveLongIntMap newInstance()
{
return Primitive.longIntMap( randomCapacity() );
}
};
@DataPoint
public static Factory<PrimitiveLongLongMap> offheapLongLongMap = new Factory<PrimitiveLongLongMap>()
{
@Override
public PrimitiveLongLongMap newInstance()
{
return Primitive.offHeapLongLongMap();
}
};
@DataPoint
public static Factory<PrimitiveLongLongMap> offheapLongLongMapWithCapacity = new Factory<PrimitiveLongLongMap>()
{
@Override
public PrimitiveLongLongMap newInstance()
{
return Primitive.offHeapLongLongMap( randomCapacity() );
}
};
@DataPoint
public static Factory<PrimitiveIntObjectMap> intObjMap = new Factory<PrimitiveIntObjectMap>()
{
@Override
public PrimitiveIntObjectMap newInstance()
{
return Primitive.intObjectMap();
}
};
@DataPoint
public static Factory<PrimitiveIntObjectMap> intObjMapWithCapacity = new Factory<PrimitiveIntObjectMap>()
{
@Override
public PrimitiveIntObjectMap newInstance()
{
return Primitive.intObjectMap( randomCapacity() );
}
};
@DataPoint
public static Factory<PrimitiveLongObjectMap> longObjectMap = new Factory<PrimitiveLongObjectMap>()
{
@Override
public PrimitiveLongObjectMap newInstance()
{
return Primitive.longObjectMap();
}
};
@DataPoint
public static Factory<PrimitiveLongObjectMap> longObjectMapWithCapacity = new Factory<PrimitiveLongObjectMap>()
{
@Override
public PrimitiveLongObjectMap newInstance()
{
return Primitive.longObjectMap( randomCapacity() );
}
};
// Values already handed out across ALL tests, so randomInt()/randomLong() never repeat.
private static final PrimitiveIntSet observedRandomInts = Primitive.intSet();
private static final PrimitiveLongSet observedRandomLongs = Primitive.longSet();
/**
 * Produce a random int that hasn't been seen before by any test.
 */
private static int randomInt()
{
    // -1 is rejected (NOTE(review): appears reserved as a sentinel by the hash tables — confirm);
    // observedRandomInts.add(..) returning false means the value was already handed out.
    int candidate = ThreadLocalRandom.current().nextInt();
    while ( candidate == -1 || !observedRandomInts.add( candidate ) )
    {
        candidate = ThreadLocalRandom.current().nextInt();
    }
    return candidate;
}
/**
 * Produce a random long that hasn't been seen before by any test.
 */
private static long randomLong()
{
    // -1 is rejected (NOTE(review): appears reserved as a sentinel by the hash tables — confirm);
    // observedRandomLongs.add(..) returning false means the value was already handed out.
    long candidate = ThreadLocalRandom.current().nextLong();
    while ( candidate == -1 || !observedRandomLongs.add( candidate ) )
    {
        candidate = ThreadLocalRandom.current().nextLong();
    }
    return candidate;
}
private static int randomCapacity()
{
// Random initial capacity in [30, 1200), so collections start at assorted table sizes.
return ThreadLocalRandom.current().nextInt( 30, 1200 );
}
/**
 * Asserts the two collections are equal in both directions and agree on hashCode.
 */
private void assertEquals( PrimitiveCollection a, PrimitiveCollection b )
{
assertThat( a, is( equalTo( b ) ) );
assertThat( b, is( equalTo( a ) ) );
assertThat( a.hashCode(), is( equalTo( b.hashCode() ) ) );
}
@Theory
public void collectionsAreNotEqualToObjectsOfOtherTypes( Factory<PrimitiveCollection> factory )
{
    // Close the collection when done: off-heap variants hold native memory, and every
    // sibling theory in this class already uses try-with-resources.
    try ( PrimitiveCollection coll = factory.newInstance() )
    {
        assertNotEquals( coll, new Object() );
    }
}
@Theory
public void emptyCollectionsAreEqual(
ValueProducer values, Factory<PrimitiveCollection> factoryA, Factory<PrimitiveCollection> factoryB )
{
// Only compare factories that build the collection subtype this producer targets.
assumeTrue( values.isApplicable( factoryA ) );
assumeTrue( values.isApplicable( factoryB ) );
try ( PrimitiveCollection a = factoryA.newInstance();
PrimitiveCollection b = factoryB.newInstance() )
{
assertEquals( a, b );
}
}
@Theory
public void addingTheSameValuesMustProduceEqualCollections(
ValueProducer values, Factory<PrimitiveCollection> factoryA, Factory<PrimitiveCollection> factoryB )
{
assumeTrue( values.isApplicable( factoryA ) );
assumeTrue( values.isApplicable( factoryB ) );
try ( PrimitiveCollection a = factoryA.newInstance();
PrimitiveCollection b = factoryB.newInstance() )
{
// One identical mutation applied to both collections must leave them equal.
Value value = values.randomValue();
value.add( a );
value.add( b );
assertEquals( a, b );
}
}
@Theory
public void addingDifferentValuesMustProduceUnequalCollections(
ValueProducer values, Factory<PrimitiveCollection> factoryA, Factory<PrimitiveCollection> factoryB )
{
assumeTrue( values.isApplicable( factoryA ) );
assumeTrue( values.isApplicable( factoryB ) );
try ( PrimitiveCollection a = factoryA.newInstance();
PrimitiveCollection b = factoryB.newInstance() )
{
// randomValue() never repeats, so a and b receive genuinely different contents.
values.randomValue().add( a );
values.randomValue().add( b );
assertNotEquals( a, b );
}
}
@Theory
public void differentButEquivalentMutationsShouldProduceEqualCollections(
ValueProducer values, Factory<PrimitiveCollection> factoryA, Factory<PrimitiveCollection> factoryB )
{
// Note that this test, cute as it is, also verifies that the hashCode implementation is order-invariant :)
assumeTrue( values.isApplicable( factoryA ) );
assumeTrue( values.isApplicable( factoryB ) );
try ( PrimitiveCollection a = factoryA.newInstance();
PrimitiveCollection b = factoryB.newInstance() )
{
Value x = values.randomValue();
Value y = values.randomValue();
Value z = values.randomValue();
// a receives {x, z}; b receives {z, y, x} then has y removed — same end state, different history.
x.add( a );
z.add( a );
z.add( b );
y.add( b );
x.add( b );
y.remove( b );
assertEquals( a, b );
}
}
@Theory
public void capacityDifferencesMustNotInfluenceEquality(
ValueProducer values, Factory<PrimitiveCollection> factoryA, Factory<PrimitiveCollection> factoryB )
{
assumeTrue( values.isApplicable( factoryA ) );
assumeTrue( values.isApplicable( factoryB ) );
try ( PrimitiveCollection a = factoryA.newInstance();
PrimitiveCollection b = factoryB.newInstance() )
{
// Inflate b with 10000 throwaway values (5000 before and 5000 after the shared one) so its
// internal table grows, then remove them all; a never grows past a single element.
List<Value> tmps = new ArrayList<>();
for ( int i = 0; i < 5000; i++ )
{
Value value = values.randomValue();
value.add( b );
tmps.add( value );
}
Value specificValue = values.randomValue();
specificValue.add( a );
specificValue.add( b );
for ( int i = 0; i < 5000; i++ )
{
Value value = values.randomValue();
value.add( b );
tmps.add( value );
}
// Remove in random order to exercise assorted removal paths.
Collections.shuffle( tmps );
for ( Value value : tmps )
{
value.remove( b );
}
// Both now hold only specificValue; differing capacities must not affect equals/hashCode.
assertEquals( a, b );
}
}
@Theory
public void hashCodeMustFollowValues(
ValueProducer values, Factory<PrimitiveCollection> factory )
{
assumeTrue( values.isApplicable( factory ) );
try ( PrimitiveCollection a = factory.newInstance() )
{
Value x = values.randomValue();
Value y = values.randomValue();
Value z = values.randomValue();
// Snapshot the hash after each add (i..l) and after each removal in reverse (m..o);
// the hash must depend only on current contents, not on mutation history.
int i = a.hashCode();
x.add( a );
int j = a.hashCode();
y.add( a );
int k = a.hashCode();
z.add( a );
int l = a.hashCode();
z.remove( a );
int m = a.hashCode();
y.remove( a );
int n = a.hashCode();
x.remove( a );
int o = a.hashCode();
assertThat( "0 elm hashcode equal", o, is( i ) );
assertThat( "1 elm hashcode equal", n, is( j ) );
assertThat( "2 elm hashcode equal", m, is( k ) );
assertThat( "3 elm hashcode distinct", l, not( isOneOf( i, j, k, m, n, o ) ) );
assertThat( "2 elm hashcode distinct", k, not( isOneOf( i, j, l, n, o ) ) );
assertThat( "1 elm hashcode distinct", n, not( isOneOf( i, k, l, m, o ) ) );
assertThat( "0 elm hashcode distinct", i, not( isOneOf( j, k, l, m, n ) ) );
}
}
}
| |
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.proto;
import static com.google.common.collect.Iterables.isEmpty;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.ResourceSet;
import com.google.devtools.build.lib.actions.ResourceSetOrBuilder;
import com.google.devtools.build.lib.analysis.FilesToRunProvider;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.starlark.Args;
import com.google.devtools.build.lib.collect.nestedset.Depset;
import com.google.devtools.build.lib.collect.nestedset.Depset.ElementType;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.collect.nestedset.Order;
import com.google.devtools.build.lib.packages.RuleClass.ConfiguredTargetFactory.RuleErrorException;
import com.google.devtools.build.lib.util.OS;
import java.util.HashSet;
import java.util.List;
import net.starlark.java.eval.Dict;
import net.starlark.java.eval.EvalException;
import net.starlark.java.eval.Starlark;
import net.starlark.java.eval.StarlarkCallable;
import net.starlark.java.eval.StarlarkFloat;
import net.starlark.java.eval.StarlarkFunction;
import net.starlark.java.eval.StarlarkInt;
import net.starlark.java.eval.StarlarkList;
import net.starlark.java.eval.StarlarkThread;
import net.starlark.java.eval.Tuple;
/**
 * Constructs actions to run the protocol compiler to generate sources from .proto files.
 *
 * <p>Used as a builder: configure optional inputs, plugins and flags through the fluent setters,
 * then call {@link #maybeRegister} to hand the assembled arguments to the Starlark-defined
 * builtin {@code create_proto_compile_action}. The static {@link #registerActions} entry point is
 * the proto_lang_toolchain()-driven variant intended for newer rules.
 */
public class ProtoCompileActionBuilder {
  // Action mnemonic used when setMnemonic(...) is never called.
  private static final String DEFAULT_MNEMONIC = "GenProto";

  @VisibleForTesting
  public static final String STRICT_DEPS_FLAG_TEMPLATE =
      "--direct_dependencies_violation_msg=" + ProtoConstants.STRICT_PROTO_DEPS_VIOLATION_MESSAGE;

  // Required state, fixed at construction time.
  private final ProtoInfo protoInfo;
  private final FilesToRunProvider protoCompiler;
  private final String progressMessage;
  private final Iterable<Artifact> outputs;
  // Optional state; each field may remain null, in which case the corresponding
  // argument is simply not emitted by maybeRegister().
  private Iterable<Artifact> inputs;
  private FilesToRunProvider langPlugin;
  private String langPluginFormat;
  private Iterable<String> langPluginParameter;
  private String langPluginParameterFormat;
  private boolean hasServices;
  private Iterable<String> additionalCommandLineArguments;
  private Iterable<FilesToRunProvider> additionalTools;
  private String mnemonic;

  /** Sets whether service definitions are allowed; when false, {@code --disallow_services} is passed. */
  public ProtoCompileActionBuilder allowServices(boolean hasServices) {
    this.hasServices = hasServices;
    return this;
  }

  /** Sets additional inputs for the compile action (beyond what the Starlark builtin adds). */
  public ProtoCompileActionBuilder setInputs(Iterable<Artifact> inputs) {
    this.inputs = inputs;
    return this;
  }

  /**
   * Sets the language plugin executable and the format string its path is substituted into
   * (e.g. a {@code --plugin=...} template).
   */
  public ProtoCompileActionBuilder setLangPlugin(
      FilesToRunProvider langPlugin, String langPluginFormat) {
    this.langPlugin = langPlugin;
    this.langPluginFormat = langPluginFormat;
    return this;
  }

  /** Overrides the default {@code GenProto} action mnemonic. */
  public ProtoCompileActionBuilder setMnemonic(String mnemonic) {
    this.mnemonic = mnemonic;
    return this;
  }

  /**
   * Sets plugin parameters; they are joined (with an empty separator) and formatted with
   * {@code langPluginParameterFormat} when the action is registered.
   */
  public ProtoCompileActionBuilder setLangPluginParameter(
      Iterable<String> langPluginParameter, String langPluginParameterFormat) {
    this.langPluginParameter = langPluginParameter;
    this.langPluginParameterFormat = langPluginParameterFormat;
    return this;
  }

  /** Sets extra command-line arguments appended verbatim to the compiler invocation. */
  public ProtoCompileActionBuilder setAdditionalCommandLineArguments(
      Iterable<String> additionalCmdLine) {
    this.additionalCommandLineArguments = additionalCmdLine;
    return this;
  }

  /** Sets extra tools made available to the action (merged into the plugins list). */
  public ProtoCompileActionBuilder setAdditionalTools(
      Iterable<FilesToRunProvider> additionalTools) {
    this.additionalTools = additionalTools;
    return this;
  }

  public ProtoCompileActionBuilder(
      ProtoInfo protoInfo,
      FilesToRunProvider protoCompiler,
      String progressMessage,
      Iterable<Artifact> outputs) {
    this.protoInfo = protoInfo;
    this.protoCompiler = protoCompiler;
    this.progressMessage = progressMessage;
    this.outputs = outputs;
    this.mnemonic = DEFAULT_MNEMONIC;
  }

  /** Builds a ResourceSet based on the number of inputs. */
  public static class ProtoCompileResourceSetBuilder implements ResourceSetOrBuilder {
    @Override
    public ResourceSet buildResourceSet(OS os, int inputsSize) {
      // Linear RAM estimate; the same formula is mirrored by the anonymous
      // StarlarkCallable in maybeRegister() below — keep the two in sync.
      return ResourceSet.createWithRamCpu(
          /* memoryMb= */ 25 + 0.15 * inputsSize, /* cpuUsage= */ 1);
    }
  }

  /**
   * Registers the proto compile action, or does nothing when there are no outputs.
   *
   * <p>Builds the optional plugin/parameter/service flags into a Starlark {@code Args} object and
   * delegates action creation to the Starlark builtin {@code create_proto_compile_action}.
   *
   * @throws RuleErrorException if flag construction or the Starlark call reports an error
   */
  public void maybeRegister(RuleContext ruleContext)
      throws RuleErrorException, InterruptedException {
    if (isEmpty(outputs)) {
      return;
    }
    ruleContext.initStarlarkRuleContext();
    StarlarkThread thread = ruleContext.getStarlarkThread();
    Args additionalArgs = Args.newArgs(thread.mutability(), thread.getSemantics());
    try {
      if (langPlugin != null && langPlugin.getExecutable() != null) {
        // We pass a separate langPlugin as there are plugins that cannot be overridden
        // and thus we have to deal with "$xx_plugin" and "xx_plugin".
        additionalArgs.addArgument(
            langPlugin.getExecutable(), /*value=*/ Starlark.UNBOUND, langPluginFormat, thread);
      }
      if (langPluginParameter != null) {
        // Join all parameters into one formatted argument (empty join string).
        additionalArgs.addJoined(
            StarlarkList.immutableCopyOf(langPluginParameter),
            /*values=*/ Starlark.UNBOUND,
            /*joinWith=*/ "",
            /*mapEach=*/ Starlark.NONE,
            /*formatEach=*/ Starlark.NONE,
            /*formatJoined=*/ langPluginParameterFormat,
            /*omitIfEmpty=*/ true,
            /*uniquify=*/ false,
            /*expandDirectories=*/ true,
            /*allowClosure=*/ false,
            thread);
      }
      if (!hasServices) {
        additionalArgs.addArgument(
            "--disallow_services",
            /* value = */ Starlark.UNBOUND,
            /* format = */ Starlark.NONE,
            thread);
      }
      if (additionalCommandLineArguments != null) {
        additionalArgs.addAll(
            StarlarkList.immutableCopyOf(additionalCommandLineArguments),
            /*values=*/ Starlark.UNBOUND,
            /*mapEach=*/ Starlark.NONE,
            /*formatEach=*/ Starlark.NONE,
            /*beforeEach=*/ Starlark.NONE,
            /*omitIfEmpty=*/ true,
            /*uniquify=*/ false,
            /*expandDirectories=*/ true,
            /*terminateWith=*/ Starlark.NONE,
            /*allowClosure=*/ false,
            thread);
      }
    } catch (EvalException e) {
      // NOTE(review): registerActions() below passes e.getMessageWithStack() here instead of
      // the exception itself — confirm which throwWithRuleError overload is intended and
      // make the two call sites consistent.
      throw ruleContext.throwWithRuleError(e);
    }
    ImmutableList.Builder<FilesToRunProvider> plugins = new ImmutableList.Builder<>();
    if (additionalTools != null) {
      plugins.addAll(additionalTools);
    }
    if (langPlugin != null) {
      plugins.add(langPlugin);
    }
    StarlarkFunction createProtoCompileAction =
        (StarlarkFunction) ruleContext.getStarlarkDefinedBuiltin("create_proto_compile_action");
    ruleContext.callStarlarkOrThrowRuleError(
        createProtoCompileAction,
        ImmutableList.of(
            /* ctx */ ruleContext.getStarlarkRuleContext(),
            /* proto_info */ protoInfo,
            /* proto_compiler */ protoCompiler,
            /* progress_message */ progressMessage,
            /* outputs */ StarlarkList.immutableCopyOf(outputs),
            /* additional_args */ additionalArgs,
            /* plugins */ StarlarkList.immutableCopyOf(plugins.build()),
            /* mnemonic */ mnemonic,
            /* additional_inputs */ inputs == null
                ? Depset.of(ElementType.EMPTY, NestedSetBuilder.emptySet(Order.STABLE_ORDER))
                : Depset.of(Artifact.TYPE, NestedSetBuilder.wrap(Order.STABLE_ORDER, inputs)),
            /* resource_set */
            new StarlarkCallable() {
              @Override
              public String getName() {
                return "proto_compile_resource_set";
              }

              @Override
              public Object call(StarlarkThread thread, Tuple args, Dict<String, Object> kwargs) {
                // args are a tuple of OS and inputsSize
                int inputsSize = ((StarlarkInt) args.get(1)).toIntUnchecked();
                // Same linear RAM estimate as ProtoCompileResourceSetBuilder above.
                return Dict.immutableCopyOf(
                    ImmutableMap.of(
                        "memory",
                        StarlarkFloat.of(25 + 0.15 * inputsSize),
                        "cpu",
                        StarlarkInt.of(1)));
              }
            }),
        ImmutableMap.of());
  }

  /** Whether to allow services in the proto compiler invocation. */
  public enum Services {
    ALLOW,
    DISALLOW,
  }

  /**
   * Registers actions to generate code from .proto files.
   *
   * <p>This method uses information from proto_lang_toolchain() rules. New rules should use this
   * method instead of the soup of methods above.
   *
   * @param outputs The artifacts that the resulting action must create.
   * @param progressMessage Please use "Generating {flavorName} proto_library %{label}".
   * @param allowServices If false, the compilation will break if any .proto file has service
   */
  public static void registerActions(
      RuleContext ruleContext,
      List<ToolchainInvocation> toolchainInvocations,
      ProtoInfo protoInfo,
      Iterable<Artifact> outputs,
      String progressMessage,
      Services allowServices)
      throws RuleErrorException, InterruptedException {
    if (isEmpty(outputs)) {
      return;
    }
    ProtoToolchainInfo protoToolchain = ProtoToolchainInfo.fromRuleContext(ruleContext);
    if (protoToolchain == null) {
      // fromRuleContext is expected to have reported the error already.
      return;
    }
    ruleContext.initStarlarkRuleContext();
    StarlarkThread thread = ruleContext.getStarlarkThread();
    Args additionalArgs = Args.newArgs(thread.mutability(), thread.getSemantics());
    // A set to check if there are multiple invocations with the same name.
    HashSet<String> invocationNames = new HashSet<>();
    ImmutableList.Builder<Object> plugins = ImmutableList.builder();
    try {
      for (ToolchainInvocation invocation : toolchainInvocations) {
        if (!invocationNames.add(invocation.name)) {
          throw new IllegalStateException(
              "Invocation name "
                  + invocation.name
                  + " appears more than once. "
                  + "This could lead to incorrect proto-compiler behavior");
        }
        ProtoLangToolchainProvider toolchain = invocation.toolchain;
        // $(OUT) replacement, e.g. the srcjar/output path for this toolchain.
        String format = toolchain.outReplacementFormatFlag();
        additionalArgs.addArgument(
            invocation.outReplacement, /*value=*/ Starlark.UNBOUND, format, thread);
        if (toolchain.pluginExecutable() != null) {
          additionalArgs.addArgument(
              toolchain.pluginExecutable().getExecutable(),
              /*value=*/ Starlark.UNBOUND,
              toolchain.pluginFormatFlag(),
              thread);
          plugins.add(toolchain.pluginExecutable());
        }
        additionalArgs.addJoined(
            StarlarkList.immutableCopyOf(invocation.protocOpts),
            /*values=*/ Starlark.UNBOUND,
            /*joinWith=*/ "",
            /*mapEach=*/ Starlark.NONE,
            /*formatEach=*/ Starlark.NONE,
            /*formatJoined=*/ Starlark.NONE,
            /*omitIfEmpty=*/ true,
            /*uniquify=*/ false,
            /*expandDirectories=*/ true,
            /*allowClosure=*/ false,
            thread);
      }
      if (allowServices == Services.DISALLOW) {
        additionalArgs.addArgument(
            "--disallow_services", /*value=*/ Starlark.UNBOUND, /*format=*/ Starlark.NONE, thread);
      }
    } catch (EvalException e) {
      // NOTE(review): maybeRegister() above passes the EvalException itself; this site passes
      // only the message-with-stack string — confirm which overload is intended.
      throw ruleContext.throwWithRuleError(e.getMessageWithStack());
    }
    StarlarkFunction createProtoCompileAction =
        (StarlarkFunction) ruleContext.getStarlarkDefinedBuiltin("create_proto_compile_action");
    ruleContext.callStarlarkOrThrowRuleError(
        createProtoCompileAction,
        ImmutableList.of(
            /* ctx */ ruleContext.getStarlarkRuleContext(),
            /* proto_info */ protoInfo,
            /* proto_compiler */ protoToolchain.getCompiler(),
            /* progress_message */ progressMessage,
            /* outputs */ StarlarkList.immutableCopyOf(outputs),
            /* additional_args */ additionalArgs,
            /* plugins */ StarlarkList.immutableCopyOf(plugins.build())),
        ImmutableMap.of());
  }

  /**
   * Describes a toolchain and the value to replace for a $(OUT) that might appear in its
   * commandLine() (e.g., "bazel-out/foo.srcjar").
   */
  public static class ToolchainInvocation {
    final String name;
    public final ProtoLangToolchainProvider toolchain;
    final CharSequence outReplacement;
    final ImmutableList<String> protocOpts;

    public ToolchainInvocation(
        String name, ProtoLangToolchainProvider toolchain, CharSequence outReplacement) {
      this(name, toolchain, outReplacement, ImmutableList.of());
    }

    public ToolchainInvocation(
        String name,
        ProtoLangToolchainProvider toolchain,
        CharSequence outReplacement,
        ImmutableList<String> protocOpts) {
      // Names must be space-free: they are used in flag/diagnostic contexts.
      Preconditions.checkState(!name.contains(" "), "Name %s should not contain spaces", name);
      this.name = name;
      this.toolchain = toolchain;
      this.outReplacement = outReplacement;
      this.protocOpts = Preconditions.checkNotNull(protocOpts);
    }
  }
}
| |
/* -----------------------------------------------------------------------------
* Rule_cmdOrIntLit8.java
* -----------------------------------------------------------------------------
*
* Producer : com.parse2.aparse.Parser 2.3
* Produced : Fri Apr 12 10:40:21 MUT 2013
*
* -----------------------------------------------------------------------------
*/
package com.litecoding.smali2java.parser.cmd.and7or7xor.or;
import java.util.ArrayList;
import com.litecoding.smali2java.builder.Visitor;
import com.litecoding.smali2java.parser.ParserContext;
import com.litecoding.smali2java.parser.Rule;
import com.litecoding.smali2java.parser.Terminal_StringValue;
import com.litecoding.smali2java.parser.smali.Rule_codeRegister;
import com.litecoding.smali2java.parser.smali.Rule_codeRegisterVDst;
import com.litecoding.smali2java.parser.smali.Rule_commentSequence;
import com.litecoding.smali2java.parser.smali.Rule_intValue;
import com.litecoding.smali2java.parser.smali.Rule_listSeparator;
import com.litecoding.smali2java.parser.smali.Rule_optPadding;
import com.litecoding.smali2java.parser.smali.Rule_padding;
import com.litecoding.smali2java.parser.text.Rule_CRLF;
/**
 * Parser rule for the smali instruction {@code or-int/lit8 vDst, vSrc, #int}.
 *
 * <p>Grammar: optPadding "or-int/lit8" padding codeRegisterVDst listSeparator codeRegister
 * listSeparator intValue optPadding [padding commentSequence] CRLF.
 *
 * <p>The original generator (aparse 2.3) emitted ~20 identical "parse once, count, compare"
 * loops; they are collapsed into {@link #addIfParsed} with short-circuit sequencing, which is
 * behaviorally identical (each loop ran at most once, and {@code c1 == 1} iff the sub-rule
 * returned non-null). Backtracking indices (s0/s1/s2) are preserved exactly.
 */
final public class Rule_cmdOrIntLit8 extends Rule
{
  private Rule_cmdOrIntLit8(String spelling, ArrayList<Rule> rules)
  {
    super(spelling, rules);
  }

  public Object accept(Visitor visitor)
  {
    return visitor.visit(this);
  }

  /**
   * Appends {@code candidate} to {@code target} when it parsed (non-null).
   *
   * @return true iff {@code candidate} is non-null
   */
  private static boolean addIfParsed(ArrayList<Rule> target, Rule candidate)
  {
    if (candidate != null)
      target.add(candidate);
    return candidate != null;
  }

  /**
   * Attempts to parse one {@code cmdOrIntLit8} at the current position.
   *
   * @return the parsed rule, or null (with {@code context.index} restored) on failure
   */
  public static Rule_cmdOrIntLit8 parse(ParserContext context)
  {
    context.push("cmdOrIntLit8");
    int s0 = context.index;
    ArrayList<Rule> e0 = new ArrayList<Rule>();

    ArrayList<Rule> e1 = new ArrayList<Rule>();
    int s1 = context.index;

    // Mandatory sequence; short-circuit so later sub-rules never run after a failure,
    // matching the generated code's chain of "if (parsed)" guards.
    boolean parsed = addIfParsed(e1, Rule_optPadding.parse(context));
    parsed = parsed && addIfParsed(e1, Terminal_StringValue.parse(context, "or-int/lit8"));
    parsed = parsed && addIfParsed(e1, Rule_padding.parse(context));
    parsed = parsed && addIfParsed(e1, Rule_codeRegisterVDst.parse(context));
    parsed = parsed && addIfParsed(e1, Rule_listSeparator.parse(context));
    parsed = parsed && addIfParsed(e1, Rule_codeRegister.parse(context));
    parsed = parsed && addIfParsed(e1, Rule_listSeparator.parse(context));
    parsed = parsed && addIfParsed(e1, Rule_intValue.parse(context));
    parsed = parsed && addIfParsed(e1, Rule_optPadding.parse(context));

    if (parsed)
    {
      // Optional trailing "padding commentSequence"; never affects overall success.
      ArrayList<Rule> e2 = new ArrayList<Rule>();
      int s2 = context.index;
      boolean commentParsed = addIfParsed(e2, Rule_padding.parse(context));
      commentParsed = commentParsed && addIfParsed(e2, Rule_commentSequence.parse(context));
      if (commentParsed)
        e1.addAll(e2);
      else
        context.index = s2; // backtrack a partial comment match
    }

    // Mandatory line terminator.
    if (parsed)
      parsed = addIfParsed(e1, Rule_CRLF.parse(context));

    if (parsed)
      e0.addAll(e1);
    else
      context.index = s1;

    Rule_cmdOrIntLit8 rule = null;
    if (parsed)
      rule = new Rule_cmdOrIntLit8(context.text.substring(s0, context.index), e0);
    else
      context.index = s0;
    context.pop("cmdOrIntLit8", parsed);
    return rule;
  }
}
/* -----------------------------------------------------------------------------
* eof
* -----------------------------------------------------------------------------
*/
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.java.psi.formatter.java;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.codeStyle.CommonCodeStyleSettings;
import com.intellij.util.IncorrectOperationException;
import static com.intellij.formatting.FormatterTestUtils.Action.REFORMAT_WITH_CONTEXT;
/**
* Is intended to hold specific java formatting tests for alignment settings (
* {@code Project Settings - Code Style - Alignment and Braces}).
*
* @author Denis Zhdanov
* @since Apr 27, 2010 6:42:00 PM
*/
public class JavaFormatterAlignmentTest extends AbstractJavaFormatterTest {
public void testChainedMethodsAlignment() {
  // Inspired by IDEA-30369: chained calls wrapped "as needed" should stay aligned
  // even with a non-default continuation indent.
  final CommonCodeStyleSettings settings = getSettings();
  settings.METHOD_CALL_CHAIN_WRAP = CommonCodeStyleSettings.WRAP_AS_NEEDED;
  settings.ALIGN_MULTILINE_CHAINED_METHODS = true;
  settings.getRootSettings().getIndentOptions(StdFileTypes.JAVA).CONTINUATION_INDENT_SIZE = 8;
  doTest();
}
public void testMethodAndChainedField() {
  // Inspired by IDEA-79806
  // Call chained onto a field access: the already-formatted fixture must be kept as-is.
  getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true;
  doMethodTest(
    "Holder.INSTANCE\n" +
    " .foo();",
    "Holder.INSTANCE\n" +
    " .foo();"
  );
}
public void testChainedMethodWithComments() {
  // A trailing line comment inside the chain must not break alignment of later links.
  getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true;
  doMethodTest("AAAAA.b()\n" +
               ".c() // comment after line\n" +
               ".d()\n" +
               ".e();",
               "AAAAA.b()\n" +
               " .c() // comment after line\n" +
               " .d()\n" +
               " .e();");
}
public void testChainedMethodWithBlockComment() {
  // A block comment between chain links must be aligned with the chained calls.
  getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true;
  doTextTest("class X {\n" +
             " public void test() {\n" +
             " AAAAAA.b()\n" +
             ".c()\n" +
             ".d()\n" +
             " /* simple block comment */\n" +
             ".e();\n" +
             " }\n" +
             "}",
             "class X {\n" +
             " public void test() {\n" +
             " AAAAAA.b()\n" +
             " .c()\n" +
             " .d()\n" +
             " /* simple block comment */\n" +
             " .e();\n" +
             " }\n" +
             "}");
}
public void testMultipleMethodAnnotationsCommentedInTheMiddle() {
  // A commented-out annotation between real annotations must not disturb formatting.
  getSettings().BLANK_LINES_AFTER_CLASS_HEADER = 1;
  getSettings().getRootSettings().getIndentOptions(StdFileTypes.JAVA).INDENT_SIZE = 4;
  // Inspired by IDEA-53942
  doTextTest(
    "public class Test {\n" +
    " @Override\n" +
    "// @XmlElement(name = \"Document\", required = true, type = DocumentType.class)\n" +
    " @XmlTransient\n" +
    " void foo() {\n" +
    "}\n" +
    "}",
    "public class Test {\n" +
    "\n" +
    " @Override\n" +
    "// @XmlElement(name = \"Document\", required = true, type = DocumentType.class)\n" +
    " @XmlTransient\n" +
    " void foo() {\n" +
    " }\n" +
    "}"
  );
}
public void testTernaryOperator() {
  // Inspired by IDEADEV-13018: the ':' branch of a multiline ternary aligns under '?'.
  getSettings().ALIGN_MULTILINE_TERNARY_OPERATION = true;
  doMethodTest("int i = a ? x\n" + ": y;", "int i = a ? x\n" + " : y;");
}
public void testMethodCallArgumentsAndSmartTabs() throws IncorrectOperationException {
  // Inspired by IDEADEV-20144.
  // Smart tabs: indentation uses tabs, fine alignment within a line uses spaces.
  getSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true;
  getSettings().getRootSettings().getIndentOptions(StdFileTypes.JAVA).SMART_TABS = true;
  getSettings().getRootSettings().getIndentOptions(StdFileTypes.JAVA).USE_TAB_CHARACTER = true;
  doTextTest("class Foo {\n" +
             " void foo() {\n" +
             " bar(new Object[] {\n" +
             " \"hello1\",\n" +
             " \"hello2\", add(\"hello3\",\n" +
             " \"world\")\n" +
             "});" +
             " }}", "class Foo {\n" +
             "\tvoid foo() {\n" +
             "\t\tbar(new Object[]{\n" +
             "\t\t\t\t\"hello1\",\n" +
             "\t\t\t\t\"hello2\", add(\"hello3\",\n" +
             "\t\t\t\t \"world\")\n" +
             "\t\t});\n" +
             "\t}\n" +
             "}");
}
public void testArrayInitializer() throws IncorrectOperationException {
  // Inspired by IDEADEV-16136
  // Array initializer elements are chopped down and aligned under the first element.
  getSettings().ARRAY_INITIALIZER_WRAP = CommonCodeStyleSettings.WRAP_ALWAYS;
  getSettings().ALIGN_MULTILINE_ARRAY_INITIALIZER_EXPRESSION = true;
  doTextTest(
    "@SuppressWarnings({\"UseOfSystemOutOrSystemErr\", \"AssignmentToCollectionOrArrayFieldFromParameter\", \"ReturnOfCollectionOrArrayField\"})\n" +
    "public class Some {\n" +
    "}",
    "@SuppressWarnings({\"UseOfSystemOutOrSystemErr\",\n" +
    " \"AssignmentToCollectionOrArrayFieldFromParameter\",\n" +
    " \"ReturnOfCollectionOrArrayField\"})\n" +
    "public class Some {\n" +
    "}");
}
public void testMethodBrackets() {
  // Inspired by IDEA-53013
  // Closing parenthesis on its own line, aligned with the parameters.
  getSettings().ALIGN_MULTILINE_METHOD_BRACKETS = true;
  getSettings().ALIGN_MULTILINE_PARENTHESIZED_EXPRESSION = false;
  getSettings().ALIGN_MULTILINE_PARAMETERS = true;
  getSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true;
  getSettings().CALL_PARAMETERS_RPAREN_ON_NEXT_LINE = true;
  getSettings().METHOD_PARAMETERS_RPAREN_ON_NEXT_LINE = true;
  doClassTest(
    "public void foo(int i,\n" +
    " int j) {\n" +
    "}\n" +
    "\n" +
    " public void bar() {\n" +
    " foo(1,\n" +
    " 2);\n" +
    " }",
    "public void foo(int i,\n" +
    " int j\n" +
    " ) {\n" +
    "}\n" +
    "\n" +
    "public void bar() {\n" +
    " foo(1,\n" +
    " 2\n" +
    " );\n" +
    "}"
  );
  // Inspired by IDEA-55306
  // With bracket alignment off, an anonymous-class argument keeps its compact shape.
  getSettings().ALIGN_MULTILINE_METHOD_BRACKETS = false;
  getSettings().CALL_PARAMETERS_RPAREN_ON_NEXT_LINE = false;
  String method =
    "executeCommand(new Command<Boolean>() {\n" +
    " public Boolean run() throws ExecutionException {\n" +
    " return doInterrupt();\n" +
    " }\n" +
    "});";
  doMethodTest(method, method);
}
public void testFieldInColumnsAlignment() {
  // Inspired by IDEA-55147
  // Consecutive field declarations are aligned in columns; blank lines split groups.
  getSettings().ALIGN_GROUP_FIELD_DECLARATIONS = true;
  getSettings().FIELD_ANNOTATION_WRAP = CommonCodeStyleSettings.DO_NOT_WRAP;
  getSettings().VARIABLE_ANNOTATION_WRAP = CommonCodeStyleSettings.DO_NOT_WRAP;
  doTextTest(
    "public class FormattingTest {\n" +
    "\n" +
    " int start = 1;\n" +
    " double end = 2;\n" +
    "\n" +
    " int i2 = 1;\n" +
    " double dd2,\n" +
    " dd3 = 2;\n" +
    "\n" +
    " // asd\n" +
    " char ccc3 = 'a';\n" +
    " double ddd31, ddd32 = 1;\n" +
    "\n" +
    " private\n" +
    " final String s4 = \"\";\n" +
    " private\n" +
    " transient int i4 = 1;\n" +
    "\n" +
    " private final String s5 = \"xxx\";\n" +
    " private transient int iiii5 = 1;\n" +
    " /*sdf*/\n" +
    " @MyAnnotation(value = 1, text = 2) float f5 = 1;\n" +
    "}",
    "public class FormattingTest {\n" +
    "\n" +
    " int start = 1;\n" +
    " double end = 2;\n" +
    "\n" +
    " int i2 = 1;\n" +
    " double dd2,\n" +
    " dd3 = 2;\n" +
    "\n" +
    " // asd\n" +
    " char ccc3 = 'a';\n" +
    " double ddd31, ddd32 = 1;\n" +
    "\n" +
    " private\n" +
    " final String s4 = \"\";\n" +
    " private\n" +
    " transient int i4 = 1;\n" +
    "\n" +
    " private final String s5 = \"xxx\";\n" +
    " private transient int iiii5 = 1;\n" +
    " /*sdf*/\n" +
    " @MyAnnotation(value = 1, text = 2) float f5 = 1;\n" +
    "}"
  );
}
public void testTabsAndFieldsInColumnsAlignment() {
  // Inspired by IDEA-56242
  // Column alignment of grouped fields must also work with tab indentation.
  getSettings().ALIGN_GROUP_FIELD_DECLARATIONS = true;
  getIndentOptions().USE_TAB_CHARACTER = true;
  doTextTest(
    "public class Test {\n" +
    "\tprivate Long field2 = null;\n" +
    "\tprivate final Object field1 = null;\n" +
    "\tprivate int i = 1;\n" +
    "}",
    "public class Test {\n" +
    "\tprivate Long field2 = null;\n" +
    "\tprivate final Object field1 = null;\n" +
    "\tprivate int i = 1;\n" +
    "}"
  );
}
public void testDoNotAlignIfNotEnabled() {
  // Negative case: with group-field alignment disabled, fields are only indented.
  getSettings().ALIGN_GROUP_FIELD_DECLARATIONS = false;
  doTextTest(
    "public class Test {\n" +
    "private Long field2 = null;\n" +
    "private final Object field1 = null;\n" +
    "private int i = 1;\n" +
    "}",
    "public class Test {\n" +
    " private Long field2 = null;\n" +
    " private final Object field1 = null;\n" +
    " private int i = 1;\n" +
    "}"
  );
}
public void testAnnotatedAndNonAnnotatedFieldsInColumnsAlignment() {
  // Inspired by IDEA-60237
  // Annotations on some fields must not break column alignment of the whole group.
  getSettings().ALIGN_GROUP_FIELD_DECLARATIONS = true;
  doTextTest(
    "public class Test {\n" +
    " @Id\n" +
    " private final String name;\n" +
    " @Column(length = 2 * 1024 * 1024 /* 2 MB */)\n" +
    " private String value;\n" +
    " private boolean required;\n" +
    " private String unsetValue;\n" +
    "}",
    "public class Test {\n" +
    " @Id\n" +
    " private final String name;\n" +
    " @Column(length = 2 * 1024 * 1024 /* 2 MB */)\n" +
    " private String value;\n" +
    " private boolean required;\n" +
    " private String unsetValue;\n" +
    "}"
  );
}
public void testAlignThrowsKeyword() {
  // Inspired by IDEA-63820
  // Covers both states of ALIGN_THROWS_KEYWORD on the same fixture.
  getSettings().ALIGN_THROWS_KEYWORD = true;
  doClassTest(
    "public void test()\n" +
    " throws Exception {}",
    "public void test()\n" +
    "throws Exception {\n" +
    "}"
  );
  getSettings().ALIGN_THROWS_KEYWORD = false;
  doClassTest(
    "public void test()\n" +
    " throws Exception {}",
    "public void test()\n" +
    " throws Exception {\n" +
    "}"
  );
}
public void testAlignResourceList() {
  // try-with-resources list: aligned vs continuation-indented, depending on the setting.
  getSettings().KEEP_SIMPLE_BLOCKS_IN_ONE_LINE = true;
  getSettings().ALIGN_MULTILINE_RESOURCES = true;
  doMethodTest("try (MyResource r1 = null;\n" +
               "MyResource r2 = null) { }",
               "try (MyResource r1 = null;\n" +
               " MyResource r2 = null) { }");
  getSettings().ALIGN_MULTILINE_RESOURCES = false;
  doMethodTest("try (MyResource r1 = null;\n" +
               "MyResource r2 = null) { }",
               "try (MyResource r1 = null;\n" +
               " MyResource r2 = null) { }");
}
public void testChainedMethodCallsAfterFieldsChain_WithAlignment() {
  // Chain starting after a field-access chain: wrapped calls align under the first call.
  getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true;
  getSettings().METHOD_CALL_CHAIN_WRAP = CommonCodeStyleSettings.WRAP_ALWAYS;
  doMethodTest(
    "a.current.current.current.getThis().getThis().getThis();",
    "a.current.current.current.getThis()\n" +
    " .getThis()\n" +
    " .getThis();"
  );
  // Field access in the middle of the chain restarts alignment.
  doMethodTest(
    "a.current.current.current.getThis().getThis().getThis().current.getThis().getThis().getThis().getThis();",
    "a.current.current.current.getThis()\n" +
    " .getThis()\n" +
    " .getThis().current.getThis()\n" +
    " .getThis()\n" +
    " .getThis()\n" +
    " .getThis();"
  );
  // A chain with no qualifier at all.
  String onlyMethodCalls = "getThis().getThis().getThis();";
  String formatedMethodCalls = "getThis().getThis()\n" +
                               " .getThis();";
  doMethodTest(onlyMethodCalls, formatedMethodCalls);
}
public void testChainedMethodCallsAfterFieldsChain_WithoutAlignment() {
  // Without alignment, wrapped chain links use plain continuation indent.
  getSettings().ALIGN_MULTILINE_CHAINED_METHODS = false;
  getSettings().METHOD_CALL_CHAIN_WRAP = CommonCodeStyleSettings.WRAP_ALWAYS;
  doMethodTest(
    "a.current.current.current.getThis().getThis().getThis();",
    "a.current.current.current.getThis()\n" +
    " .getThis()\n" +
    " .getThis();"
  );
}
public void testChainedMethodCalls_WithChopDownIfLongOption() {
  // "Chop down if long": the chain wraps on every item only when it exceeds the margin.
  getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true;
  getSettings().METHOD_CALL_CHAIN_WRAP = CommonCodeStyleSettings.WRAP_ON_EVERY_ITEM; // it's equal to "Chop down if long"
  getSettings().RIGHT_MARGIN = 50;
  String before = "a.current.current.getThis().getThis().getThis().getThis().getThis();";
  doMethodTest(
    before,
    "a.current.current.getThis()\n" +
    " .getThis()\n" +
    " .getThis()\n" +
    " .getThis()\n" +
    " .getThis();"
  );
  // Wide enough margin: the chain stays on one line.
  getSettings().RIGHT_MARGIN = 80;
  doMethodTest(before, before);
}
public void testChainedMethodCalls_WithWrapIfNeededOption() {
  // "Wrap if needed": only the links that overflow the margin are wrapped.
  getSettings().ALIGN_MULTILINE_CHAINED_METHODS = false;
  getSettings().METHOD_CALL_CHAIN_WRAP = CommonCodeStyleSettings.WRAP_AS_NEEDED;
  getSettings().RIGHT_MARGIN = 50;
  String before = "a.current.current.getThis().getThis().getThis().getThis();";
  doMethodTest(
    before,
    "a.current.current.getThis().getThis()\n" +
    " .getThis().getThis();"
  );
  // Same result whether or not alignment is enabled for this fixture.
  getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true;
  doMethodTest(
    before,
    "a.current.current.getThis().getThis()\n" +
    " .getThis().getThis();"
  );
  getSettings().RIGHT_MARGIN = 75;
  doMethodTest(before, before);
}
public void testAlignMethodCalls_PassedAsParameters_InMethodCall() {
  // Nested call arguments align under the first argument of the outer call.
  getSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true;
  doMethodTest(
    "test(call1(),\n" +
    " call2(),\n" +
    " call3());\n",
    "test(call1(),\n" +
    " call2(),\n" +
    " call3());\n"
  );
}
public void testLocalVariablesAlignment() {
  // Consecutive local variable declarations are aligned in columns.
  getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
  doMethodTest(
    "int a = 2;\n" +
    "String myString = \"my string\"",
    "int a = 2;\n" +
    "String myString = \"my string\""
  );
}
public void testAlignOnlyDeclarationStatements() {
  // Only declarations participate in alignment; plain assignments interrupt the group.
  getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
  doMethodTest(
    " String s;\n" +
    " int a = 2;\n" +
    "s = \"abs\";\n" +
    "long stamp = 12;",
    "String s;\n" +
    "int a = 2;\n" +
    "s = \"abs\";\n" +
    "long stamp = 12;"
  );
}
public void testDoNotAlignWhenBlankLine() {
  // A blank line terminates an alignment group.
  getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
  doMethodTest(
    "int a = 2;\n" +
    "\n" +
    "String myString = \"my string\"",
    "int a = 2;\n" +
    "\n" +
    "String myString = \"my string\""
  );
}
public void testDoNotAlignWhenGroupInterrupted() {
  // A non-declaration statement between declarations breaks the alignment group.
  getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
  doMethodTest(
    "int a = 2;\n" +
    "System.out.println(\"hi!\")\n" +
    "String myString = \"my string\"",
    "int a = 2;\n" +
    "System.out.println(\"hi!\")\n" +
    "String myString = \"my string\""
  );
}
public void testDoNotAlignMultiDeclarations() {
  // A multi-variable declaration ("int a, b = 2;") is excluded from column alignment.
  getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
  doMethodTest(
    " int a, b = 2;\n" +
    "String myString = \"my string\"",
    "int a, b = 2;\n" +
    "String myString = \"my string\""
  );
}
public void testDoNotAlignMultilineParams() {
  // A declaration with a multiline initializer (anonymous class) must not be column-aligned,
  // regardless of whether it appears first, last, or in the middle of the group.
  getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
  doMethodTest(
    "int a = 12;\n" +
    " Runnable runnable = new Runnable() {\n" +
    " @Override\n" +
    " public void run() {\n" +
    " System.out.println(\"AAA!\");\n" +
    " }\n" +
    "};",
    "int a = 12;\n" +
    "Runnable runnable = new Runnable() {\n" +
    " @Override\n" +
    " public void run() {\n" +
    " System.out.println(\"AAA!\");\n" +
    " }\n" +
    "};"
  );
  doMethodTest(
    " Runnable runnable = new Runnable() {\n" +
    " @Override\n" +
    " public void run() {\n" +
    " System.out.println(\"AAA!\");\n" +
    " }\n" +
    "};\n" +
    "int c = 12;",
    "Runnable runnable = new Runnable() {\n" +
    " @Override\n" +
    " public void run() {\n" +
    " System.out.println(\"AAA!\");\n" +
    " }\n" +
    "};\n" +
    "int c = 12;"
  );
  doMethodTest(
    " int ac = 99;\n" +
    "Runnable runnable = new Runnable() {\n" +
    " @Override\n" +
    " public void run() {\n" +
    " System.out.println(\"AAA!\");\n" +
    " }\n" +
    "};\n" +
    "int c = 12;",
    "int ac = 99;\n" +
    "Runnable runnable = new Runnable() {\n" +
    " @Override\n" +
    " public void run() {\n" +
    " System.out.println(\"AAA!\");\n" +
    " }\n" +
    "};\n" +
    "int c = 12;"
  );
}
public void testDoNotAlign_IfFirstMultiline() {
  // A multiline declaration at the start of a group must not trigger alignment
  // of the following single-line declarations.
  getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
  doMethodTest(
    "int\n" +
    " i = 0;\n" +
    "int[] a = new int[]{1, 2, 0x0052, 0x0053, 0x0054};\n" +
    "int var1 = 1;\n" +
    "int var2 = 2;",
    "int\n" +
    " i = 0;\n" +
    "int[] a = new int[]{1, 2, 0x0052, 0x0053, 0x0054};\n" +
    "int var1 = 1;\n" +
    "int var2 = 2;"
  );
}
public void testAlign_InMethod() {
  // Alignment of declaration groups inside a method body, both before and after
  // an unrelated statement.
  getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
  doClassTest(
    "public void run() {\n" +
    "\n" +
    " int a = 2;\n" +
    " String superString = \"\";\n" +
    "\n" +
    " test(call1(), call2(), call3());\n" +
    " }",
    "public void run() {\n" +
    "\n" +
    " int a = 2;\n" +
    " String superString = \"\";\n" +
    "\n" +
    " test(call1(), call2(), call3());\n" +
    "}"
  );
  doClassTest(
    "public void run() {\n" +
    "\n" +
    " test(call1(), call2(), call3());\n" +
    "\n" +
    " int a = 2;\n" +
    " String superString = \"\";\n" +
    "}",
    "public void run() {\n" +
    "\n" +
    " test(call1(), call2(), call3());\n" +
    "\n" +
    " int a = 2;\n" +
    " String superString = \"\";\n" +
    "}");
}
public void test_Shift_All_AlignedParameters() {
  // Partial reformat (only line 2 selected): the whole aligned parameter group shifts together.
  myLineRange = new TextRange(2, 2);
  getSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true;
  doTextTest(
    REFORMAT_WITH_CONTEXT,
    "public class Test {\n" +
    " \n" +
    " public void fooooo(String foo,\n" +
    " String booo,\n" +
    " String kakadoo) {\n" +
    "\n" +
    " }\n" +
    "\n" +
    "}",
    "public class Test {\n" +
    "\n" +
    " public void fooooo(String foo,\n" +
    " String booo,\n" +
    " String kakadoo) {\n" +
    "\n" +
    " }\n" +
    "\n" +
    "}"
  );
}
public void test_Align_UnselectedField_IfNeeded() {
  // Partial reformat: a field outside the selection is still re-aligned with its group.
  myLineRange = new TextRange(2, 2);
  getSettings().ALIGN_GROUP_FIELD_DECLARATIONS = true;
  doTextTest(
    REFORMAT_WITH_CONTEXT,
    "public class Test {\n" +
    " public int i = 1;\n" +
    " public String iiiiiiiiii = 2;\n" +
    "}",
    "public class Test {\n" +
    " public int i = 1;\n" +
    " public String iiiiiiiiii = 2;\n" +
    "}"
  );
}
public void test_Align_UnselectedVariable_IfNeeded() {
  // Partial reformat: a local variable outside the selection is still aligned with its group.
  myLineRange = new TextRange(3, 3);
  getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
  doTextTest(
    REFORMAT_WITH_CONTEXT,
    "public class Test {\n" +
    " public void test() {\n" +
    " int s = 2;\n" +
    " String sssss = 3;\n" +
    " }\n" +
    "}",
    "public class Test {\n" +
    " public void test() {\n" +
    " int s = 2;\n" +
    " String sssss = 3;\n" +
    " }\n" +
    "}"
  );
}
public void test_Align_ConsecutiveVars_InsideIfBlock() {
  // Declaration alignment also applies inside an if block.
  getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
  doMethodTest(
    "if (a > 2) {\n" +
    "int a=2;\n" +
    "String name=\"Yarik\";\n" +
    "}\n",
    "if (a > 2) {\n" +
    " int a = 2;\n" +
    " String name = \"Yarik\";\n" +
    "}\n"
  );
}
public void test_Align_ConsecutiveVars_InsideForBlock() {
  // Declaration alignment also applies inside a for-loop body.
  getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
  doMethodTest(
    " for (int i = 0; i < 10; i++) {\n" +
    " int a=2;\n" +
    " String name=\"Xa\";\n" +
    " }\n",
    "for (int i = 0; i < 10; i++) {\n" +
    " int a = 2;\n" +
    " String name = \"Xa\";\n" +
    "}\n"
  );
}
// Alignment is applied independently per block: try, catch and finally each align
// their own pair of declarations; catch/finally are also joined onto the closing brace.
public void test_Align_ConsecutiveVars_InsideTryBlock() {
getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
doMethodTest(
"  try {\n" +
"    int x = getX();\n" +
"    String name = \"Ha\";\n" +
"  }\n" +
"  catch (IOException exception) {\n" +
"    int y = 12;\n" +
"    String test = \"Test\";\n" +
"  }\n" +
"  finally {\n" +
"    int z = 12;\n" +
"    String zzzz = \"pnmhd\";\n" +
"  }\n",
"try {\n" +
"    int    x    = getX();\n" +
"    String name = \"Ha\";\n" +
"} catch (IOException exception) {\n" +
"    int    y    = 12;\n" +
"    String test = \"Test\";\n" +
"} finally {\n" +
"    int    z    = 12;\n" +
"    String zzzz = \"pnmhd\";\n" +
"}\n"
);
}
// Declarations inside an anonymous code block are aligned; statements outside the block
// (separated by the blank line) are left as independent, unaligned declarations.
public void test_Align_ConsecutiveVars_InsideCodeBlock() {
getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
doMethodTest(
"    System.out.println(\"AAAA\");\n" +
"    int a = 2;\n" +
"    \n" +
"    {\n" +
"      int x=2;\n" +
"      String name=3;\n" +
"    }\n",
"System.out.println(\"AAAA\");\n" +
"int a = 2;\n" +
"\n" +
"{\n" +
"    int    x    = 2;\n" +
"    String name = 3;\n" +
"}\n"
);
}
// Idempotence check: comments interleaved in an already-aligned chained call must keep
// their alignment with the chained method calls (input equals expected output).
public void test_AlignComments_BetweenChainedMethodCalls() {
getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true;
doMethodTest(
"ActionBarPullToRefresh.from(getActivity())\n" +
"        // Mark the ListView as pullable\n" +
"        .theseChildrenArePullable(eventsListView)\n" +
"        // Set the OnRefreshListener\n" +
"        .listener(this)\n" +
"        // Use the AbsListView delegate for StickyListHeadersListView\n" +
"        .useViewDelegate(StickyListHeadersListView.class, new AbsListViewDelegate())\n" +
"        // Finally commit the setup to our PullToRefreshLayout\n" +
"        .setup(mPullToRefreshLayout);",
"ActionBarPullToRefresh.from(getActivity())\n" +
"        // Mark the ListView as pullable\n" +
"        .theseChildrenArePullable(eventsListView)\n" +
"        // Set the OnRefreshListener\n" +
"        .listener(this)\n" +
"        // Use the AbsListView delegate for StickyListHeadersListView\n" +
"        .useViewDelegate(StickyListHeadersListView.class, new AbsListViewDelegate())\n" +
"        // Finally commit the setup to our PullToRefreshLayout\n" +
"        .setup(mPullToRefreshLayout);"
);
}
// A comment inside a chained builder call must be re-indented together with the chain.
public void test_AlignComments_2() {
getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true;
doClassTest(
"public String returnWithBuilder2() {\n" +
"    return MoreObjects\n" +
"        .toStringHelper(this)\n" +
"        .add(\"value\", value)\n" +
"        // comment\n" +
"        .toString();\n" +
"  }",
"public String returnWithBuilder2() {\n" +
"    return MoreObjects\n" +
"            .toStringHelper(this)\n" +
"            .add(\"value\", value)\n" +
"            // comment\n" +
"            .toString();\n" +
"}"
);
}
// Subsequent simple one-line methods get their bodies padded so the opening braces align.
public void test_AlignSubsequentOneLineMethods() {
getSettings().KEEP_SIMPLE_METHODS_IN_ONE_LINE = true;
getSettings().ALIGN_SUBSEQUENT_SIMPLE_METHODS = true;
doTextTest(
"public class Test {\n" +
"\n" +
"    public void testSuperDuperFuckerMother() { System.out.println(\"AAA\"); }\n" +
"\n" +
"    public void testCounterMounter() { System.out.println(\"XXXX\"); }\n" +
"\n" +
"}",
"public class Test {\n" +
"\n" +
"    public void testSuperDuperFuckerMother() { System.out.println(\"AAA\"); }\n" +
"\n" +
"    public void testCounterMounter()         { System.out.println(\"XXXX\"); }\n" +
"\n" +
"}"
);
}
}
| |
/*
* #%L
* wcm.io
* %%
* Copyright (C) 2014 wcm.io
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package io.wcm.caravan.hal.resource;
import java.util.List;
import java.util.regex.Pattern;
import org.osgi.annotation.versioning.ProviderType;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.collect.ListMultimap;
/**
 * Bean representation of a HAL link.
 */
@ProviderType
public final class Link implements HalObject {

  /**
   * Pattern that will hit an RFC 6570 URI template.
   */
  private static final Pattern URI_TEMPLATE_PATTERN = Pattern.compile("\\{.+\\}");

  /** Underlying JSON object that stores all link properties. */
  private final ObjectNode model;

  /** HAL resource this link is attached to; null while detached. */
  private HalResource context;

  /**
   * @param model JSON model
   * @throws IllegalArgumentException if the node is not an {@link ObjectNode}
   */
  public Link(JsonNode model) {
    if (!(model instanceof ObjectNode)) {
      throw new IllegalArgumentException("the given model must be of type ObjectNode");
    }
    this.model = (ObjectNode)model;
  }

  /**
   * @param model JSON model
   */
  public Link(ObjectNode model) {
    this.model = model;
  }

  /**
   * Creates a link with a new model that only contains the given URI
   * @param href the URI to put in the "href" property
   */
  public Link(String href) {
    this.model = JsonNodeFactory.instance.objectNode();
    this.setHref(href);
  }

  @Override
  public ObjectNode getModel() {
    return model;
  }

  /** Reads a string property from the model; null when the property is absent. */
  private String stringProperty(String key) {
    return model.path(key).asText(null);
  }

  /** Writes a string property on the model and returns this link for chaining. */
  private Link withStringProperty(String key, String value) {
    model.put(key, value);
    return this;
  }

  /**
   * @return the type
   */
  public String getType() {
    return stringProperty("type");
  }

  /**
   * @param type the type to set
   * @return Link
   */
  public Link setType(String type) {
    return withStringProperty("type", type);
  }

  /**
   * @return the deprecation
   */
  public String getDeprecation() {
    return stringProperty("deprecation");
  }

  /**
   * @param deprecation the deprecation to set
   * @return Link
   */
  public Link setDeprecation(String deprecation) {
    return withStringProperty("deprecation", deprecation);
  }

  /**
   * @return the name
   */
  public String getName() {
    return stringProperty("name");
  }

  /**
   * @param name the name to set
   * @return Link
   */
  public Link setName(String name) {
    return withStringProperty("name", name);
  }

  /**
   * @return the profile
   */
  public String getProfile() {
    return stringProperty("profile");
  }

  /**
   * @param profile the profile to set
   * @return Link
   */
  public Link setProfile(String profile) {
    return withStringProperty("profile", profile);
  }

  /**
   * @return the title
   */
  public String getTitle() {
    return stringProperty("title");
  }

  /**
   * @param title the title to set
   * @return Link
   */
  public Link setTitle(String title) {
    return withStringProperty("title", title);
  }

  /**
   * @return the hreflang
   */
  public String getHreflang() {
    return stringProperty("hreflang");
  }

  /**
   * @param hreflang the hreflang to set
   * @return Link
   */
  public Link setHreflang(String hreflang) {
    return withStringProperty("hreflang", hreflang);
  }

  /**
   * @return the href
   */
  public String getHref() {
    return stringProperty("href");
  }

  /**
   * @param href the href to set
   * @return Link
   */
  public Link setHref(String href) {
    model.put("href", href);
    // an href containing a "{...}" expression is an RFC 6570 URI template;
    // note the flag is only ever set here, never cleared
    if (href != null && URI_TEMPLATE_PATTERN.matcher(href).find()) {
      setTemplated(true);
    }
    return this;
  }

  /**
   * @return is templated
   */
  public boolean isTemplated() {
    return model.path("templated").asBoolean();
  }

  /**
   * @param templated the templated to set
   * @return Link
   */
  public Link setTemplated(boolean templated) {
    model.put("templated", templated);
    return this;
  }

  /**
   * Removes this link from its context resource's JSON representation
   * @throws IllegalStateException if this link was never added to a resource, or has already been removed
   */
  public void remove() {
    if (context == null) {
      throw new IllegalStateException("link with href=" + getHref() + " can not be removed, because it's not part of a HAL resource tree");
    }
    // iterate over all links grouped by relation (removal requires knowing the relation)
    ListMultimap<String, Link> linksByRelation = context.getLinks();
    for (String relation : linksByRelation.keySet()) {
      List<Link> candidates = linksByRelation.get(relation);
      // indexed loop: the index is required to remove the correct entry
      for (int index = 0; index < candidates.size(); index++) {
        // identity comparison on the model: only remove this exact link instance's node
        if (candidates.get(index).getModel() == model) {
          context.removeLink(relation, index);
          context = null;
          return;
        }
      }
    }
    throw new IllegalStateException("the last known context resource of link with href=" + getHref() + " no longer contains this link");
  }

  /**
   * @param contextResource the HAL resource that contains this link
   */
  void setContext(HalResource contextResource) {
    context = contextResource;
  }

  @Override
  public int hashCode() {
    // consistent with equals(): both are based on the serialized model
    return model.toString().hashCode();
  }

  @Override
  public boolean equals(Object obj) {
    if (obj instanceof Link) {
      return model.toString().equals(((Link)obj).model.toString());
    }
    return false;
  }

}
| |
/*
* Copyright 2017 Piruin Panichphol
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package me.piruin.spinney;
import android.annotation.SuppressLint;
import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.graphics.Canvas;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v7.app.AlertDialog;
import android.support.v7.widget.AppCompatEditText;
import android.util.AttributeSet;
import java.util.ArrayList;
import java.util.List;
/**
 * Replacement of vanilla Spinner with Super-power
 *
 * @param <T> Type of Selectable choice to use with Spinney
 */
public class Spinney<T> extends AppCompatEditText {
// Fallback presenter shared by all Spinney instances: renders any item via toString().
static ItemPresenter defaultItemPresenter = new ItemPresenter() {
@Override public String getLabelOf(Object item, int position) {
return item.toString();
}
};
private static boolean defaultSafeMode = false;
private final CharSequence hint;
/** Dialog object to show selectable item of Spinney can be Searchable or normal List Dialog */
private Dialog dialog;
/** OnItemSelectedListener set by Library user */
private OnItemSelectedListener<T> itemSelectedListener;
/** Internal OnItemSelectedListeners use when filterBy() was called */
private List<OnItemSelectedListener<T>> _itemSelectedListeners = new ArrayList<>();
private ItemPresenter itemPresenter = defaultItemPresenter;
private ItemPresenter itemCaptionPresenter;
private SpinneyAdapter<T> adapter;
private T selectedItem;
private boolean safeMode = defaultSafeMode;
public Spinney(Context context) {
this(context, null);
}
public Spinney(Context context, AttributeSet attrs) {
this(context, attrs, android.R.attr.editTextStyle);
}
public Spinney(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
/*
Save hint at constructor because, after this getHint() will return null
when use Spinney as child of Support's TextInputLayout.
*/
hint = getHint();
}
/**
 * <pre>
 * Enable safe mode to all spinney use in Application by default.
 * By the way, only use this in case of emergency
 * </pre>
 *
 * @param enable or disable safe mode
 */
public static void enableSafeModeByDefault(boolean enable) {
defaultSafeMode = enable;
}
/**
 * replace default global ItemPresenter this should be set at Application.onCreate()
 *
 * @param defaultItemDisplayer to present selected object on spinney view
 */
public static void setDefaultItemPresenter(@NonNull ItemPresenter defaultItemDisplayer) {
Spinney.defaultItemPresenter = defaultItemDisplayer;
}
/**
 * <pre>
 * Use this when number of items is more than user can scan by their eye.
 *
 * This method use input list of item to create SpinneyAdapter; if want to custom,
 * see setSearchableAdapter(SpinneyAdapter)
 * </pre>
 *
 * @param items list of item use
 */
public final void setSearchableItem(@NonNull final List<T> items) {
SpinneyAdapter<T> adapter = new SpinneyAdapter<>(getContext(), items, itemPresenter);
adapter.setCaptionPresenter(itemCaptionPresenter);
setSearchableAdapter(adapter);
}
/**
 * Call this when build-in SpinneyAdapter not enough for you requirement
 *
 * @param adapter spinneyAdapter to use with SpinneyDialog
 */
public final void setSearchableAdapter(@NonNull final SpinneyAdapter<T> adapter) {
this.adapter = adapter;
SpinneyDialog searchableListDialog = new SpinneyDialog(getContext());
searchableListDialog.setAdapter(adapter);
searchableListDialog.setHint(hint);
searchableListDialog.setOnItemSelectedListener(new SpinneyDialog.OnItemSelectedListener<T>() {
@Override public boolean onItemSelected(@NonNull Object item, int position) {
// unchecked cast: the dialog hands back Object; the adapter was built from List<T>
whenItemSelected((T) item, position);
return true;
}
});
dialog = searchableListDialog;
}
// Central selection pipeline: updates selectedItem, the displayed text, the internal
// filterBy listeners and (for non-null items only) the user listener.
private void whenItemSelected(@Nullable T item, int selectedIndex) {
this.selectedItem = item;
if (item == null) {
setText(null);
for (OnItemSelectedListener _listener : _itemSelectedListeners)
_listener.onItemSelected(Spinney.this, item, selectedIndex);
// NOTE(review): the user-supplied itemSelectedListener is NOT notified when the
// selection is cleared (null item) — only dependent spinneys are; confirm intended.
} else {
setText(itemPresenter.getLabelOf(item, selectedIndex));
for (OnItemSelectedListener _listener : _itemSelectedListeners)
_listener.onItemSelected(Spinney.this, item, selectedIndex);
if (itemSelectedListener != null) {
itemSelectedListener.onItemSelected(Spinney.this, item, selectedIndex);
}
}
}
/**
 * enable safeMode to tell Spinney not throw exception when set selectedItem that not found in
 * adapter.
 * not recommend this in app that need consistency
 *
 * @param enable or disable safe mode
 */
public void setSafeModeEnable(boolean enable) {
this.safeMode = enable;
}
/**
 * Just set List of item on Dialog! Don't worry with Adapter Spinney will handler with it
 *
 * @param items list of item use
 */
public final void setItems(@NonNull final List<T> items) {
adapter = new SpinneyAdapter<>(getContext(), items, itemPresenter);
adapter.setCaptionPresenter(itemCaptionPresenter);
AlertDialog.Builder builder = new AlertDialog.Builder(getContext());
// prefer a live hint (e.g. from TextInputLayout) and fall back to the one captured in the constructor
builder.setTitle(getHint() != null ? getHint() : hint);
builder.setAdapter(adapter, new DialogInterface.OnClickListener() {
@Override public void onClick(DialogInterface dialogInterface, int selectedIndex) {
// unchecked cast: adapter item type is erased; items originate from List<T>
T selectedItem = (T) adapter.getItem(selectedIndex);
// re-resolve the position because the dialog index may differ from the adapter position
whenItemSelected(selectedItem, adapter.findPositionOf(selectedItem));
}
});
dialog = builder.create();
}
/**
 * Set parent spinney and Condition to filter selectable item by selected item of parent Spinney
 * <pre>
 * {@code
 * countrySpinney.setSearchableItem(Data.country);
 * citySpinney.setItems(Data.cities);
 * citySpinney.filterBy(countrySpinney, new Spinney.Condition<DatabaseItem, DatabaseItem<>() {
 *   public boolean filter(DatabaseItem selectedCountry, DatabaseItem eachCity) {
 *     return eachCity.getParentId() == selectedCountry.getId();
 *   }});
 * }
 * </pre>
 *
 * Please note you must setSelectedItem() of parent Spinney after call filterBy()
 *
 * @param parent Spinney that it selected item will affect to this spinney
 * @param filter condition to filter item on this spinney by selected item of parent
 * @param <K> type of item on parent Spinney
 */
public final <K> void filterBy(Spinney<K> parent, final Condition<T, K> filter) {
// NOTE(review): requires this spinney's adapter to be set already (setItems/
// setSearchableItem), otherwise the calls below NPE — confirm documented ordering.
parent._itemSelectedListeners.add(new OnItemSelectedListener<K>() {
@Override public void onItemSelected(Spinney parent, K parentSelectedItem, int position) {
if (parentSelectedItem == null) {
// parent cleared: clear our selection and drop the filter entirely
clearSelection();
adapter.clearCondition();
return;
}
adapter.updateCondition(parentSelectedItem, filter);
// drop our selection when it is filtered out by the new parent value
if (!adapter.isFilteredListContain(selectedItem)) {
clearSelection();
}
}
});
adapter.setDependencyMode(true);
adapter.clearCondition();
}
public final void clearSelection() {
// -1 marks "no position"; whenItemSelected(null, ...) clears text and notifies dependents
whenItemSelected(null, -1);
}
/** @return selected item, this may be null */
@Nullable public final T getSelectedItem() {
return selectedItem;
}
/**
 * Must call after adapter or item have already set also after call <code>filterBy</code>
 *
 * @param item to set as selected item
 * @throws IllegalArgumentException if not found item in adapter of spinney, enableSafeMode() to
 * disable this exception. safeMode is disable by default
 */
public final void setSelectedItem(@Nullable T item) {
if (adapter == null) {
throw new IllegalStateException("Must set adapter or item before call this");
}
int positionOf = adapter.findPositionOf(item);
if (positionOf >= 0) {
whenItemSelected(item, positionOf);
} else if (!safeMode) throw new IllegalArgumentException("Not found specify item");
// in safe mode an unknown item is silently ignored (selection left unchanged)
}
/**
 * @return check that spinney have item to select or have nothing by filter
 */
public final boolean isSelectable() {
return adapter != null && adapter.getCount() > 0;
}
@SuppressLint("ClickableViewAccessibility") @Override public final boolean performClick() {
// NOTE(review): dialog stays null until setItems()/setSearchableAdapter() is called,
// so clicking before that throws NPE — confirm whether a guard is wanted.
dialog.show();
return true;
}
@Override protected final void onDraw(Canvas canvas) {
super.onDraw(canvas);
// re-applied on every draw: keep the EditText behaving like a click-only spinner
setFocusable(false);
setClickable(true);
setLongClickable(false);
}
/** @return position of selected item, -1 is nothing select */
public final int getSelectedItemPosition() {
// NOTE(review): NPEs when no adapter was set yet — confirm callers guard with isSelectable().
return adapter.findPositionOf(selectedItem);
}
/**
 * This getter may help if you really need it. By the way, Use with CAUTION!
 *
 * @return SpinneyAdapter currently use by Spinney
 */
public final SpinneyAdapter<T> getAdapter() {
return adapter;
}
/**
 * ItemPresenter to use only on instance of Spinney. Spinney will use global presenter if this not
 * set
 *
 * @param itemPresenter to control how spinney and (Searchable)listDialog represent selectable
 * item instead of global ItemPresenter
 */
public final void setItemPresenter(@NonNull ItemPresenter<T> itemPresenter) {
this.itemPresenter = itemPresenter;
}
public final void setItemCaptionPresenter(@NonNull ItemPresenter<T> itemPresenter) {
this.itemCaptionPresenter = itemPresenter;
}
/** @param itemSelectedListener to callback when item was selected */
public final void setOnItemSelectedListener(
@NonNull OnItemSelectedListener<T> itemSelectedListener) {
this.itemSelectedListener = itemSelectedListener;
}
/**
 * Callback like use with vanilla Spinner
 *
 * @param <T> type of Selectable Item. must be same as type as specify at Spinney object
 */
public interface OnItemSelectedListener<T> {
/**
 * @param view Spinney view that fire this method
 * @param selectedItem user selected item
 * @param position at current list
 */
void onItemSelected(Spinney view, T selectedItem, int position);
}
/** Control how item used with Spinney should present as String on Spinney view and Dialog */
public interface ItemPresenter<T> {
/**
 * Time to parse item to present on Spinney
 *
 * @param item target item to parse
 * @param position of item when it was select
 * @return represent String of item
 */
String getLabelOf(T item, int position);
}
/**
 * Injectable condition to control whether item should present on list of Spinney or not!
 *
 * @param <T> Type of item to check
 * @param <K> Type of value to may use as condition to present T
 */
public interface Condition<T, K> {
/**
 * @param parentItem selected object of parent spinney may use as Condition to filter item
 * @param item to check whether it should present or not
 * @return true if item should present, false otherwise
 */
boolean filter(K parentItem, T item);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.statistics;
import static org.apache.geode.distributed.ConfigurationProperties.ARCHIVE_DISK_SPACE_LIMIT;
import static org.apache.geode.distributed.ConfigurationProperties.ARCHIVE_FILE_SIZE_LIMIT;
import static org.apache.geode.distributed.ConfigurationProperties.ENABLE_TIME_STATISTICS;
import static org.apache.geode.distributed.ConfigurationProperties.LOCATORS;
import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
import static org.apache.geode.distributed.ConfigurationProperties.STATISTIC_ARCHIVE_FILE;
import static org.apache.geode.distributed.ConfigurationProperties.STATISTIC_SAMPLE_RATE;
import static org.apache.geode.distributed.ConfigurationProperties.STATISTIC_SAMPLING_ENABLED;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeFalse;
import java.io.File;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.logging.log4j.Logger;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TemporaryFolder;
import org.junit.rules.TestName;
import org.apache.geode.Statistics;
import org.apache.geode.StatisticsType;
import org.apache.geode.distributed.DistributedSystem;
import org.apache.geode.distributed.internal.InternalDistributedSystem;
import org.apache.geode.internal.GemFireVersion;
import org.apache.geode.internal.PureJavaMode;
import org.apache.geode.internal.cache.control.HeapMemoryMonitor;
import org.apache.geode.internal.logging.LogService;
import org.apache.geode.internal.net.SocketCreator;
import org.apache.geode.internal.statistics.GemFireStatSampler.LocalStatListenerImpl;
import org.apache.geode.internal.statistics.platform.OsStatisticsFactory;
import org.apache.geode.internal.statistics.platform.ProcessStats;
import org.apache.geode.internal.stats50.VMStats50;
import org.apache.geode.internal.util.StopWatch;
import org.apache.geode.test.junit.categories.StatisticsTest;
/**
* Integration tests for {@link GemFireStatSampler}.
*
* @since GemFire 7.0
*/
@Category({StatisticsTest.class})
public class GemFireStatSamplerIntegrationTest extends StatSamplerTestCase {
// Not referenced in the visible tests; presumably kept for debugging — TODO confirm.
private static final Logger logger = LogService.getLogger();
// Expected sample rate (ms); asserted against statSampler.getSampleRate() in the tests below.
private static final int STAT_SAMPLE_RATE = 1000;
// Loner DistributedSystem; presumably assigned by connect() (defined outside this view).
private DistributedSystem system;
// Per-test working directory, resolved from the TemporaryFolder rule in setUp().
private File testDir;
@Rule
public TemporaryFolder temporaryFolder = new TemporaryFolder();
@Rule
public TestName testName = new TestName();
/** Resolves the per-test working directory from the TemporaryFolder rule. */
@Before
public void setUp() throws Exception {
  final File root = this.temporaryFolder.getRoot();
  this.testDir = root;
  assertTrue(root.exists());
}
/**
 * Removes the loner DistributedSystem at the end of each test.
 */
@After
public void tearDown() throws Exception {
// Undo the KB-instead-of-MB size-limit override used by archive-rolling tests
// (presumably the same constant as HostStatSampler's — TODO confirm).
System.clearProperty(GemFireStatSampler.TEST_FILE_SIZE_LIMIT_IN_KB_PROPERTY);
disconnect();
}
/**
 * Tests the majority of getters and the basic functionality of the sampler.
 *
 * This test is skipped when running on Windows 7 because ProcessStats is not created for this OS.
 * See #45395.
 */
@Test
public void testBasics() throws Exception {
  connect(createGemFireProperties());

  GemFireStatSampler statSampler = getGemFireStatSampler();
  assertTrue(statSampler.waitForInitialization(5000));

  // default configuration: no archive limits, configured rate, sampling on
  assertEquals(0, statSampler.getArchiveFileSizeLimit());
  assertEquals(0, statSampler.getArchiveDiskSpaceLimit());
  assertEquals(STAT_SAMPLE_RATE, statSampler.getSampleRate());
  assertTrue(statSampler.isSamplingEnabled());

  // mod count and statistics array must agree with the manager's count
  int statsCount = statSampler.getStatisticsManager().getStatisticsCount();
  assertEquals(statsCount, statSampler.getStatisticsModCount());
  assertEquals(statsCount, statSampler.getStatisticsManager().getStatisticsCount());
  assertEquals(statsCount, statSampler.getStatistics().length);

  assertEquals(getStatisticsManager().getId(), statSampler.getSystemId());
  assertTrue(statSampler.getSystemStartTime() < System.currentTimeMillis());
  assertEquals(SocketCreator.getHostName(SocketCreator.getLocalHost()),
      statSampler.getSystemDirectoryPath());

  VMStatsContract vmStats = statSampler.getVMStats();
  assertNotNull(vmStats);
  assertTrue(vmStats instanceof VMStats50);
  /*
   * NOTE: VMStats50 is not an instance of Statistics but instead its instance contains 3
   * instances of Statistics: 1) vmStats 2) heapMemStats 3) nonHeapMemStats
   */

  // getMethod throws NoSuchMethodException when absent, so reaching this point already
  // proves the accessor exists; the assert just documents the expectation
  Method getProcessStats = getGemFireStatSampler().getClass().getMethod("getProcessStats");
  assertNotNull(getProcessStats);
}
// Verifies OS-specific ProcessStats/SystemStats creation and the product description.
@Test
public void testBasicProcessStats() throws Exception {
final String osName = System.getProperty("os.name", "unknown");
// skip entirely on Windows (see #45395 below)
assumeFalse(osName.contains("Windows"));
connect(createGemFireProperties());
GemFireStatSampler statSampler = getGemFireStatSampler();
assertTrue(statSampler.waitForInitialization(5000));
ProcessStats processStats = statSampler.getProcessStats();
AllStatistics allStats = new AllStatistics(statSampler);
if (osName.equals("SunOS")) {
assertNotNull(processStats);
assertTrue(PureJavaMode.osStatsAreAvailable());
assertTrue(allStats.containsStatisticsType("SolarisProcessStats"));
assertTrue(allStats.containsStatisticsType("SolarisSystemStats"));
} else if (osName.startsWith("Windows")) {
// NOTE(review): unreachable — assumeFalse above skips every Windows run; kept
// presumably for when the assumption is lifted. Confirm before relying on it.
// fails on Windows 7: 45395 "ProcessStats are not created on Windows 7"
assertNotNull("ProcessStats were not created on " + osName, processStats);
assertTrue(PureJavaMode.osStatsAreAvailable());
assertTrue(allStats.containsStatisticsType("WindowsProcessStats"));
assertTrue(allStats.containsStatisticsType("WindowsSystemStats"));
} else if (osName.startsWith("Linux")) {
assertNotNull(processStats);
assertTrue(PureJavaMode.osStatsAreAvailable());
assertTrue(allStats.containsStatisticsType("LinuxProcessStats"));
assertTrue(allStats.containsStatisticsType("LinuxSystemStats"));
} else if (osName.equals("Mac OS X")) {
// no native stats implementation on macOS
assertNull(processStats);
assertFalse(PureJavaMode.osStatsAreAvailable());
assertFalse(allStats.containsStatisticsType("OSXProcessStats"));
assertFalse(allStats.containsStatisticsType("OSXSystemStats"));
} else {
assertNull(processStats);
}
String productDesc = statSampler.getProductDescription();
assertTrue(productDesc.contains(GemFireVersion.getGemFireVersion()));
assertTrue(productDesc.contains(GemFireVersion.getBuildId()));
assertTrue(productDesc.contains(GemFireVersion.getSourceDate()));
}
/**
 * Tests that the configured archive file is created and exists.
 */
@Test
public void testArchiveFileExists() throws Exception {
  final String dir = this.testDir.getAbsolutePath();
  final String archiveFileName = dir + File.separator + this.testName.getMethodName() + ".gfs";
  // build the expected File from the same string instead of duplicating the concatenation
  final File expectedArchiveFile = new File(archiveFileName);

  Properties props = createGemFireProperties();
  props.setProperty(STATISTIC_ARCHIVE_FILE, archiveFileName);
  connect(props);

  GemFireStatSampler statSampler = getGemFireStatSampler();
  assertTrue(statSampler.waitForInitialization(5000));

  final File archiveFile = statSampler.getArchiveFileName();
  assertNotNull(archiveFile);
  assertEquals(expectedArchiveFile, archiveFile);

  // the sampler creates the archive asynchronously; poll until it shows up
  waitForFileToExist(archiveFile, 5000, 10);

  assertTrue(
      "File name incorrect: archiveFile.getName()=" + archiveFile.getName()
          + " archiveFile.getAbsolutePath()=" + archiveFile.getAbsolutePath()
          + " getCanonicalPath()" + archiveFile.getCanonicalPath(),
      archiveFileName.contains(archiveFile.getName()));
}
/**
 * Tests the statistics sample rate within an acceptable margin of error.
 */
@Test
public void testSampleRate() throws Exception {
connect(createGemFireProperties());
GemFireStatSampler statSampler = getGemFireStatSampler();
assertTrue(statSampler.waitForInitialization(5000));
assertEquals(STAT_SAMPLE_RATE, statSampler.getSampleRate());
assertTrue(getStatisticsManager().getStatListModCount() > 0);
List<Statistics> statistics = getStatisticsManager().getStatsList();
assertNotNull(statistics);
assertTrue(statistics.size() > 0);
// the sampler publishes exactly one "StatSampler" statistics instance
StatisticsType statSamplerType = getStatisticsManager().findType("StatSampler");
Statistics[] statsArray = getStatisticsManager().findStatisticsByType(statSamplerType);
assertEquals(1, statsArray.length);
final Statistics statSamplerStats = statsArray[0];
final int initialSampleCount = statSamplerStats.getInt("sampleCount");
final int expectedSampleCount = initialSampleCount + 2;
// observing two more samples within 5s shows sampling runs near the configured rate
waitForExpectedStatValue(statSamplerStats, "sampleCount", expectedSampleCount, 5000, 10);
}
/**
 * Adds a LocalStatListener for an individual stat. Validates that it receives notifications.
 * Removes the listener and validates that it was in fact removed and no longer receives
 * notifications.
 */
@Test
public void testLocalStatListener() throws Exception {
connect(createGemFireProperties());
GemFireStatSampler statSampler = getGemFireStatSampler();
assertTrue(statSampler.waitForInitialization(5000));
// getMethod throws if absent, so these reflective lookups double as API-presence checks
Method getLocalListeners = getGemFireStatSampler().getClass().getMethod("getLocalListeners");
assertNotNull(getLocalListeners);
Method addLocalStatListener = getGemFireStatSampler().getClass()
.getMethod("addLocalStatListener", LocalStatListener.class, Statistics.class, String.class);
assertNotNull(addLocalStatListener);
Method removeLocalStatListener = getGemFireStatSampler().getClass()
.getMethod("removeLocalStatListener", LocalStatListener.class);
assertNotNull(removeLocalStatListener);
// validate that there are no listeners
assertTrue(statSampler.getLocalListeners().isEmpty());
// add a listener for sampleCount stat in StatSampler statistics
StatisticsType statSamplerType = getStatisticsManager().findType("StatSampler");
Statistics[] statsArray = getStatisticsManager().findStatisticsByType(statSamplerType);
assertEquals(1, statsArray.length);
final Statistics statSamplerStats = statsArray[0];
final String statName = "sampleCount";
final AtomicInteger sampleCountValue = new AtomicInteger(0);
final AtomicInteger sampleCountChanged = new AtomicInteger(0);
LocalStatListener listener = new LocalStatListener() {
public void statValueChanged(double value) {
sampleCountValue.set((int) value);
sampleCountChanged.incrementAndGet();
}
};
statSampler.addLocalStatListener(listener, statSamplerStats, statName);
assertTrue(statSampler.getLocalListeners().size() == 1);
// there's a level of indirection here and some protected member fields
LocalStatListenerImpl lsli =
(LocalStatListenerImpl) statSampler.getLocalListeners().iterator().next();
assertEquals("sampleCount", lsli.stat.getName());
// wait for the listener to update 4 times
final int expectedChanges = 4;
boolean done = false;
try {
// poll every 10ms up to 5s; the loop's update clause re-evaluates the exit condition
for (StopWatch time = new StopWatch(true); !done && time.elapsedTimeMillis() < 5000; done =
(sampleCountChanged.get() >= expectedChanges)) {
Thread.sleep(10);
}
} catch (InterruptedException e) {
// restore the interrupt flag rather than swallowing the interruption
Thread.currentThread().interrupt();
}
assertTrue("Waiting for sampleCountChanged >= " + expectedChanges, done);
// validate that the listener fired and updated the value
assertTrue(sampleCountValue.get() > 0);
assertTrue(sampleCountChanged.get() >= expectedChanges);
// remove the listener
statSampler.removeLocalStatListener(listener);
// snapshot the counters so later asserts can prove the listener stayed silent
final int expectedSampleCountValue = sampleCountValue.get();
final int expectedSampleCountChanged = sampleCountChanged.get();
// validate that there are no listeners now
assertTrue(statSampler.getLocalListeners().isEmpty());
// wait for 2 stat samples to occur
waitForStatSample(statSamplerStats, expectedSampleCountValue, 5000, 10);
// validate that the listener did not fire
assertEquals(expectedSampleCountValue, sampleCountValue.get());
assertEquals(expectedSampleCountChanged, sampleCountChanged.get());
}
/**
 * Invokes stop() and then validates that the sampler did in fact stop.
 */
@Test
public void testStop() throws Exception {
connect(createGemFireProperties());
GemFireStatSampler statSampler = getGemFireStatSampler();
assertTrue(statSampler.waitForInitialization(5000));
// validate the stat sampler is running
StatisticsType statSamplerType = getStatisticsManager().findType("StatSampler");
Statistics[] statsArray = getStatisticsManager().findStatisticsByType(statSamplerType);
assertEquals(1, statsArray.length);
final Statistics statSamplerStats = statsArray[0];
final int initialSampleCount = statSamplerStats.getInt("sampleCount");
final int expectedSampleCount = initialSampleCount + 2;
// two further samples prove the sampler thread is alive before we stop it
waitForStatSample(statSamplerStats, expectedSampleCount, 20000, 10);
// stop the stat sampler
statSampler.stop();
// validate the stat sampler has stopped
final int stoppedSampleCount = statSamplerStats.getInt("sampleCount");
// the following should timeout rather than complete
assertStatValueDoesNotChange(statSamplerStats, "sampleCount", stoppedSampleCount, 5000, 10);
assertEquals(stoppedSampleCount, statSamplerStats.getInt("sampleCount"));
}
/**
* Verifies that archive rolling works correctly when archive-file-size-limit is specified.
*/
@Test
public void testArchiveRolling() throws Exception {
final String dirName = this.testDir.getAbsolutePath() + File.separator + this.testName;
new File(dirName).mkdirs();
final String archiveFileName = dirName + File.separator + this.testName + ".gfs";
final File archiveFile = new File(archiveFileName);
final File archiveFile1 = new File(dirName + File.separator + this.testName + "-01-01.gfs");
final File archiveFile2 = new File(dirName + File.separator + this.testName + "-01-02.gfs");
final File archiveFile3 = new File(dirName + File.separator + this.testName + "-01-03.gfs");
// set the system property to use KB instead of MB for file size
System.setProperty(HostStatSampler.TEST_FILE_SIZE_LIMIT_IN_KB_PROPERTY, "true");
Properties props = createGemFireProperties();
props.setProperty(ARCHIVE_FILE_SIZE_LIMIT, "1");
props.setProperty(ARCHIVE_DISK_SPACE_LIMIT, "0");
props.setProperty(STATISTIC_ARCHIVE_FILE, archiveFileName);
connect(props);
assertTrue(getGemFireStatSampler().waitForInitialization(5000));
boolean done = false;
try {
for (StopWatch time = new StopWatch(true); !done && time.elapsedTimeMillis() < 4000; done =
(getSampleCollector() != null && getSampleCollector().getStatArchiveHandler() != null)) {
Thread.sleep(10);
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
assertTrue("Waiting for getSampleCollector().getStatArchiveHandler() to not be null", done);
StatArchiveHandler statArchiveHandler = getSampleCollector().getStatArchiveHandler();
StatArchiveHandlerConfig config = statArchiveHandler.getStatArchiveHandlerConfig();
assertEquals(1 * 1024, config.getArchiveFileSizeLimit());
waitForFileToExist(archiveFile, 4000, 10);
waitForFileToExist(archiveFile1, 4000, 10);
waitForFileToExist(archiveFile2, 4000, 10);
waitForFileToExist(archiveFile3, 4000, 10);
}
/**
* Verifies that archive removal works correctly when archive-disk-space-limit is specified.
*/
@Test
public void testArchiveRemoval() throws Exception {
final String dirName = this.testDir.getAbsolutePath();// + File.separator + this.testName;
new File(dirName).mkdirs();
final String archiveFileName = dirName + File.separator + this.testName + ".gfs";
final File archiveFile = new File(archiveFileName);
final File archiveFile1 = new File(dirName + File.separator + this.testName + "-01-01.gfs");
final File archiveFile2 = new File(dirName + File.separator + this.testName + "-01-02.gfs");
final File archiveFile3 = new File(dirName + File.separator + this.testName + "-01-03.gfs");
final File archiveFile4 = new File(dirName + File.separator + this.testName + "-01-04.gfs");
final int sampleRate = 1000;
// set the system property to use KB instead of MB for file size
System.setProperty(HostStatSampler.TEST_FILE_SIZE_LIMIT_IN_KB_PROPERTY, "true");
Properties props = createGemFireProperties();
props.setProperty(STATISTIC_ARCHIVE_FILE, archiveFileName);
props.setProperty(ARCHIVE_FILE_SIZE_LIMIT, "1");
props.setProperty(ARCHIVE_DISK_SPACE_LIMIT, "12");
props.setProperty(STATISTIC_SAMPLE_RATE, String.valueOf(sampleRate));
connect(props);
assertTrue(getGemFireStatSampler().waitForInitialization(5000));
boolean exists1 = false;
boolean exists2 = false;
boolean exists3 = false;
boolean exists4 = false;
boolean exists = false;
boolean done = false;
try {
for (StopWatch time = new StopWatch(true); !done
&& time.elapsedTimeMillis() < 10 * sampleRate;) {
exists1 = exists1 || archiveFile1.exists();
exists2 = exists2 || archiveFile2.exists();
exists3 = exists3 || archiveFile3.exists();
exists4 = exists4 || archiveFile4.exists();
exists = exists || archiveFile.exists();
done = exists1 && exists2 && exists3 && exists4 && exists;
if (!done) {
Thread.sleep(10);
}
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
assertTrue("Waiting for archive files to exist:" + " exists1=" + exists1 + " exists2=" + exists2
+ " exists3=" + exists3 + " exists4=" + exists4 + " exists=" + exists, done);
waitForFileToDelete(archiveFile1, 10 * sampleRate, 10);
}
@Test
public void testLocalStatListenerRegistration() throws Exception {
connect(createGemFireProperties());
final GemFireStatSampler statSampler = getGemFireStatSampler();
statSampler.waitForInitialization(5000);
final AtomicBoolean flag = new AtomicBoolean(false);
final LocalStatListener listener = new LocalStatListener() {
public void statValueChanged(double value) {
flag.set(true);
}
};
final String tenuredPoolName = HeapMemoryMonitor.getTenuredMemoryPoolMXBean().getName();
logger.info("TenuredPoolName: {}", tenuredPoolName);
boolean done = false;
try {
for (StopWatch time = new StopWatch(true); !done && time.elapsedTimeMillis() < 5000;) {
Thread.sleep(10);
Statistics si =
HeapMemoryMonitor.getTenuredPoolStatistics((StatisticsManager) this.system);
if (si != null) {
statSampler.addLocalStatListener(listener, si, "currentUsedMemory");
done = true;
}
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
assertTrue("Waiting for " + tenuredPoolName + " statistics to be added to create listener for",
done);
assertTrue(
"expected at least one stat listener, found " + statSampler.getLocalListeners().size(),
statSampler.getLocalListeners().size() > 0);
long maxTenuredMemory = HeapMemoryMonitor.getTenuredMemoryPoolMXBean().getUsage().getMax();
// byte[] bytes = new byte[1024 * 1024 * 10];
byte[] bytes = new byte[(int) (maxTenuredMemory * 0.01)];
Arrays.fill(bytes, Byte.MAX_VALUE);
done = false;
try {
for (StopWatch time = new StopWatch(true); !done && time.elapsedTimeMillis() < 5000; done =
(flag.get())) {
Thread.sleep(10);
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
assertTrue("Waiting for listener to set flag to true", done);
}
  // The loner DistributedSystem created by connect() doubles as the StatisticsManager.
  @Override
  protected StatisticsManager getStatisticsManager() {
    return (InternalDistributedSystem) this.system;
  }
  // The same connected system instance also serves as the OsStatisticsFactory.
  protected OsStatisticsFactory getOsStatisticsFactory() {
    return (InternalDistributedSystem) this.system;
  }
  // Sampler under test, owned by the connected InternalDistributedSystem.
  private GemFireStatSampler getGemFireStatSampler() {
    return ((InternalDistributedSystem) this.system).getStatSampler();
  }
  // NOTE: may return null until the sampler has fully initialized — callers poll for it.
  private SampleCollector getSampleCollector() {
    return getGemFireStatSampler().getSampleCollector();
  }
private Properties createGemFireProperties() {
Properties props = new Properties();
props.setProperty(STATISTIC_SAMPLING_ENABLED, "true"); // TODO: test true/false
props.setProperty(ENABLE_TIME_STATISTICS, "true"); // TODO: test true/false
props.setProperty(STATISTIC_SAMPLE_RATE, String.valueOf(STAT_SAMPLE_RATE));
props.setProperty(ARCHIVE_FILE_SIZE_LIMIT, "0");
props.setProperty(ARCHIVE_DISK_SPACE_LIMIT, "0");
props.setProperty(MCAST_PORT, "0");
props.setProperty(LOCATORS, "");
return props;
}
  /**
   * Creates a fresh loner DistributedSystem for each test. Note that the DistributedSystem is the
   * StatisticsManager/Factory/etc.
   */
  @SuppressWarnings("deprecation")
  private void connect(Properties props) {
    this.system = DistributedSystem.connect(props);
  }
  /**
   * Disconnects and clears the DistributedSystem; safe to call when not connected.
   */
  @SuppressWarnings("deprecation")
  private void disconnect() {
    if (this.system != null) {
      this.system.disconnect();
      this.system = null;
    }
  }
// public static class AsyncInvoker {
// public static AsyncInvocation invokeAsync(Runnable r) {
// return invokeAsync(r, "run", new Object[0]);
// }
// public static AsyncInvocation invokeAsync(Callable c) {
// return invokeAsync(c, "call", new Object[0]);
// }
// public static AsyncInvocation invokeAsync(
// final Object o, final String methodName, final Object[] args) {
// AsyncInvocation ai =
// new AsyncInvocation(o, methodName, new Runnable() {
// public void run() {
// MethExecutorResult result =
// MethExecutor.executeObject(o, methodName, args);
// if (result.exceptionOccurred()) {
// throw new AsyncInvocationException(result.getException());
// }
// AsyncInvocation.setReturnValue(result.getResult());
// }
// });
// ai.start();
// return ai;
// }
//
// public static class AsyncInvocationException extends RuntimeException {
// private static final long serialVersionUID = -5522299018650622945L;
// /**
// * Creates a new <code>AsyncInvocationException</code>.
// */
// public AsyncInvocationException(String message) {
// super(message);
// }
//
// /**
// * Creates a new <code>AsyncInvocationException</code> that was
// * caused by a given exception
// */
// public AsyncInvocationException(String message, Throwable thr) {
// super(message, thr);
// }
//
// /**
// * Creates a new <code>AsyncInvocationException</code> that was
// * caused by a given exception
// */
// public AsyncInvocationException(Throwable thr) {
// super(thr.getMessage(), thr);
// }
// }
// }
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.util.io;
import com.github.marschall.memoryfilesystem.MemoryFileSystemBuilder;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.testFramework.rules.TempDirectory;
import org.junit.Rule;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.channels.FileLock;
import java.nio.file.FileSystem;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.*;
/**
* @author Irina.Chernushina, lene
*/
public class FileUtilHeavyTest {
  @Rule public TempDirectory tempDir = new TempDirectory();

  /** Every file and directory (root included) is visited; visits are counted per name. */
  @Test
  public void testProcessSimple() throws IOException {
    setupVisitorTestDirectories();
    Map<String, Integer> result = new HashMap<>();
    FileUtil.processFilesRecursively(tempDir.getRoot(), file -> {
      Integer integer = result.get(file.getName());
      result.put(file.getName(), integer == null ? 1 : (integer + 1));
      return true;
    });
    // 6 distinct names: root, dir1, dir2, inner, "1", "2"
    assertEquals(6, result.size());
    assertEquals(1, result.get(tempDir.getRoot().getName()).intValue());
    assertEquals(3, result.get("1").intValue());
    assertEquals(3, result.get("2").intValue());
    assertEquals(1, result.get("dir1").intValue());
  }

  /** Returning false from the processor stops the traversal after the first visit. */
  @Test
  public void testProcessStops() throws IOException {
    setupVisitorTestDirectories();
    int[] cnt = {0};
    FileUtil.processFilesRecursively(tempDir.getRoot(), file -> {
      ++cnt[0];
      return false;
    });
    assertEquals(1, cnt[0]);
  }

  /** A directory filter prunes recursion into (but still reports) the filtered directory. */
  @Test
  @SuppressWarnings("deprecation")
  public void testProcessDirectoryFilter() throws IOException {
    setupVisitorTestDirectories();
    Map<String, Integer> result = new HashMap<>();
    FileUtil.processFilesRecursively(tempDir.getRoot(), file -> {
      Integer integer = result.get(file.getName());
      result.put(file.getName(), integer == null ? 1 : (integer + 1));
      return true;
    }, file -> ! "dir2".equals(file.getName()));
    assertEquals(5, result.size());
    assertEquals(1, result.get(tempDir.getRoot().getName()).intValue());
    assertEquals(1, result.get("1").intValue());
    assertEquals(1, result.get("2").intValue());
    assertEquals(1, result.get("dir1").intValue());
    assertEquals(1, result.get("dir2").intValue());
    assertNull(result.get("dir21"));
  }

  /** Fixture for the traversal tests: two top-level dirs, one with a nested dir. */
  private void setupVisitorTestDirectories() throws IOException {
    tempDir.newFile("dir1/1");
    tempDir.newFile("dir1/2");
    tempDir.newFile("dir2/1");
    tempDir.newFile("dir2/2");
    tempDir.newFile("dir2/inner/1");
    tempDir.newFile("dir2/inner/2");
  }

  @Test
  public void nonExistingFileInNonExistentDirectory() {
    assertThat(FileUtil.findFileInProvidedPath("123", "zero")).isNullOrEmpty();
  }

  @Test
  public void nonExistingFileInDirectory() {
    assertThat(FileUtil.findFileInProvidedPath(tempDir.getRoot().getPath(), "zero")).isNullOrEmpty();
  }

  @Test
  public void nonExistingFile() throws IOException {
    File first = tempDir.newFile("first");
    assertThat(FileUtil.findFileInProvidedPath(first.getPath() + "123", first.getName() + "123")).isNullOrEmpty();
  }

  @Test
  public void existingFileInDirectory() throws IOException {
    File first = tempDir.newFile("first");
    assertThat(FileUtil.findFileInProvidedPath(tempDir.getRoot().getPath(), "first")).isEqualTo(first.getPath());
  }

  @Test
  public void existingFile() throws IOException {
    File first = tempDir.newFile("first");
    assertThat(FileUtil.findFileInProvidedPath(first.getPath(), "first")).isEqualTo(first.getPath());
  }

  /** When both candidates exist, the first name listed wins. */
  @Test
  public void twoFilesOrderInDirectory() throws IOException {
    File first = tempDir.newFile("first");
    tempDir.newFile("second");
    assertThat(FileUtil.findFileInProvidedPath(tempDir.getRoot().getPath(), "first", "second")).isEqualTo(first.getPath());
  }

  @Test
  public void twoFilesOrderInDirectory2() throws IOException {
    tempDir.newFile("first");
    File second = tempDir.newFile("second");
    assertThat(FileUtil.findFileInProvidedPath(tempDir.getRoot().getPath(), "second", "first")).isEqualTo(second.getPath());
  }

  @Test
  public void twoFilesOrder() throws IOException {
    File first = tempDir.newFile("first");
    tempDir.newFile("second");
    assertThat(FileUtil.findFileInProvidedPath(first.getPath(), "first", "second")).isEqualTo(first.getPath());
  }

  @Test
  public void twoFilesOrder2() throws IOException {
    // NOTE(review): this test is byte-for-byte identical to twoFilesOrder above; by analogy
    // with twoFilesOrderInDirectory2 it was probably meant to pass the names in reverse
    // order ("second", "first") — confirm the intended expectation before changing it.
    File first = tempDir.newFile("first");
    tempDir.newFile("second");
    assertThat(FileUtil.findFileInProvidedPath(first.getPath(), "first", "second")).isEqualTo(first.getPath());
  }

  /** delete() must report failure when the file is locked (Windows) or unwritable (Unix). */
  @Test
  public void testDeleteFail() throws IOException {
    File targetDir = tempDir.newFolder("dir");
    File file = tempDir.newFile("dir/file");
    if (SystemInfo.isWindows) {
      // an open file lock prevents deletion on Windows
      try (RandomAccessFile rw = new RandomAccessFile(file, "rw"); FileLock ignored = rw.getChannel().tryLock()) {
        assertFalse(FileUtil.delete(file));
      }
    }
    else {
      // a read-only parent directory prevents deletion on Unix
      assertTrue(targetDir.setWritable(false, false));
      try {
        assertFalse(FileUtil.delete(file));
      }
      finally {
        assertTrue(targetDir.setWritable(true, true));
      }
    }
  }

  /** doIOOperation retries a failing operation up to 10 times, passing lastAttempt on the final try. */
  @Test
  public void testRepeatableOperation() throws IOException {
    abstract class CountableIOOperation implements FileUtilRt.RepeatableIOOperation<Boolean, IOException> {
      private int count = 0;
      @Override
      public Boolean execute(boolean lastAttempt) {
        count++;
        // a non-null result stops the retry loop
        return stop(lastAttempt) ? true : null;
      }
      protected abstract boolean stop(boolean lastAttempt);
    }
    CountableIOOperation successful = new CountableIOOperation() {
      @Override protected boolean stop(boolean lastAttempt) { return true; }
    };
    FileUtilRt.doIOOperation(successful);
    assertEquals(1, successful.count);
    CountableIOOperation failed = new CountableIOOperation() {
      @Override protected boolean stop(boolean lastAttempt) { return false; }
    };
    FileUtilRt.doIOOperation(failed);
    assertEquals(10, failed.count);
    CountableIOOperation lastShot = new CountableIOOperation() {
      @Override protected boolean stop(boolean lastAttempt) { return lastAttempt; }
    };
    FileUtilRt.doIOOperation(lastShot);
    assertEquals(10, lastShot.count);
  }

  /** Deleting a symlink (directly or via its parent) must not delete the link target. */
  @Test
  public void testSymlinkDeletion() throws IOException {
    IoTestUtil.assumeSymLinkCreationIsSupported();
    File targetDir = tempDir.newFolder("target");
    File targetFile = tempDir.newFile("target/file");
    File directDirLink = new File(tempDir.getRoot(), "dirLink");
    Files.createSymbolicLink(directDirLink.toPath(), targetDir.toPath());
    File directFileLink = new File(tempDir.getRoot(), "fileLink");
    Files.createSymbolicLink(directFileLink.toPath(), targetFile.toPath());
    File linkParentDir = tempDir.newFolder("linkParent");
    Files.createSymbolicLink(new File(linkParentDir, "link").toPath(), targetDir.toPath());
    FileUtil.delete(directFileLink);
    FileUtil.delete(directDirLink);
    FileUtil.delete(linkParentDir);
    assertThat(directFileLink).doesNotExist();
    assertThat(directDirLink).doesNotExist();
    assertThat(linkParentDir).doesNotExist();
    assertThat(targetFile).exists();
  }

  /** Same as symlink deletion but for NTFS junctions (Windows only). */
  @Test
  public void testJunctionDeletion() throws IOException {
    IoTestUtil.assumeWindows();
    File targetDir = tempDir.newFolder("target");
    File targetFile = tempDir.newFile("target/file");
    File directDirLink = new File(tempDir.getRoot(), "dirLink");
    IoTestUtil.createJunction(targetDir.getPath(), directDirLink.getPath());
    File linkParentDir = tempDir.newFolder("linkParent");
    IoTestUtil.createJunction(targetDir.getPath(), new File(linkParentDir, "link").getPath());
    FileUtil.delete(directDirLink);
    FileUtil.delete(linkParentDir);
    assertThat(directDirLink).doesNotExist();
    assertThat(linkParentDir).doesNotExist();
    assertThat(targetFile).exists();
  }

  /** delete(Path) works on a non-default (in-memory) FileSystem and tolerates missing paths. */
  @Test
  public void nioDeletion() throws IOException {
    try (FileSystem fs = MemoryFileSystemBuilder.newEmpty().build(FileUtilHeavyTest.class.getSimpleName())) {
      Path dir = Files.createDirectory(fs.getPath("dir"));
      Path file1 = Files.createFile(fs.getPath("dir", "file1"));
      Path file2 = Files.createFile(fs.getPath("dir", "file2"));
      // Files.list returns an open stream that must be closed, or the directory handle leaks
      try (java.util.stream.Stream<Path> children = Files.list(dir)) {
        assertThat(children).containsExactlyInAnyOrder(file1, file2);
      }
      FileUtil.delete(dir);
      assertThat(dir).doesNotExist();
      Path nonExisting = fs.getPath("non-existing");
      assertThat(nonExisting).doesNotExist();
      FileUtil.delete(nonExisting);
    }
  }

  /** Deleting a missing file is a no-op for the Path overload and returns true for the File one. */
  @Test
  public void deletingNonExistentFile() throws IOException {
    File missing = new File(tempDir.getRoot(), "missing");
    FileUtil.delete(missing.toPath());
    assertTrue(FileUtil.delete(missing));
  }

  @Test
  public void testToCanonicalPathSymLinksAware() throws IOException {
    IoTestUtil.assumeSymLinkCreationIsSupported();
    File rootDir = tempDir.newFolder("root");
    tempDir.newFolder("root/dir1/dir2/dir3/dir4");
    String root = FileUtil.toSystemIndependentName(FileUtil.resolveShortWindowsName(rootDir.getPath()));
    // non-recursive link
    Files.createSymbolicLink(new File(rootDir, "dir1/dir2_link").toPath(), new File(rootDir, "dir1/dir2").toPath());
    // recursive links to a parent dir
    Files.createSymbolicLink(new File(rootDir, "dir1/dir1_link").toPath(), new File(rootDir, "dir1").toPath());
    // I) links should NOT be resolved when ../ stays inside the linked path
    // I.I) non-recursive links
    assertEquals(root + "/dir1/dir2_link", FileUtil.toCanonicalPath(root + "/dir1/dir2_link/./", true));
    assertEquals(root + "/dir1/dir2_link", FileUtil.toCanonicalPath(root + "/dir1/dir2_link/dir3/../", true));
    assertEquals(root + "/dir1/dir2_link/dir3", FileUtil.toCanonicalPath(root + "/dir1/dir2_link/dir3/dir4/../", true));
    assertEquals(root + "/dir1/dir2_link", FileUtil.toCanonicalPath(root + "/dir1/dir2_link/dir3/dir4/../../", true));
    assertEquals(root + "/dir1/dir2_link", FileUtil.toCanonicalPath(root + "/dir1/../dir1/dir2_link/dir3/../", true));
    // I.II) recursive links
    assertEquals(root + "/dir1/dir1_link", FileUtil.toCanonicalPath(root + "/dir1/dir1_link/./", true));
    assertEquals(root + "/dir1/dir1_link", FileUtil.toCanonicalPath(root + "/dir1/dir1_link/dir2/../", true));
    assertEquals(root + "/dir1/dir1_link/dir2", FileUtil.toCanonicalPath(root + "/dir1/dir1_link/dir2/dir3/../", true));
    assertEquals(root + "/dir1/dir1_link", FileUtil.toCanonicalPath(root + "/dir1/dir1_link/dir2/dir3/../../", true));
    assertEquals(root + "/dir1/dir1_link", FileUtil.toCanonicalPath(root + "/dir1/../dir1/dir1_link/dir2/../", true));
    // II) links should be resolved is ../ escapes outside
    // II.I) non-recursive links
    assertEquals(root + "/dir1", FileUtil.toCanonicalPath(root + "/dir1/dir2_link/../", true));
    assertEquals(root + "/dir1/dir2", FileUtil.toCanonicalPath(root + "/dir1/dir2_link/../dir2", true));
    assertEquals(root + "/dir1/dir2", FileUtil.toCanonicalPath(root + "/dir1/dir2_link/../../dir1/dir2", true));
    assertEquals(root + "/dir1/dir2", FileUtil.toCanonicalPath(root + "/dir1/dir2_link/dir3/../../dir2", true));
    assertEquals(root + "/dir1/dir2", FileUtil.toCanonicalPath(root + "/dir1/dir2_link/dir3/../../../dir1/dir2", true));
    assertEquals(root + "/dir1/dir2", FileUtil.toCanonicalPath(root + "/dir1/../dir1/dir2_link/../dir2", true));
    // II.I) recursive links
    // the rules seems to be different when ../ goes over recursive link:
    // * on Windows ../ goes to link's parent
    // * on Unix ../ goes to target's parent
    if (SystemInfo.isWindows) {
      assertEquals(root + "/dir1", FileUtil.toCanonicalPath(root + "/dir1/dir1_link/../", true));
      assertEquals(root + "/dir1/dir2", FileUtil.toCanonicalPath(root + "/dir1/dir1_link/../dir2", true));
      assertEquals(root + "/dir1/dir2", FileUtil.toCanonicalPath(root + "/dir1/dir1_link/../../dir1/dir2", true));
      assertEquals(root + "/dir1/dir2", FileUtil.toCanonicalPath(root + "/dir1/dir1_link/dir2/../../dir2", true));
      assertEquals(root + "/dir1/dir2", FileUtil.toCanonicalPath(root + "/dir1/dir1_link/dir2/../../../dir1/dir2", true));
      assertEquals(root + "/dir1/dir2", FileUtil.toCanonicalPath(root + "/dir1/../dir1/dir1_link/../dir2", true));
    }
    else {
      assertEquals(root, FileUtil.toCanonicalPath(root + "/dir1/dir1_link/../", true));
      assertEquals(root + "/dir1", FileUtil.toCanonicalPath(root + "/dir1/dir1_link/../dir1", true));
      assertEquals(root + "/dir1", FileUtil.toCanonicalPath(root + "/dir1/dir1_link/../../root/dir1", true));
      assertEquals(root + "/dir1", FileUtil.toCanonicalPath(root + "/dir1/dir1_link/dir2/../../dir1", true));
      assertEquals(root + "/dir1", FileUtil.toCanonicalPath(root + "/dir1/dir1_link/dir2/../../../root/dir1", true));
      assertEquals(root + "/dir1", FileUtil.toCanonicalPath(root + "/dir1/../dir1/dir1_link/../dir1", true));
    }
    // some corner cases, behavior should be the same as the default FileUtil.toCanonicalPath
    assertEquals(FileUtil.toCanonicalPath("..", false), FileUtil.toCanonicalPath("..", true));
    assertEquals(FileUtil.toCanonicalPath("../", false), FileUtil.toCanonicalPath("../", true));
    assertEquals(FileUtil.toCanonicalPath("/..", false), FileUtil.toCanonicalPath("/..", true));
    assertEquals(FileUtil.toCanonicalPath("/../", false), FileUtil.toCanonicalPath("/../", true));
  }

  /** Detected case sensitivity of the temp FS must match the platform default. */
  @Test
  public void testCaseSensitivityDetection() throws IOException {
    File probe = tempDir.newFile("probe");
    assertEquals(SystemInfo.isFileSystemCaseSensitive, FileUtil.isFileSystemCaseSensitive(probe.getPath()));
  }

  @Test
  public void testFileRelativePath() {
    String relativePath = FileUtil.toSystemDependentName("relative/path.file");
    File existingDir = tempDir.getRoot();
    assertEquals(relativePath, FileUtil.getRelativePath(existingDir, new File(existingDir, relativePath)));
    File notExistingDirOrFile = new File("not/existing/path");
    assertEquals(relativePath, FileUtil.getRelativePath(notExistingDirOrFile, new File(notExistingDirOrFile, relativePath)));
    // FileUtil.getRelativePath(File, File) should have the same behavior then FileUtil.getRelativePath(String, String, char)
    File existingFile = IoTestUtil.createTestFile(existingDir, "foo.file");
    assertEquals(".." + File.separatorChar + relativePath,
                 FileUtil.getRelativePath(existingFile, new File(existingFile.getParent(), relativePath)));
  }
}
| |
// -*- mode: java; c-basic-offset: 2; -*-
// Copyright 2009-2011 Google, All Rights reserved
// Copyright 2011-2012 MIT, All rights reserved
// Released under the MIT License https://raw.github.com/mit-cml/app-inventor/master/mitlicense.txt
package com.google.appinventor.components.runtime;
import com.google.appinventor.components.annotations.DesignerComponent;
import com.google.appinventor.components.annotations.DesignerProperty;
import com.google.appinventor.components.annotations.PropertyCategory;
import com.google.appinventor.components.annotations.SimpleEvent;
import com.google.appinventor.components.annotations.SimpleFunction;
import com.google.appinventor.components.annotations.SimpleObject;
import com.google.appinventor.components.annotations.SimpleProperty;
import com.google.appinventor.components.annotations.UsesPermissions;
import com.google.appinventor.components.common.ComponentCategory;
import com.google.appinventor.components.common.HtmlEntities;
import com.google.appinventor.components.common.PropertyTypeConstants;
import com.google.appinventor.components.common.YaVersion;
import com.google.appinventor.components.runtime.collect.Lists;
import com.google.appinventor.components.runtime.collect.Maps;
import com.google.appinventor.components.runtime.util.AsynchUtil;
import com.google.appinventor.components.runtime.util.ErrorMessages;
import com.google.appinventor.components.runtime.util.FileUtil;
import com.google.appinventor.components.runtime.util.GingerbreadUtil;
import com.google.appinventor.components.runtime.util.JsonUtil;
import com.google.appinventor.components.runtime.util.MediaUtil;
import com.google.appinventor.components.runtime.util.SdkLevel;
import com.google.appinventor.components.runtime.util.YailList;
import android.app.Activity;
import android.text.TextUtils;
import android.util.Log;
import org.json.JSONException;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.net.CookieHandler;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLEncoder;
import java.util.List;
import java.util.Map;
/**
* The Web component provides functions for HTTP GET and POST requests.
*
* @author lizlooney@google.com (Liz Looney)
*/
@DesignerComponent(version = YaVersion.WEB_COMPONENT_VERSION,
description = "Non-visible component that provides functions for HTTP GET and POST requests.",
category = ComponentCategory.MISC,
nonVisible = true,
iconName = "images/web.png")
@SimpleObject
@UsesPermissions(permissionNames = "android.permission.INTERNET")
public class Web extends AndroidNonvisibleComponent implements Component {
  /**
   * InvalidRequestHeadersException can be thrown from processRequestHeaders.
   * It is thrown if the list passed to processRequestHeaders contains an item that is not a list.
   * It is thrown if the list passed to processRequestHeaders contains an item that is a list whose
   * size is not 2.
   */
  private static class InvalidRequestHeadersException extends Exception {
    /*
     * errorNumber could be:
     * ErrorMessages.ERROR_WEB_REQUEST_HEADER_NOT_LIST
     * ErrorMessages.ERROR_WEB_REQUEST_HEADER_NOT_TWO_ELEMENTS
     */
    final int errorNumber;
    final int index; // the index of the invalid header
    // Carries the error code and the offending item's position so the caller can
    // dispatch a precise ErrorOccurred event.
    InvalidRequestHeadersException(int errorNumber, int index) {
      super();
      this.errorNumber = errorNumber;
      this.index = index;
    }
  }
  /**
   * BuildPostDataException can be thrown from buildPostData.
   * It is thrown if the list passed to buildPostData contains an item that is not a list.
   * It is thrown if the list passed to buildPostData contains an item that is a list whose size is
   * not 2.
   */
  // VisibleForTesting
  static class BuildPostDataException extends Exception {
    /*
     * errorNumber could be:
     * ErrorMessages.ERROR_WEB_BUILD_POST_DATA_NOT_LIST
     * ErrorMessages.ERROR_WEB_BUILD_POST_DATA_NOT_TWO_ELEMENTS
     */
    final int errorNumber;
    final int index; // the index of the invalid header
    // Carries the error code and the offending item's position so the caller can
    // dispatch a precise ErrorOccurred event.
    BuildPostDataException(int errorNumber, int index) {
      super();
      this.errorNumber = errorNumber;
      this.index = index;
    }
  }
  /**
   * The CapturedProperties class captures the current property values from a Web component before
   * an asynchronous request is made. This avoids concurrency problems if the user changes a
   * property value after initiating an asynchronous request.
   */
  private static class CapturedProperties {
    final String urlString;
    final URL url;
    final boolean allowCookies;
    final boolean saveResponse;
    final String responseFileName;
    final Map<String, List<String>> requestHeaders;
    final Map<String, List<String>> cookies; // may be null if cookies are disabled/unavailable
    /**
     * Snapshots the given component's current property values.
     *
     * @param web the component whose properties are captured
     * @throws MalformedURLException if the component's Url property is not a valid URL
     * @throws InvalidRequestHeadersException if the RequestHeaders property is malformed
     */
    CapturedProperties(Web web) throws MalformedURLException, InvalidRequestHeadersException {
      urlString = web.urlString;
      url = new URL(urlString);
      allowCookies = web.allowCookies;
      saveResponse = web.saveResponse;
      responseFileName = web.responseFileName;
      // validate and normalize the YailList of headers into a Map up front
      requestHeaders = processRequestHeaders(web.requestHeaders);
      Map<String, List<String>> cookiesTemp = null;
      if (allowCookies && web.cookieHandler != null) {
        try {
          cookiesTemp = web.cookieHandler.get(url.toURI(), requestHeaders);
        } catch (URISyntaxException e) {
          // Can't convert the URL to a URI; no cookies for you.
        } catch (IOException e) {
          // Sorry, no cookies for you.
        }
      }
      cookies = cookiesTemp;
    }
  }
  private static final String LOG_TAG = "Web";
  // Maps a response Content-Type to the file extension used when saving the response body.
  private static final Map<String, String> mimeTypeToExtension;
  static {
    mimeTypeToExtension = Maps.newHashMap();
    mimeTypeToExtension.put("application/pdf", "pdf");
    mimeTypeToExtension.put("application/zip", "zip");
    mimeTypeToExtension.put("audio/mpeg", "mpeg");
    mimeTypeToExtension.put("audio/mp3", "mp3");
    mimeTypeToExtension.put("audio/mp4", "mp4");
    mimeTypeToExtension.put("image/gif", "gif");
    mimeTypeToExtension.put("image/jpeg", "jpg");
    mimeTypeToExtension.put("image/png", "png");
    mimeTypeToExtension.put("image/tiff", "tiff");
    mimeTypeToExtension.put("text/plain", "txt");
    mimeTypeToExtension.put("text/html", "html");
    mimeTypeToExtension.put("text/xml", "xml");
    // TODO(lizlooney) - consider adding more mime types.
  }
  // The Activity hosting this component (from the container).
  private final Activity activity;
  // Non-null only on Gingerbread (API 9) and later; null means cookies are unsupported.
  private final CookieHandler cookieHandler;
  // Backing fields for the designer properties defined below.
  private String urlString = "";
  private boolean allowCookies;
  private YailList requestHeaders = new YailList();
  private boolean saveResponse;
  private String responseFileName = "";
  /**
   * Creates a new Web component.
   *
   * @param container the Form that this component is contained in.
   */
  public Web(ComponentContainer container) {
    super(container.$form());
    activity = container.$context();
    // CookieManager is only available on Gingerbread (API 9) and later
    cookieHandler = (SdkLevel.getLevel() >= SdkLevel.LEVEL_GINGERBREAD)
        ? GingerbreadUtil.newCookieManager()
        : null;
  }
  /**
   * This constructor is for testing purposes only.
   */
  protected Web() {
    super(null);
    activity = null;
    cookieHandler = null;
  }
  /**
   * Returns the URL.
   */
  @SimpleProperty(category = PropertyCategory.BEHAVIOR,
      description = "The URL for the web request.")
  public String Url() {
    return urlString;
  }
  /**
   * Specifies the URL.
   *
   * @param url the URL to use for subsequent web requests
   */
  @DesignerProperty(editorType = PropertyTypeConstants.PROPERTY_TYPE_STRING,
      defaultValue = "")
  @SimpleProperty
  public void Url(String url) {
    urlString = url;
  }
  /**
   * Returns the request headers.
   */
  @SimpleProperty(category = PropertyCategory.BEHAVIOR,
      description = "The request headers, as a list of two-element sublists. The first element " +
      "of each sublist represents the request header field name. The second element of each " +
      "sublist represents the request header field values, either a single value or a list " +
      "containing multiple values.")
  public YailList RequestHeaders() {
    return requestHeaders;
  }
  /**
   * Sets the request headers.
   *
   * <p>The list is validated before being stored; an invalid list leaves the previous
   * headers unchanged and dispatches an ErrorOccurred event instead.
   *
   * @param list a list of two-element sublists, each representing a header name and values
   */
  @SimpleProperty
  public void RequestHeaders(YailList list) {
    // Call processRequestHeaders to validate the list parameter before setting the requestHeaders
    // field.
    try {
      processRequestHeaders(list);
      requestHeaders = list;
    } catch (InvalidRequestHeadersException e) {
      form.dispatchErrorOccurredEvent(this, "RequestHeaders", e.errorNumber, e.index);
    }
  }
  /**
   * Returns whether cookies should be allowed
   */
  @SimpleProperty(category = PropertyCategory.BEHAVIOR,
      description = "Whether the cookies from a response should be saved and used in subsequent " +
      "requests. Cookies are only supported on Android version 2.3 or greater.")
  public boolean AllowCookies() {
    return allowCookies;
  }
  /**
   * Specifies whether cookies should be allowed
   *
   * @param allowCookies true to save response cookies and send them on later requests
   */
  @DesignerProperty(editorType = PropertyTypeConstants.PROPERTY_TYPE_BOOLEAN,
      defaultValue = "false")
  @SimpleProperty
  public void AllowCookies(boolean allowCookies) {
    this.allowCookies = allowCookies;
    // cookieHandler is null on pre-Gingerbread devices, where cookies are unsupported
    if (allowCookies && cookieHandler == null) {
      form.dispatchErrorOccurredEvent(this, "AllowCookies",
          ErrorMessages.ERROR_FUNCTIONALITY_NOT_SUPPORTED_WEB_COOKIES);
    }
  }
/**
 * Returns whether the response should be saved in a file.
 *
 * @return true if responses are written to a file (GotFile event) rather than
 *     returned as text (GotText event)
 */
@SimpleProperty(category = PropertyCategory.BEHAVIOR,
    description = "Whether the response should be saved in a file.")
public boolean SaveResponse() {
  return saveResponse;
}
/**
 * Specifies whether the response should be saved in a file.
 *
 * @param saveResponse true to save responses to a file and trigger GotFile;
 *     false to trigger GotText with the response body
 */
@DesignerProperty(editorType = PropertyTypeConstants.PROPERTY_TYPE_BOOLEAN,
    defaultValue = "false")
@SimpleProperty
public void SaveResponse(boolean saveResponse) {
  this.saveResponse = saveResponse;
}
/**
 * Returns the name of the file where the response should be saved.
 * If SaveResponse is true and ResponseFileName is empty, then a new file
 * name will be generated (see createFile).
 *
 * @return the configured response file name, possibly empty
 */
@SimpleProperty(category = PropertyCategory.BEHAVIOR,
    description = "The name of the file where the response should be saved. If SaveResponse " +
    "is true and ResponseFileName is empty, then a new file name will be generated.")
public String ResponseFileName() {
  return responseFileName;
}
/**
 * Specifies the name of the file where the response should be saved.
 * If SaveResponse is true and ResponseFileName is empty, then a new file
 * name will be generated.
 *
 * @param responseFileName the file name to save responses under; empty means
 *     auto-generate a name from the response mime type
 */
@DesignerProperty(editorType = PropertyTypeConstants.PROPERTY_TYPE_STRING,
    defaultValue = "")
@SimpleProperty
public void ResponseFileName(String responseFileName) {
  this.responseFileName = responseFileName;
}
/**
 * Clears all cookies held by this component's cookie manager. On devices
 * without cookie support (cookieHandler is null, pre-Gingerbread) an error
 * event is dispatched instead.
 */
@SimpleFunction(description = "Clears all cookies for this Web component.")
public void ClearCookies() {
  if (cookieHandler == null) {
    form.dispatchErrorOccurredEvent(this, "ClearCookies",
        ErrorMessages.ERROR_FUNCTIONALITY_NOT_SUPPORTED_WEB_COOKIES);
    return;
  }
  GingerbreadUtil.clearCookies(cookieHandler);
}
/**
 * Performs an HTTP GET request using the Url property and retrieves the
 * response asynchronously.<br>
 * If the SaveResponse property is true, the response is saved in a file and
 * the GotFile event fires; the ResponseFileName property names the file.<br>
 * If the SaveResponse property is false, the GotText event fires.
 */
@SimpleFunction
public void Get() {
  // Snapshot the property values now: the request runs on a worker thread and
  // the user may change properties while it is in flight.
  final CapturedProperties snapshot = capturePropertyValues("Get");
  if (snapshot == null) {
    // capturePropertyValues already dispatched the error event.
    return;
  }
  AsynchUtil.runAsynchronously(new Runnable() {
    @Override
    public void run() {
      try {
        performRequest(snapshot, null, null);
      } catch (FileUtil.FileException e) {
        form.dispatchErrorOccurredEvent(Web.this, "Get", e.getErrorMessageNumber());
      } catch (Exception e) {
        form.dispatchErrorOccurredEvent(Web.this, "Get",
            ErrorMessages.ERROR_WEB_UNABLE_TO_GET, snapshot.urlString);
      }
    }
  });
}
/**
 * Performs an HTTP POST request using the Url property and the specified text,
 * encoded as UTF-8. Delegates to postTextImpl.
 *
 * @param text the text data for the POST request
 */
@SimpleFunction(description = "Performs an HTTP POST request using the Url property and " +
    "the specified text.<br>" +
    "The characters of the text are encoded using UTF-8 encoding.<br>" +
    "If the SaveResponse property is true, the response will be saved in a file and the " +
    "GotFile event will be triggered. The responseFileName property can be used to specify " +
    "the name of the file.<br>" +
    "If the SaveResponse property is false, the GotText event will be triggered.")
public void PostText(final String text) {
  postTextImpl(text, "UTF-8", "PostText");
}
/**
 * Performs an HTTP POST request using the Url property and the specified text,
 * encoded with the given character encoding. Delegates to postTextImpl.
 *
 * @param text the text data for the POST request
 * @param encoding the character encoding to use when sending the text. If
 *                 encoding is empty or null, UTF-8 encoding will be used.
 */
@SimpleFunction(description = "Performs an HTTP POST request using the Url property and " +
    "the specified text.<br>" +
    "The characters of the text are encoded using the given encoding.<br>" +
    "If the SaveResponse property is true, the response will be saved in a file and the " +
    "GotFile event will be triggered. The ResponseFileName property can be used to specify " +
    "the name of the file.<br>" +
    "If the SaveResponse property is false, the GotText event will be triggered.")
public void PostTextWithEncoding(final String text, final String encoding) {
  postTextImpl(text, encoding, "PostTextWithEncoding");
}
/*
 * Shared implementation for PostText and PostTextWithEncoding: encodes the
 * text with the requested charset and performs the POST asynchronously.
 * If SaveResponse is true the response is written to a file and GotFile fires;
 * otherwise GotText fires with the response body.
 *
 * @param text the text data for the POST request
 * @param encoding the character encoding to use when sending the text; empty
 *        or null means UTF-8
 * @param functionName the name of the calling function, used when dispatching
 *        errors
 */
private void postTextImpl(final String text, final String encoding, final String functionName) {
  // Snapshot properties before leaving the event thread.
  final CapturedProperties props = capturePropertyValues(functionName);
  if (props == null) {
    // capturePropertyValues already dispatched the error event.
    return;
  }
  AsynchUtil.runAsynchronously(new Runnable() {
    @Override
    public void run() {
      // Default to UTF-8 when no encoding was supplied.
      final String charset = (encoding == null || encoding.length() == 0) ? "UTF-8" : encoding;
      byte[] body;
      try {
        body = text.getBytes(charset);
      } catch (UnsupportedEncodingException e) {
        // Report the encoding exactly as the caller supplied it.
        form.dispatchErrorOccurredEvent(Web.this, functionName,
            ErrorMessages.ERROR_WEB_UNSUPPORTED_ENCODING, encoding);
        return;
      }
      try {
        performRequest(props, body, null);
      } catch (FileUtil.FileException e) {
        form.dispatchErrorOccurredEvent(Web.this, functionName, e.getErrorMessageNumber());
      } catch (Exception e) {
        form.dispatchErrorOccurredEvent(Web.this, functionName,
            ErrorMessages.ERROR_WEB_UNABLE_TO_POST, text, props.urlString);
      }
    }
  });
}
/**
 * Performs an HTTP POST request using the Url property and data from the
 * specified file, and retrieves the response asynchronously.
 *
 * @param path the path of the file whose contents form the POST body
 */
@SimpleFunction(description = "Performs an HTTP POST request using the Url property and " +
    "data from the specified file.<br>" +
    "If the SaveResponse property is true, the response will be saved in a file and the " +
    "GotFile event will be triggered. The ResponseFileName property can be used to specify " +
    "the name of the file.<br>" +
    "If the SaveResponse property is false, the GotText event will be triggered.")
public void PostFile(final String path) {
  // Snapshot properties before leaving the event thread.
  final CapturedProperties snapshot = capturePropertyValues("PostFile");
  if (snapshot == null) {
    // capturePropertyValues already dispatched the error event.
    return;
  }
  AsynchUtil.runAsynchronously(new Runnable() {
    @Override
    public void run() {
      try {
        performRequest(snapshot, null, path);
      } catch (FileUtil.FileException e) {
        form.dispatchErrorOccurredEvent(Web.this, "PostFile", e.getErrorMessageNumber());
      } catch (Exception e) {
        form.dispatchErrorOccurredEvent(Web.this, "PostFile",
            ErrorMessages.ERROR_WEB_UNABLE_TO_POST_FILE, path, snapshot.urlString);
      }
    }
  });
}
/**
 * Event indicating that a request has finished and the response body is being
 * delivered as text (fired when SaveResponse is false).
 *
 * @param url the URL used for the request
 * @param responseCode the response code from the server
 * @param responseType the mime type of the response
 * @param responseContent the response content from the server
 */
@SimpleEvent
public void GotText(String url, int responseCode, String responseType, String responseContent) {
  // invoke the application's "GotText" event handler.
  EventDispatcher.dispatchEvent(this, "GotText", url, responseCode, responseType,
      responseContent);
}
/**
 * Event indicating that a request has finished and the response body was
 * saved to a file (fired when SaveResponse is true).
 *
 * @param url the URL used for the request
 * @param responseCode the response code from the server
 * @param responseType the mime type of the response
 * @param fileName the full path name of the saved file
 */
@SimpleEvent
public void GotFile(String url, int responseCode, String responseType, String fileName) {
  // invoke the application's "GotFile" event handler.
  EventDispatcher.dispatchEvent(this, "GotFile", url, responseCode, responseType, fileName);
}
/**
 * Converts a list of two-element sublists, representing name and value pairs, to a
 * string formatted as application/x-www-form-urlencoded media type, suitable to pass to
 * PostText.
 *
 * @param list a list of two-element sublists representing name and value pairs
 * @return the encoded form data, or "" if the list is malformed (an error
 *     event is dispatched in that case)
 */
@SimpleFunction
public String BuildPostData(YailList list) {
  try {
    return buildPostData(list);
  } catch (BuildPostDataException e) {
    form.dispatchErrorOccurredEvent(this, "BuildPostData", e.errorNumber, e.index);
    return "";
  }
}
/*
 * Converts a list of two-element (name, value) sublists to an
 * application/x-www-form-urlencoded string ("n1=v1&n2=v2..."), suitable to
 * pass to PostText.
 *
 * @param list a list of two-element sublists representing name and value pairs
 * @throws BuildPostDataException if any item is not a two-element sublist;
 *         the exception carries the 1-based index of the offending item
 */
// VisibleForTesting
String buildPostData(YailList list) throws BuildPostDataException {
  StringBuilder encoded = new StringBuilder();
  for (int i = 0; i < list.size(); i++) {
    Object entry = list.getObject(i);
    // Validate the shape of each entry before touching its contents.
    if (!(entry instanceof YailList)) {
      throw new BuildPostDataException(ErrorMessages.ERROR_WEB_BUILD_POST_DATA_NOT_LIST, i + 1);
    }
    YailList pair = (YailList) entry;
    if (pair.size() != 2) {
      throw new BuildPostDataException(
          ErrorMessages.ERROR_WEB_BUILD_POST_DATA_NOT_TWO_ELEMENTS, i + 1);
    }
    // '&' separates pairs; nothing precedes the first pair.
    if (i > 0) {
      encoded.append('&');
    }
    encoded.append(UriEncode(pair.getObject(0).toString()))
        .append('=')
        .append(UriEncode(pair.getObject(1).toString()));
  }
  return encoded.toString();
}
/**
 * Encodes the given text value so that it can be used in a URL.
 *
 * <p>Uses URLEncoder, i.e. application/x-www-form-urlencoded rules: spaces
 * become '+' rather than "%20". Suitable for query strings and POST form data.
 *
 * @param text the text to encode
 * @return the encoded text, or "" in the (effectively unreachable) case that
 *     UTF-8 is unavailable
 */
@SimpleFunction
public String UriEncode(String text) {
  try {
    return URLEncoder.encode(text, "UTF-8");
  } catch (UnsupportedEncodingException e) {
    // If UTF-8 is not supported, we're in big trouble!
    // According to Javadoc and Android documentation for java.nio.charset.Charset, UTF-8 is
    // available on every Java implementation.
    Log.e(LOG_TAG, "UTF-8 is unsupported?", e);
    return "";
  }
}
/**
 * Decodes the given JSON encoded value to produce a corresponding AppInventor value.
 * A JSON list [x, y, z] decodes to a list (x y z), A JSON object with name A and value B,
 * (denoted as A:B enclosed in curly braces) decodes to a list
 * ((A B)), that is, a list containing the two-element list (A B).
 *
 * @param jsonText the JSON text to decode
 * @return the decoded value, or "" if decoding fails (an error event is
 *     dispatched in that case)
 */
@SimpleFunction
// This returns an object, which in general will be a Java ArrayList, String, Boolean, Integer,
// or Double.
// The object will be sanitized to produce the corresponding Yail data by call-component-method.
// That mechanism would need to be extended if we ever change JSON decoding to produce
// dictionaries rather than lists
public Object JsonTextDecode(String jsonText) {
  try {
    return decodeJsonText(jsonText);
  } catch (IllegalArgumentException e) {
    form.dispatchErrorOccurredEvent(this, "JsonTextDecode",
        ErrorMessages.ERROR_WEB_JSON_TEXT_DECODE_FAILED, jsonText);
    return "";
  }
}
/**
 * Decodes the given JSON encoded value.
 *
 * @param jsonText the JSON text to decode
 * @return the decoded object
 * @throws IllegalArgumentException if the JSON text can't be decoded
 */
// VisibleForTesting
static Object decodeJsonText(String jsonText) throws IllegalArgumentException {
  try {
    return JsonUtil.getObjectFromJson(jsonText);
  } catch (JSONException e) {
    // Chain the JSONException so the root cause (parse position, bad token)
    // is preserved for debugging rather than silently dropped.
    throw new IllegalArgumentException("jsonText is not a legal JSON value", e);
  }
}
/**
* Decodes the given HTML text value.
*
* <pre>
* HTML Character Entities such as &, <, >, ', and " are
* changed to &, <, >, ', and ".
* Entities such as &#xhhhh, and &#nnnn are changed to the appropriate characters.
* </pre>
*
* @param htmlText the HTML text to decode
* @return the decoded text
*/
@SimpleFunction(description = "Decodes the given HTML text value. HTML character entities " +
"such as &amp;, &lt;, &gt;, &apos;, and &quot; are changed to " +
"&, <, >, ', and ". Entities such as &#xhhhh, and &#nnnn " +
"are changed to the appropriate characters.")
public String HtmlTextDecode(String htmlText) {
try {
return HtmlEntities.decodeHtmlText(htmlText);
} catch (IllegalArgumentException e) {
form.dispatchErrorOccurredEvent(this, "HtmlTextDecode",
ErrorMessages.ERROR_WEB_HTML_TEXT_DECODE_FAILED, htmlText);
return "";
}
}
/*
 * Perform a HTTP GET or POST request.
 * This method is always run on a different thread than the event thread. It does not use any
 * property value fields because the properties may be changed while it is running. Instead, it
 * uses the parameters.
 * If either postData or postFile is non-null, then a post request is performed.
 * If both postData and postFile are non-null, postData takes precedence over postFile.
 * If postData and postFile are both null, then a get request is performed.
 * If saveResponse is true, the response will be saved in a file and the GotFile event will be
 * triggered. responseFileName specifies the name of the file.
 * If saveResponse is false, the GotText event will be triggered.
 *
 * This method can throw an IOException. The caller is responsible for catching it and
 * triggering the appropriate error event.
 *
 * @param webProps the captured property values needed for the request
 * @param postData the data for the post request if it is not coming from a file, can be null
 * @param postFile the path of the file containing data for the post request if it is coming from
 *                 a file, can be null
 *
 * @throws IOException
 */
private void performRequest(final CapturedProperties webProps, byte[] postData, String postFile)
    throws IOException {
  // Open the connection.
  HttpURLConnection connection = openConnection(webProps);
  if (connection != null) {
    try {
      // Writing a body (either form) switches the request from GET to POST;
      // postData takes precedence over postFile.
      if (postData != null) {
        writePostData(connection, postData);
      } else if (postFile != null) {
        writePostFile(connection, postFile);
      }
      // Get the response.
      final int responseCode = connection.getResponseCode();
      final String responseType = getResponseType(connection);
      processResponseCookies(connection);
      // NOTE(review): this reads the live saveResponse *field*, which contradicts
      // the contract above that this method uses only captured values — if the
      // user toggles SaveResponse while a request is in flight, the wrong event
      // may fire. Confirm whether CapturedProperties captures saveResponse and
      // switch to webProps if so.
      if (saveResponse) {
        final String path = saveResponseContent(connection, webProps.responseFileName,
            responseType);
        // Dispatch the event on the UI thread, as required for component events.
        activity.runOnUiThread(new Runnable() {
          @Override
          public void run() {
            GotFile(webProps.urlString, responseCode, responseType, path);
          }
        });
      } else {
        final String responseContent = getResponseContent(connection);
        // Dispatch the event on the UI thread, as required for component events.
        activity.runOnUiThread(new Runnable() {
          @Override
          public void run() {
            GotText(webProps.urlString, responseCode, responseType, responseContent);
          }
        });
      }
    } finally {
      connection.disconnect();
    }
  }
}
/*
 * Opens an HttpURLConnection for the captured URL and copies the captured
 * request headers and cookies onto it as request properties. The connection
 * is not yet connected when returned.
 */
private static HttpURLConnection openConnection(CapturedProperties webProps)
    throws IOException, ClassCastException {
  HttpURLConnection connection = (HttpURLConnection) webProps.url.openConnection();
  // Apply the captured request headers.
  for (Map.Entry<String, List<String>> header : webProps.requestHeaders.entrySet()) {
    for (String value : header.getValue()) {
      connection.addRequestProperty(header.getKey(), value);
    }
  }
  // Replay any captured cookies as request properties.
  Map<String, List<String>> cookies = webProps.cookies;
  if (cookies != null) {
    for (Map.Entry<String, List<String>> cookie : cookies.entrySet()) {
      for (String value : cookie.getValue()) {
        connection.addRequestProperty(cookie.getKey(), value);
      }
    }
  }
  return connection;
}
/*
 * Writes the given bytes as the request body with a fixed Content-Length.
 * Per the HttpURLConnection documentation, calling setDoOutput(true) switches
 * the request from its default GET to POST.
 */
private static void writePostData(HttpURLConnection connection, byte[] postData)
    throws IOException {
  connection.setDoOutput(true); // This makes it an HTTP POST.
  connection.setFixedLengthStreamingMode(postData.length);
  BufferedOutputStream body = new BufferedOutputStream(connection.getOutputStream());
  try {
    body.write(postData, 0, postData.length);
    body.flush();
  } finally {
    body.close();
  }
}
/*
 * Streams the contents of the named media file as the request body, using
 * chunked transfer encoding (length unknown up front). MediaUtil.openMedia
 * resolves the path, which may be a file on the SD card, an asset, a contact
 * picture, etc. Per the HttpURLConnection documentation, setDoOutput(true)
 * switches the request from its default GET to POST.
 */
private void writePostFile(HttpURLConnection connection, String path)
    throws IOException {
  BufferedInputStream in = new BufferedInputStream(MediaUtil.openMedia(form, path));
  try {
    connection.setDoOutput(true); // This makes it an HTTP POST.
    connection.setChunkedStreamingMode(0);
    BufferedOutputStream out = new BufferedOutputStream(connection.getOutputStream());
    try {
      // Copy byte-by-byte; both streams are buffered.
      int b;
      while ((b = in.read()) != -1) {
        out.write(b);
      }
      out.flush();
    } finally {
      out.close();
    }
  } finally {
    in.close();
  }
}
/*
 * Returns the response's Content-Type header, or "" when the server supplied
 * none, so callers never see null.
 */
private static String getResponseType(HttpURLConnection connection) {
  String contentType = connection.getContentType();
  if (contentType == null) {
    return "";
  }
  return contentType;
}
/*
 * Stores any cookies from the response headers into the cookie handler, when
 * cookie support is both enabled and available. Failures are deliberately
 * ignored: cookies are best-effort.
 */
private void processResponseCookies(HttpURLConnection connection) {
  if (!allowCookies || cookieHandler == null) {
    return;
  }
  try {
    cookieHandler.put(connection.getURL().toURI(), connection.getHeaderFields());
  } catch (URISyntaxException e) {
    // The URL can't be expressed as a URI; skip cookie processing.
  } catch (IOException e) {
    // The cookie store rejected the headers; skip cookie processing.
  }
}
/*
 * Reads the whole response body into a String, decoding bytes with the
 * connection's content encoding (defaulting to UTF-8 when none is given).
 * NOTE(review): getContentEncoding() is the Content-Encoding header (e.g.
 * "gzip"), used here as a charset name — confirm this matches the intended
 * behavior for compressed responses.
 */
private static String getResponseContent(HttpURLConnection connection) throws IOException {
  String charset = connection.getContentEncoding();
  if (charset == null) {
    charset = "UTF-8";
  }
  InputStreamReader reader = new InputStreamReader(getConnectionStream(connection), charset);
  try {
    // Presize the builder when the server reported a Content-Length.
    int contentLength = connection.getContentLength();
    StringBuilder content = (contentLength == -1)
        ? new StringBuilder()
        : new StringBuilder(contentLength);
    char[] chunk = new char[1024];
    for (int n = reader.read(chunk); n != -1; n = reader.read(chunk)) {
      content.append(chunk, 0, n);
    }
    return content.toString();
  } finally {
    reader.close();
  }
}
/*
 * Copies the response body into a file (named explicitly or derived from the
 * response mime type — see createFile) and returns the file's absolute path.
 */
private static String saveResponseContent(HttpURLConnection connection,
    String responseFileName, String responseType) throws IOException {
  File file = createFile(responseFileName, responseType);
  BufferedInputStream in = new BufferedInputStream(getConnectionStream(connection), 0x1000);
  try {
    BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(file), 0x1000);
    try {
      // Copy byte-by-byte; both streams carry 4 KiB buffers.
      int b;
      while ((b = in.read()) != -1) {
        out.write(b);
      }
      out.flush();
    } finally {
      out.close();
    }
  } finally {
    in.close();
  }
  return file.getAbsolutePath();
}
/*
 * Returns the stream carrying the response body. Per the HttpURLConnection
 * documentation, getInputStream() throws an IOException for HTTP error
 * statuses; in that case the error body from getErrorStream() is returned
 * instead.
 */
private static InputStream getConnectionStream(HttpURLConnection connection) {
  try {
    return connection.getInputStream();
  } catch (IOException e) {
    return connection.getErrorStream();
  }
}
/*
 * Chooses the file that will receive the response body. An explicit fileName
 * wins; otherwise a download file is created whose extension is derived from
 * the response mime type (falling back to "tmp" for unknown types).
 */
private static File createFile(String fileName, String responseType)
    throws IOException, FileUtil.FileException {
  if (!TextUtils.isEmpty(fileName)) {
    return FileUtil.getExternalFile(fileName);
  }
  // Strip any mime-type parameters, e.g. "text/html; charset=ISO-8859-1" ->
  // "text/html", before looking up an extension.
  String mimeType = responseType;
  int semicolon = mimeType.indexOf(';');
  if (semicolon != -1) {
    mimeType = mimeType.substring(0, semicolon);
  }
  String extension = mimeTypeToExtension.get(mimeType);
  return FileUtil.getDownloadFile(extension == null ? "tmp" : extension);
}
/*
 * Converts request headers (a YailList) into the structure the Java API uses
 * (a Map<String, List<String>>). Each item must be a two-element sublist:
 * (field-name, value-or-list-of-values). An invalid item raises
 * InvalidRequestHeadersException carrying its 1-based index.
 */
private static Map<String, List<String>> processRequestHeaders(YailList list)
    throws InvalidRequestHeadersException {
  Map<String, List<String>> headers = Maps.newHashMap();
  for (int i = 0; i < list.size(); i++) {
    Object item = list.getObject(i);
    // Validate the item's shape before reading its contents.
    if (!(item instanceof YailList)) {
      throw new InvalidRequestHeadersException(
          ErrorMessages.ERROR_WEB_REQUEST_HEADER_NOT_LIST, i + 1);
    }
    YailList pair = (YailList) item;
    if (pair.size() != 2) {
      throw new InvalidRequestHeadersException(
          ErrorMessages.ERROR_WEB_REQUEST_HEADER_NOT_TWO_ELEMENTS, i + 1);
    }
    // First element: header field name. Second element: either a single
    // non-list value or a list of values.
    String fieldName = pair.getObject(0).toString();
    Object fieldValues = pair.getObject(1);
    List<String> values = Lists.newArrayList();
    if (fieldValues instanceof YailList) {
      YailList valueList = (YailList) fieldValues;
      for (int j = 0; j < valueList.size(); j++) {
        values.add(valueList.getObject(j).toString());
      }
    } else {
      values.add(fieldValues.toString());
    }
    headers.put(fieldName, values);
  }
  return headers;
}
/*
 * Captures the current property values that are needed for an HTTP request
 * (URL, headers, cookies, etc.) so the worker thread sees a consistent
 * snapshot. If an error occurs while validating the Url or RequestHeaders
 * property values, this method calls form.dispatchErrorOccurredEvent and
 * returns null.
 *
 * @param functionName the name of the function, used when dispatching errors
 * @return the captured properties, or null if validation failed (the error
 *         event has already been dispatched)
 */
private CapturedProperties capturePropertyValues(String functionName) {
  try {
    return new CapturedProperties(this);
  } catch (MalformedURLException e) {
    form.dispatchErrorOccurredEvent(this, functionName,
        ErrorMessages.ERROR_WEB_MALFORMED_URL, urlString);
  } catch (InvalidRequestHeadersException e) {
    form.dispatchErrorOccurredEvent(this, functionName, e.errorNumber, e.index);
  }
  return null;
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.search;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TotalHits;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.SearchPhaseResult;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.QueryFetchSearchResult;
import org.elasticsearch.search.fetch.ShardFetchSearchRequest;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.transport.Transport;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;
public class FetchSearchPhaseTests extends ESTestCase {
/**
 * With a single shard the query result may already carry its fetch result
 * (QueryFetchSearchResult); the fetch phase must then use it directly instead
 * of issuing a separate fetch round-trip. Randomly exercises both the
 * with-hits and empty cases.
 */
public void testShortcutQueryAndFetchOptimization() {
  SearchPhaseController controller = new SearchPhaseController(
      (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b));
  MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1);
  ArraySearchPhaseResults<SearchPhaseResult> results = controller.newSearchPhaseResults(mockSearchPhaseContext.getRequest(), 1);
  boolean hasHits = randomBoolean();
  final int numHits;
  if (hasHits) {
    // Combined query+fetch result for the single shard: one doc (id 42, score 1.0).
    QuerySearchResult queryResult = new QuerySearchResult();
    queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
        new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 1.0F), new DocValueFormat[0]);
    queryResult.size(1);
    FetchSearchResult fetchResult = new FetchSearchResult();
    fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(42)}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F));
    QueryFetchSearchResult fetchSearchResult = new QueryFetchSearchResult(queryResult, fetchResult);
    fetchSearchResult.setShardIndex(0);
    results.consumeResult(fetchSearchResult);
    numHits = 1;
  } else {
    numHits = 0;
  }
  // The next phase simply forwards the response to the mock context.
  FetchSearchPhase phase = new FetchSearchPhase(results, controller, mockSearchPhaseContext,
      (searchResponse, scrollId) -> new SearchPhase("test") {
        @Override
        public void run() {
          mockSearchPhaseContext.sendSearchResponse(searchResponse, null);
        }
      });
  assertEquals("fetch", phase.getName());
  phase.run();
  mockSearchPhaseContext.assertNoFailure();
  SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get();
  assertNotNull(searchResponse);
  assertEquals(numHits, searchResponse.getHits().getTotalHits().value);
  if (numHits != 0) {
    assertEquals(42, searchResponse.getHits().getAt(0).docId());
  }
  // No search contexts should need freeing when the shortcut is taken.
  assertTrue(mockSearchPhaseContext.releasedSearchContexts.isEmpty());
}
/**
 * Two shards each contribute one query hit; the fetch phase must fetch both
 * docs through the (stubbed) transport, merge them in descending-score order,
 * and report both shards successful.
 */
public void testFetchTwoDocument() {
  MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2);
  SearchPhaseController controller = new SearchPhaseController(
      (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b));
  ArraySearchPhaseResults<SearchPhaseResult> results = controller.newSearchPhaseResults(mockSearchPhaseContext.getRequest(), 2);
  int resultSetSize = randomIntBetween(2, 10);
  // Shard 0: request id 123, doc 42, score 1.0.
  QuerySearchResult queryResult = new QuerySearchResult(123, new SearchShardTarget("node1", new ShardId("test", "na", 0),
      null, OriginalIndices.NONE));
  queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
      new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]);
  queryResult.size(resultSetSize); // the size of the result set
  queryResult.setShardIndex(0);
  results.consumeResult(queryResult);
  // Shard 1: request id 321, doc 84, score 2.0 (higher, so it ranks first).
  queryResult = new QuerySearchResult(321, new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE));
  queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
      new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]);
  queryResult.size(resultSetSize);
  queryResult.setShardIndex(1);
  results.consumeResult(queryResult);
  // Stub the transport so each fetch request returns the hit matching its id.
  mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null) {
    @Override
    public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRequest request, SearchTask task,
        SearchActionListener<FetchSearchResult> listener) {
      FetchSearchResult fetchResult = new FetchSearchResult();
      if (request.id() == 321) {
        fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(84)},
            new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0F));
      } else {
        assertEquals(123, request.id());
        fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(42)},
            new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F));
      }
      listener.onResponse(fetchResult);
    }
  };
  FetchSearchPhase phase = new FetchSearchPhase(results, controller, mockSearchPhaseContext,
      (searchResponse, scrollId) -> new SearchPhase("test") {
        @Override
        public void run() {
          mockSearchPhaseContext.sendSearchResponse(searchResponse, null);
        }
      });
  assertEquals("fetch", phase.getName());
  phase.run();
  mockSearchPhaseContext.assertNoFailure();
  SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get();
  assertNotNull(searchResponse);
  // Hits must come back ordered by descending score: 84 (2.0) before 42 (1.0).
  assertEquals(2, searchResponse.getHits().getTotalHits().value);
  assertEquals(84, searchResponse.getHits().getAt(0).docId());
  assertEquals(42, searchResponse.getHits().getAt(1).docId());
  assertEquals(0, searchResponse.getFailedShards());
  assertEquals(2, searchResponse.getSuccessfulShards());
  assertTrue(mockSearchPhaseContext.releasedSearchContexts.isEmpty());
}
/**
 * One of two fetches fails: the phase must still produce a response with the
 * successful hit, report the failure as a shard failure, and release the
 * search context of the shard whose fetch failed.
 */
public void testFailFetchOneDoc() {
  MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2);
  SearchPhaseController controller = new SearchPhaseController(
      (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b));
  ArraySearchPhaseResults<SearchPhaseResult> results =
      controller.newSearchPhaseResults(mockSearchPhaseContext.getRequest(), 2);
  int resultSetSize = randomIntBetween(2, 10);
  // Shard 0: request id 123, doc 42, score 1.0 — its fetch will fail below.
  QuerySearchResult queryResult = new QuerySearchResult(123, new SearchShardTarget("node1", new ShardId("test", "na", 0),
      null, OriginalIndices.NONE));
  queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
      new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]);
  queryResult.size(resultSetSize); // the size of the result set
  queryResult.setShardIndex(0);
  results.consumeResult(queryResult);
  // Shard 1: request id 321, doc 84, score 2.0 — its fetch will succeed.
  queryResult = new QuerySearchResult(321, new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE));
  queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
      new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]);
  queryResult.size(resultSetSize);
  queryResult.setShardIndex(1);
  results.consumeResult(queryResult);
  // Stub the transport: id 321 succeeds, anything else fails with a fake I/O error.
  mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null) {
    @Override
    public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRequest request, SearchTask task,
        SearchActionListener<FetchSearchResult> listener) {
      if (request.id() == 321) {
        FetchSearchResult fetchResult = new FetchSearchResult();
        fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(84)},
            new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0F));
        listener.onResponse(fetchResult);
      } else {
        listener.onFailure(new MockDirectoryWrapper.FakeIOException());
      }
    }
  };
  FetchSearchPhase phase = new FetchSearchPhase(results, controller, mockSearchPhaseContext,
      (searchResponse, scrollId) -> new SearchPhase("test") {
        @Override
        public void run() {
          mockSearchPhaseContext.sendSearchResponse(searchResponse, null);
        }
      });
  assertEquals("fetch", phase.getName());
  phase.run();
  mockSearchPhaseContext.assertNoFailure();
  SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get();
  assertNotNull(searchResponse);
  // Total hits still counts both query hits, but only the fetched doc is returned.
  assertEquals(2, searchResponse.getHits().getTotalHits().value);
  assertEquals(84, searchResponse.getHits().getAt(0).docId());
  assertEquals(1, searchResponse.getFailedShards());
  assertEquals(1, searchResponse.getSuccessfulShards());
  assertEquals(1, searchResponse.getShardFailures().length);
  assertTrue(searchResponse.getShardFailures()[0].getCause() instanceof MockDirectoryWrapper.FakeIOException);
  // The failed shard's search context (id 123) must have been freed.
  assertEquals(1, mockSearchPhaseContext.releasedSearchContexts.size());
  assertTrue(mockSearchPhaseContext.releasedSearchContexts.contains(123L));
}
    /**
     * Runs one fetch per shard, each answered asynchronously on its own thread, and verifies the
     * merged response: hits arrive sorted by descending score, only min(numHits, resultSetSize)
     * docs are fetched, and every query context that was NOT fetched is released.
     *
     * @throws InterruptedException if the latch wait is interrupted
     */
    public void testFetchDocsConcurrently() throws InterruptedException {
        int resultSetSize = randomIntBetween(0, 100);
        // we use at least 2 hits otherwise this is subject to single shard optimization and we trip an assert...
        int numHits = randomIntBetween(2, 100); // also numshards --> 1 hit per shard
        SearchPhaseController controller = new SearchPhaseController(
            (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b));
        MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(numHits);
        ArraySearchPhaseResults<SearchPhaseResult> results = controller.newSearchPhaseResults(mockSearchPhaseContext.getRequest(), numHits);
        // Shard i contributes doc (i+1) with score i, so shard numHits-1 holds the top hit.
        for (int i = 0; i < numHits; i++) {
            QuerySearchResult queryResult = new QuerySearchResult(i, new SearchShardTarget("node1", new ShardId("test", "na", 0),
                null, OriginalIndices.NONE));
            queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
                new ScoreDoc[] {new ScoreDoc(i+1, i)}), i), new DocValueFormat[0]);
            queryResult.size(resultSetSize); // the size of the result set
            queryResult.setShardIndex(i);
            results.consumeResult(queryResult);
        }
        // Transport stub: answer each fetch on a fresh thread to exercise concurrent completion.
        mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null) {
            @Override
            public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRequest request, SearchTask task,
                                         SearchActionListener<FetchSearchResult> listener) {
                new Thread(() -> {
                    FetchSearchResult fetchResult = new FetchSearchResult();
                    fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit((int) (request.id()+1))},
                        new TotalHits(1, TotalHits.Relation.EQUAL_TO), 100F));
                    listener.onResponse(fetchResult);
                }).start();
            }
        };
        // Latch released by the next phase, so assertions only run after all fetches completed.
        CountDownLatch latch = new CountDownLatch(1);
        FetchSearchPhase phase = new FetchSearchPhase(results, controller, mockSearchPhaseContext,
            (searchResponse, scrollId) -> new SearchPhase("test") {
                @Override
                public void run() {
                    mockSearchPhaseContext.sendSearchResponse(searchResponse, null);
                    latch.countDown();
                }
            });
        assertEquals("fetch", phase.getName());
        phase.run();
        latch.await();
        mockSearchPhaseContext.assertNoFailure();
        SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get();
        assertNotNull(searchResponse);
        assertEquals(numHits, searchResponse.getHits().getTotalHits().value);
        assertEquals(Math.min(numHits, resultSetSize), searchResponse.getHits().getHits().length);
        SearchHit[] hits = searchResponse.getHits().getHits();
        // Hits are ordered by descending score, so position i holds doc numHits-i / score numHits-1-i.
        for (int i = 0; i < hits.length; i++) {
            assertNotNull(hits[i]);
            assertEquals("index: " + i, numHits-i, hits[i].docId());
            assertEquals("index: " + i, numHits-1-i, (int)hits[i].getScore());
        }
        assertEquals(0, searchResponse.getFailedShards());
        assertEquals(numHits, searchResponse.getSuccessfulShards());
        int sizeReleasedContexts = Math.max(0, numHits - resultSetSize); // all non fetched results will be freed
        assertEquals(mockSearchPhaseContext.releasedSearchContexts.toString(),
            sizeReleasedContexts, mockSearchPhaseContext.releasedSearchContexts.size());
    }
public void testExceptionFailsPhase() {
MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2);
SearchPhaseController controller = new SearchPhaseController(
(b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b));
ArraySearchPhaseResults<SearchPhaseResult> results =
controller.newSearchPhaseResults(mockSearchPhaseContext.getRequest(), 2);
int resultSetSize = randomIntBetween(2, 10);
QuerySearchResult queryResult = new QuerySearchResult(123, new SearchShardTarget("node1", new ShardId("test", "na", 0),
null, OriginalIndices.NONE));
queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]);
queryResult.size(resultSetSize); // the size of the result set
queryResult.setShardIndex(0);
results.consumeResult(queryResult);
queryResult = new QuerySearchResult(321, new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE));
queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]);
queryResult.size(resultSetSize);
queryResult.setShardIndex(1);
results.consumeResult(queryResult);
AtomicInteger numFetches = new AtomicInteger(0);
mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null) {
@Override
public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRequest request, SearchTask task,
SearchActionListener<FetchSearchResult> listener) {
FetchSearchResult fetchResult = new FetchSearchResult();
if (numFetches.incrementAndGet() == 1) {
throw new RuntimeException("BOOM");
}
if (request.id() == 321) {
fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(84)},
new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0F));
} else {
assertEquals(request, 123);
fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(42)},
new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F));
}
listener.onResponse(fetchResult);
}
};
FetchSearchPhase phase = new FetchSearchPhase(results, controller, mockSearchPhaseContext,
(searchResponse, scrollId) -> new SearchPhase("test") {
@Override
public void run() {
mockSearchPhaseContext.sendSearchResponse(searchResponse, null);
}
});
assertEquals("fetch", phase.getName());
phase.run();
assertNotNull(mockSearchPhaseContext.phaseFailure.get());
assertEquals(mockSearchPhaseContext.phaseFailure.get().getMessage(), "BOOM");
assertNull(mockSearchPhaseContext.searchResponse.get());
assertTrue(mockSearchPhaseContext.releasedSearchContexts.isEmpty());
}
public void testCleanupIrrelevantContexts() { // contexts that are not fetched should be cleaned up
MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2);
SearchPhaseController controller = new SearchPhaseController(
(b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b));
ArraySearchPhaseResults<SearchPhaseResult> results =
controller.newSearchPhaseResults(mockSearchPhaseContext.getRequest(), 2);
int resultSetSize = 1;
QuerySearchResult queryResult = new QuerySearchResult(123, new SearchShardTarget("node1", new ShardId("test", "na", 0),
null, OriginalIndices.NONE));
queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]);
queryResult.size(resultSetSize); // the size of the result set
queryResult.setShardIndex(0);
results.consumeResult(queryResult);
queryResult = new QuerySearchResult(321, new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE));
queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]);
queryResult.size(resultSetSize);
queryResult.setShardIndex(1);
results.consumeResult(queryResult);
mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null) {
@Override
public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRequest request, SearchTask task,
SearchActionListener<FetchSearchResult> listener) {
FetchSearchResult fetchResult = new FetchSearchResult();
if (request.id() == 321) {
fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(84)},
new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0F));
} else {
fail("requestID 123 should not be fetched but was");
}
listener.onResponse(fetchResult);
}
};
FetchSearchPhase phase = new FetchSearchPhase(results, controller, mockSearchPhaseContext,
(searchResponse, scrollId) -> new SearchPhase("test") {
@Override
public void run() {
mockSearchPhaseContext.sendSearchResponse(searchResponse, null);
}
});
assertEquals("fetch", phase.getName());
phase.run();
mockSearchPhaseContext.assertNoFailure();
SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get();
assertNotNull(searchResponse);
assertEquals(2, searchResponse.getHits().getTotalHits().value);
assertEquals(1, searchResponse.getHits().getHits().length);
assertEquals(84, searchResponse.getHits().getAt(0).docId());
assertEquals(0, searchResponse.getFailedShards());
assertEquals(2, searchResponse.getSuccessfulShards());
assertEquals(1, mockSearchPhaseContext.releasedSearchContexts.size());
assertTrue(mockSearchPhaseContext.releasedSearchContexts.contains(123L));
}
}
| |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.client.tools;
import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
import org.apache.airavata.model.appcatalog.appdeployment.ApplicationModule;
import org.apache.airavata.model.appcatalog.appdeployment.CommandObject;
import org.apache.airavata.model.appcatalog.appinterface.*;
import org.apache.airavata.model.appcatalog.computeresource.*;
import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
import org.apache.airavata.model.application.io.DataType;
import org.apache.airavata.model.application.io.InputDataObjectType;
import org.apache.airavata.model.application.io.OutputDataObjectType;
import org.apache.airavata.model.data.movement.DataMovementInterface;
import org.apache.airavata.model.data.movement.DataMovementProtocol;
import org.apache.airavata.model.parallelism.ApplicationParallelismType;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Factory helpers for building Airavata app-catalog model objects (compute resource
 * preferences/descriptions, application deployments/modules/interfaces, job submission and
 * data movement interfaces, and application I/O descriptors) used when registering sample
 * applications. All methods are pure builders: they populate a fresh model object from the
 * given arguments and perform no I/O.
 */
public class RegisterSampleApplicationsUtils {

    /** Utility class; not instantiable. */
    private RegisterSampleApplicationsUtils() {
    }

    /**
     * Builds a {@link ComputeResourcePreference} for a gateway resource profile.
     *
     * @param computeResourceId              id of the compute resource this preference applies to
     * @param allocationProjectNumber        allocation/project number charged for jobs
     * @param overridebyAiravata             whether Airavata may override these preferences
     * @param preferredBatchQueue            default batch queue name
     * @param preferredJobSubmissionProtocol default job submission protocol
     * @param preferredDataMovementProtocol  default data movement protocol
     * @param scratchLocation                scratch/working directory on the resource
     * @return the populated preference
     */
    public static ComputeResourcePreference
    createComputeResourcePreference(String computeResourceId, String allocationProjectNumber,
                                    boolean overridebyAiravata, String preferredBatchQueue,
                                    JobSubmissionProtocol preferredJobSubmissionProtocol,
                                    DataMovementProtocol preferredDataMovementProtocol,
                                    String scratchLocation) {
        ComputeResourcePreference computeResourcePreference = new ComputeResourcePreference();
        computeResourcePreference.setComputeResourceId(computeResourceId);
        computeResourcePreference.setOverridebyAiravata(overridebyAiravata);
        computeResourcePreference.setAllocationProjectNumber(allocationProjectNumber);
        computeResourcePreference.setPreferredBatchQueue(preferredBatchQueue);
        computeResourcePreference.setPreferredDataMovementProtocol(preferredDataMovementProtocol);
        computeResourcePreference.setPreferredJobSubmissionProtocol(preferredJobSubmissionProtocol);
        computeResourcePreference.setScratchLocation(scratchLocation);
        return computeResourcePreference;
    }

    /**
     * Builds an {@link ApplicationDeploymentDescription} binding an application module to a
     * compute resource.
     *
     * @param appModuleId              id of the application module being deployed
     * @param computeResourceId        id of the host the executable lives on
     * @param executablePath           absolute path of the executable on the host
     * @param parallelism              parallelism model (serial, MPI, ...)
     * @param appDeploymentDescription human-readable description
     * @param moduleLoadCmds           commands to load required environment modules (may be null)
     * @param preJobCmds               commands run before the job (may be null)
     * @param postJobCmds              commands run after the job (may be null)
     * @return the populated deployment description
     */
    public static ApplicationDeploymentDescription createApplicationDeployment(String appModuleId,
                                                                               String computeResourceId,
                                                                               String executablePath,
                                                                               ApplicationParallelismType parallelism,
                                                                               String appDeploymentDescription,
                                                                               List<CommandObject> moduleLoadCmds,
                                                                               List<CommandObject> preJobCmds,
                                                                               List<CommandObject> postJobCmds) {
        ApplicationDeploymentDescription deployment = new ApplicationDeploymentDescription();
        deployment.setAppDeploymentDescription(appDeploymentDescription);
        deployment.setAppModuleId(appModuleId);
        deployment.setComputeHostId(computeResourceId);
        deployment.setExecutablePath(executablePath);
        deployment.setParallelism(parallelism);
        deployment.setModuleLoadCmds(moduleLoadCmds);
        deployment.setPreJobCommands(preJobCmds);
        deployment.setPostJobCommands(postJobCmds);
        return deployment;
    }

    /**
     * Builds an {@link ApplicationModule} (a named, versioned piece of software).
     *
     * @param appModuleName        module name
     * @param appModuleVersion     module version string
     * @param appModuleDescription human-readable description
     * @return the populated module
     */
    public static ApplicationModule createApplicationModule(String appModuleName,
                                                            String appModuleVersion, String appModuleDescription) {
        ApplicationModule module = new ApplicationModule();
        module.setAppModuleDescription(appModuleDescription);
        module.setAppModuleName(appModuleName);
        module.setAppModuleVersion(appModuleVersion);
        return module;
    }

    /**
     * Builds a {@link DataMovementInterface} entry for a compute resource.
     *
     * @param dataMovementInterfaceId  interface id
     * @param dataMovementProtocolType protocol (SCP, GridFTP, ...)
     * @param priorityOrder            selection priority (lower is preferred)
     * @return the populated interface
     */
    public static DataMovementInterface createDataMovementInterface(
            String dataMovementInterfaceId,
            DataMovementProtocol dataMovementProtocolType, int priorityOrder) {
        DataMovementInterface dataMovementInterface = new DataMovementInterface();
        dataMovementInterface.setDataMovementInterfaceId(dataMovementInterfaceId);
        dataMovementInterface.setDataMovementProtocol(dataMovementProtocolType);
        dataMovementInterface.setPriorityOrder(priorityOrder);
        return dataMovementInterface;
    }

    /**
     * Builds a {@link JobSubmissionInterface} entry for a compute resource.
     *
     * @param jobSubmissionInterfaceId  interface id
     * @param jobSubmissionProtocolType protocol (SSH, LOCAL, ...)
     * @param priorityOrder             selection priority (lower is preferred)
     * @return the populated interface
     */
    public static JobSubmissionInterface createJobSubmissionInterface(
            String jobSubmissionInterfaceId,
            JobSubmissionProtocol jobSubmissionProtocolType, int priorityOrder) {
        JobSubmissionInterface jobSubmissionInterface = new JobSubmissionInterface();
        jobSubmissionInterface.setJobSubmissionInterfaceId(jobSubmissionInterfaceId);
        jobSubmissionInterface.setJobSubmissionProtocol(jobSubmissionProtocolType);
        jobSubmissionInterface.setPriorityOrder(priorityOrder);
        return jobSubmissionInterface;
    }

    /**
     * Builds a {@link ComputeResourceDescription} for a host.
     *
     * @param hostName    canonical host name
     * @param hostDesc    human-readable description
     * @param hostAliases alternate names for the host (may be null)
     * @param ipAddresses IP addresses of the host (may be null)
     * @return the populated description
     */
    public static ComputeResourceDescription createComputeResourceDescription(
            String hostName, String hostDesc, List<String> hostAliases, List<String> ipAddresses) {
        ComputeResourceDescription host = new ComputeResourceDescription();
        host.setHostName(hostName);
        host.setResourceDescription(hostDesc);
        host.setIpAddresses(ipAddresses);
        host.setHostAliases(hostAliases);
        return host;
    }

    /**
     * Builds a {@link ResourceJobManager} describing the batch system on a resource.
     *
     * @param resourceJobManagerType manager type (SLURM, PBS, ...)
     * @param pushMonitoringEndpoint endpoint for push-based job monitoring (may be null)
     * @param jobManagerBinPath      path to the manager's binaries (may be null)
     * @param jobManagerCommands     overrides for individual manager commands (may be null)
     * @return the populated job manager
     */
    public static ResourceJobManager createResourceJobManager(
            ResourceJobManagerType resourceJobManagerType, String pushMonitoringEndpoint, String jobManagerBinPath,
            Map<JobManagerCommand, String> jobManagerCommands) {
        ResourceJobManager resourceJobManager = new ResourceJobManager();
        resourceJobManager.setResourceJobManagerType(resourceJobManagerType);
        resourceJobManager.setPushMonitoringEndpoint(pushMonitoringEndpoint);
        resourceJobManager.setJobManagerBinPath(jobManagerBinPath);
        resourceJobManager.setJobManagerCommands(jobManagerCommands);
        return resourceJobManager;
    }

    /**
     * Builds an application input descriptor. Null arguments for the optional String fields are
     * simply left unset on the returned object.
     *
     * @param inputName           input name (nullable)
     * @param value               default value (nullable)
     * @param type                data type (nullable)
     * @param applicationArgument command-line flag associated with the input (nullable)
     * @param order               position of the input on the command line
     * @param isRequired          whether the input is mandatory
     * @param requiredToCMD       whether the input must be added to the command line
     * @param stdIn               whether the input is fed via standard input
     * @param description         user-friendly description (nullable)
     * @param metadata            free-form metadata (nullable)
     * @return the populated input descriptor
     */
    public static InputDataObjectType createAppInput(String inputName,
                                                     String value,
                                                     DataType type,
                                                     String applicationArgument,
                                                     int order,
                                                     boolean isRequired,
                                                     boolean requiredToCMD,
                                                     boolean stdIn,
                                                     String description,
                                                     String metadata) {
        InputDataObjectType input = new InputDataObjectType();
        if (inputName != null) input.setName(inputName);
        if (value != null) input.setValue(value);
        if (type != null) input.setType(type);
        if (applicationArgument != null) input.setApplicationArgument(applicationArgument);
        input.setInputOrder(order);
        input.setIsRequired(isRequired);
        input.setRequiredToAddedToCommandLine(requiredToCMD);
        if (description != null) input.setUserFriendlyDescription(description);
        input.setStandardInput(stdIn);
        if (metadata != null) input.setMetaData(metadata);
        return input;
    }

    /**
     * Builds an application output descriptor.
     *
     * @param outputName    output name (nullable; was misleadingly called {@code inputName})
     * @param value         output value/pattern (nullable)
     * @param type          data type (nullable)
     * @param isRequired    whether the output is mandatory
     * @param requiredToCMD whether the output must be added to the command line
     * @param argument      command-line flag associated with the output
     * @return the populated output descriptor
     */
    public static OutputDataObjectType createAppOutput(String outputName,
                                                       String value,
                                                       DataType type,
                                                       boolean isRequired,
                                                       boolean requiredToCMD,
                                                       String argument) {
        OutputDataObjectType outputDataObjectType = new OutputDataObjectType();
        if (outputName != null) outputDataObjectType.setName(outputName);
        if (value != null) outputDataObjectType.setValue(value);
        if (type != null) outputDataObjectType.setType(type);
        outputDataObjectType.setIsRequired(isRequired);
        outputDataObjectType.setRequiredToAddedToCommandLine(requiredToCMD);
        outputDataObjectType.setApplicationArgument(argument);
        return outputDataObjectType;
    }

    /**
     * Builds an {@link ApplicationInterfaceDescription} tying modules and I/O descriptors
     * together under an application name. Null collection arguments are left unset.
     *
     * @param applicationName        application name
     * @param applicationDescription human-readable description (nullable)
     * @param applicationModules     ids of the modules implementing the application (nullable)
     * @param applicationInputs      input descriptors (nullable)
     * @param applicationOutputs     output descriptors (nullable)
     * @return the populated interface description
     */
    public static ApplicationInterfaceDescription createApplicationInterfaceDescription
    (String applicationName, String applicationDescription, List<String> applicationModules,
     List<InputDataObjectType> applicationInputs, List<OutputDataObjectType> applicationOutputs) {
        ApplicationInterfaceDescription applicationInterfaceDescription = new ApplicationInterfaceDescription();
        applicationInterfaceDescription.setApplicationName(applicationName);
        if (applicationDescription != null) applicationInterfaceDescription.setApplicationDescription(applicationDescription);
        if (applicationModules != null) applicationInterfaceDescription.setApplicationModules(applicationModules);
        if (applicationInputs != null) applicationInterfaceDescription.setApplicationInputs(applicationInputs);
        if (applicationOutputs != null) applicationInterfaceDescription.setApplicationOutputs(applicationOutputs);
        return applicationInterfaceDescription;
    }
}
| |
package edu.jhu.pacaya.autodiff;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Deque;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.Stack;
/**
 * Iterative topological sort and depth-first-search utilities over graphs described
 * functionally via the {@link Deps} interface. All methods are static and stateless.
 */
public class Toposort {

    /** Functional description of a graph: maps a node to the collection of nodes it depends on. */
    public interface Deps<T> {
        Collection<T> getDeps(T x);
    }

    private Toposort() {
        // Utility class; not instantiable.
    }

    /**
     * Gets a topological sort for the graph reachable from the given root. Dependencies appear
     * before their dependents in the returned list.
     *
     * @param root The root of the graph.
     * @param deps Functional description of the graph's dependencies.
     * @return The topological sort.
     * @throws IllegalStateException if the graph contains a cycle.
     */
    public static <T> List<T> toposort(T root, Deps<T> deps) {
        List<T> order = new ArrayList<T>();
        HashSet<T> done = new HashSet<T>();
        // ArrayDeque is the recommended stack implementation (Stack is legacy/synchronized).
        Deque<T> todo = new ArrayDeque<T>();
        HashSet<T> ancestors = new HashSet<T>();
        // Run a Tarjan (1976) style iterative topological sort.
        todo.push(root);
        while (!todo.isEmpty()) {
            T x = todo.peek();
            // ready is true iff all of x's dependencies are already finished.
            boolean ready = true;
            for (T y : deps.getDeps(x)) {
                if (!done.contains(y)) {
                    ready = false;
                    todo.push(y);
                }
            }
            if (ready) {
                todo.pop();
                ancestors.remove(x);
                // A node can be pushed multiple times; emit it only on first completion.
                if (done.add(x)) {
                    order.add(x);
                }
            } else {
                // Re-encountering a node that is still an open ancestor means a cycle.
                if (ancestors.contains(x)) {
                    throw new IllegalStateException("Graph is not a DAG. Cycle involves node: " + x);
                }
                ancestors.add(x);
            }
        }
        return order;
    }

    /**
     * Gets a topological sort for the graph, where the depth-first search is cutoff by an input set.
     *
     * @param inputs The input set which is excluded from the graph.
     * @param root The root of the graph.
     * @param deps Functional description of the graph's dependencies.
     * @param isFullCut Whether the input set is a full cut of the graph.
     * @return The topological sort.
     * @throws IllegalStateException if {@code inputs} contains duplicates.
     */
    public static <T> List<T> toposort(List<T> inputs,
                                       T root, final Deps<T> deps, boolean isFullCut) {
        // Get inputs as a set, rejecting duplicates.
        final HashSet<T> inputSet = new HashSet<T>(inputs);
        if (inputSet.size() != inputs.size()) {
            throw new IllegalStateException("Multiple copies of module in inputs list: " + inputs);
        }
        return toposort(inputSet, root, deps, isFullCut);
    }

    /**
     * Gets a topological sort for the graph, where the depth-first search is cutoff by an input set.
     *
     * @param inputSet The input set which is excluded from the graph.
     * @param root The root of the graph.
     * @param deps Functional description of the graph's dependencies.
     * @param isFullCut Whether the input set is a full cut of the graph.
     * @return The topological sort.
     */
    public static <T> List<T> toposort(final Set<T> inputSet, T root, final Deps<T> deps, boolean isFullCut) {
        // Check that inputs set is a valid set of leaves for the given output module.
        checkAreDescendentsOf(inputSet, root, deps);
        if (isFullCut) {
            checkIsFullCut(inputSet, root, deps);
        }
        Deps<T> cutoffDeps = getCutoffDeps(inputSet, deps);
        return Toposort.toposort(root, cutoffDeps);
    }

    /** Gets a new Deps graph where each node in the input set is removed from the graph. */
    public static <T> Deps<T> getCutoffDeps(final Set<T> inputSet, final Deps<T> deps) {
        Deps<T> cutoffDeps = new Deps<T>() {
            @Override
            public Collection<T> getDeps(T x) {
                HashSet<T> pruned = new HashSet<T>(deps.getDeps(x));
                pruned.removeAll(inputSet);
                return pruned;
            }
        };
        return cutoffDeps;
    }

    /**
     * Checks that the given inputSet consists of only descendents of the root.
     *
     * @throws IllegalStateException if some input is not reachable from the root.
     */
    public static <T> void checkAreDescendentsOf(Set<T> inputSet, T root, Deps<T> deps) {
        // Every module in the input set must be reachable from the output module.
        HashSet<T> visited = new HashSet<T>();
        dfs(root, visited, deps);
        if (!visited.containsAll(inputSet)) {
            throw new IllegalStateException("Input set contains modules which are not descendents of the output module: " + inputSet);
        }
    }

    /**
     * Checks that the given inputSet defines a full cut through the graph rooted at the given root.
     *
     * @throws IllegalStateException if some leaf of the graph is not covered by the input set.
     */
    public static <T> void checkIsFullCut(Set<T> inputSet, T root, Deps<T> deps) {
        HashSet<T> visited = new HashSet<T>();
        // Pre-mark the inputSet as visited. If it is a valid leaf set, the DFS will find no
        // other leaves.
        visited.addAll(inputSet);
        HashSet<T> leaves = dfs(root, visited, deps);
        if (!leaves.isEmpty()) {
            throw new IllegalStateException("Input set is not a valid leaf set for the given output module. Extra leaves: " + leaves);
        }
    }

    /**
     * Depth-first search starting at the given output node.
     *
     * @param root The root node.
     * @param visited The set of visited nodes. Upon completion this set will contain every node
     *            that was visited during this run of depth-first-search.
     * @return The set of leaf nodes (excluding any which were already marked as visited).
     */
    // TODO: detect cycles.
    public static <T> HashSet<T> dfs(T root, Set<T> visited, Deps<T> deps) {
        HashSet<T> leaves = new HashSet<T>();
        Deque<T> stack = new ArrayDeque<T>();
        stack.push(root);
        while (!stack.isEmpty()) {
            T p = stack.pop();
            if (visited.add(p)) {
                // Unseen node: either a leaf (no deps) or push its children.
                Collection<T> children = deps.getDeps(p);
                if (children.isEmpty()) {
                    leaves.add(p);
                } else {
                    for (T c : children) {
                        stack.push(c);
                    }
                }
            }
            // Already seen: nothing to do.
        }
        return leaves;
    }

    /** Gets the leaves in DFS order. */
    public static <T> HashSet<T> getLeaves(T root, Deps<T> deps) {
        return dfs(root, new HashSet<T>(), deps);
    }

    /**
     * Gets the nodes (excluding the inputs themselves) that depend directly on at least one node
     * of the input set; with an empty input set, returns the graph's leaves instead.
     */
    public static <T> Set<T> getImmediateParents(Set<T> inputs, T root, Deps<T> deps) {
        HashSet<T> visited = new HashSet<T>();
        HashSet<T> leaves = Toposort.dfs(root, visited, Toposort.getCutoffDeps(inputs, deps));
        if (inputs.isEmpty()) {
            return leaves;
        }
        HashSet<T> parents = new HashSet<T>();
        for (T x : visited) {
            for (T y : deps.getDeps(x)) {
                if (inputs.contains(y)) {
                    parents.add(x);
                    break;
                }
            }
        }
        return parents;
    }
}
| |
package com.ternsip.structpro.structure;
import com.ternsip.structpro.universe.blocks.UBlock;
import com.ternsip.structpro.universe.blocks.UBlockPos;
import com.ternsip.structpro.universe.blocks.UBlockState;
import com.ternsip.structpro.universe.blocks.UBlocks;
import com.ternsip.structpro.universe.entities.Tiles;
import com.ternsip.structpro.universe.utils.Utils;
import com.ternsip.structpro.universe.world.UWorld;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import net.minecraftforge.common.util.Constants;
import java.io.File;
import java.io.IOException;
import java.util.Random;
/**
* Schematic - Classical Minecraft schematic storage
* Provide controls for schematic
* @author Ternsip
*/
@SuppressWarnings({"WeakerAccess"})
public class Blueprint extends Volume implements Schema {
/** Tag file size limit in bytes */
private static final long TAG_FILE_SIZE_LIMIT = 1024 * 1024 * 16;
/** Block ID array */
private short[] blocks;
/** Block metadata array */
private byte[] metas;
/** Tag array */
private NBTTagCompound[] tiles;
/** Empty constructor */
Blueprint() {}
/**
* Construct from extracted world part
* @param world World instance
* @param start Starting position
* @param volume Volume dimensions
*/
public Blueprint(UWorld world, UBlockPos start, Volume volume) {
super(volume);
int size = getSize();
setBlocks(new short[size]);
setMetas(new byte[size]);
setTiles(new NBTTagCompound[size]);
Posture posture = getPosture(start.getX(), start.getY(), start.getZ(), 0, 0, 0, false, false, false);
for (int ix = 0; ix < getWidth(); ++ix) {
for (int iy = 0; iy < getHeight(); ++iy) {
for (int iz = 0; iz < getLength(); ++iz) {
UBlockPos pos = posture.getWorldPos(ix, iy, iz);
UBlockState state = world.getBlockState(pos);
int blockID = state.getID();
int index = getIndex(ix, iy, iz);
setBlock((short) blockID, index);
setMeta((byte) state.getMeta(), index);
setTile(world.getTileTag(pos), index);
if (getTile(index) != null) {
getTile(index).setInteger("x", ix);
getTile(index).setInteger("y", iy);
getTile(index).setInteger("z", iz);
}
}
}
}
}
/**
* Load schematic from file
* @param file File to load
* @throws IOException If schematic can not be loaded
*/
public void loadSchematic(File file) throws IOException {
if (file.length() > TAG_FILE_SIZE_LIMIT) {
throw new IOException("File is too large: " + file.length());
}
readSchematic(Utils.readTags(file));
}
/**
* Save as schematic to file
* @param file Destination file
* @throws IOException If schematic can not be saved
*/
public void saveSchematic(File file) throws IOException {
Utils.writeTags(file, getSchematic());
}
/**
* Read from schematic tag
* @param tag Control tag
* @throws IOException If schematic tag can not be read
*/
private void readSchematic(NBTTagCompound tag) throws IOException {
String materials = tag.getString("Materials");
if (!materials.equals("Alpha")) {
throw new IOException("Materials of schematic is not an alpha: [" + materials + "]");
}
setWidth(tag.getShort("Width"));
setHeight(tag.getShort("Height"));
setLength(tag.getShort("Length"));
String dimensions = "[W=" + getWidth() + ";H=" + getHeight() + ";L=" + getLength() + "]";
String dimLimit = "[W=" + WIDTH_LIMIT + ";H=" + HEIGHT_LIMIT + ";L=" + LENGTH_LIMIT + "]";
if (getWidth() <= 0 || getHeight() <= 0 || getLength() <= 0) {
throw new IOException("Schematic has non-positive dimensions: " + dimensions);
}
if (getWidth() > WIDTH_LIMIT || getHeight() > HEIGHT_LIMIT || getLength() > LENGTH_LIMIT) {
throw new IOException("Schematic dimensions are too large: " + dimensions + "/" + dimLimit);
}
int size = getSize();
if (size > VOLUME_LIMIT) {
throw new IOException("Schematic is too big: " + size + "/" + VOLUME_LIMIT);
}
byte[] addBlocks = tag.getByteArray("AddBlocks");
byte[] blocksID = tag.getByteArray("Blocks");
if (size != blocksID.length) {
throw new IOException("Wrong schematic blocks length: " + blocksID.length + "/" + size);
}
setBlocks(compose(blocksID, addBlocks));
setMetas(tag.getByteArray("Data"));
if (size != getMetas().length) {
throw new IOException("Wrong schematic metadata length: " + blocksID.length + "/" + size);
}
setTiles(new NBTTagCompound[size]);
NBTTagList tileEntities = tag.getTagList("TileEntities", Constants.NBT.TAG_COMPOUND);
for(int i = 0; i < tileEntities.tagCount(); i++) {
NBTTagCompound tile = tileEntities.getCompoundTagAt(i);
int x = tile.getInteger("x");
int y = tile.getInteger("y");
int z = tile.getInteger("z");
int idx = getIndex(x, y, z);
if (idx >= 0 && idx < size) {
setTile(tile, idx);
}
}
}
/**
* Write to schematic tags
* @return Control tag
*/
private NBTTagCompound getSchematic() {
NBTTagCompound tag = new NBTTagCompound();
tag.setString("Materials", "Alpha");
tag.setShort("Width", (short) getWidth());
tag.setShort("Height", (short) getHeight());
tag.setShort("Length", (short) getLength());
tag.setByteArray("AddBlocks", new byte[0]);
byte[] blocksID = new byte[getBlocks().length];
for (int i = 0; i < getBlocks().length; ++i) {
blocksID[i] = (byte) getBlock(i);
}
tag.setByteArray("Blocks", blocksID);
tag.setByteArray("AddBlocks", getAddBlocks(getBlocks()));
tag.setByteArray("Data", getMetas());
NBTTagList tileEntities = new NBTTagList();
for (NBTTagCompound tile : getTiles()) {
if (tile != null) {
tileEntities.appendTag(tile);
}
}
tag.setTag("TileEntities", tileEntities);
return tag;
}
/**
* Combine all 8b-blocksID and 8b-addBlocks to 16b-block
* @param blocksID Vanilla block array
* @param addBlocks Additional postfix array
* @return Combined array of vanilla and additional blocks
*/
private static short[] compose(byte[] blocksID, byte[] addBlocks) {
short[] blocks = new short[blocksID.length];
for (int index = 0; index < blocksID.length; index++) {
if ((index >> 1) >= addBlocks.length) {
blocks[index] = (short) (blocksID[index] & 0xFF);
} else {
if ((index & 1) == 0) {
blocks[index] = (short) (((addBlocks[index >> 1] & 0x0F) << 8) + (blocksID[index] & 0xFF));
} else {
blocks[index] = (short) (((addBlocks[index >> 1] & 0xF0) << 4) + (blocksID[index] & 0xFF));
}
}
}
return blocks;
}
/**
* Decompose to 8b-addBlocks from 16b-block id
* @param blocksID Blocks id array
* @return Decomposed array to AddBlocks
*/
private static byte[] getAddBlocks(short[] blocksID) {
byte[] addBlocks = new byte[(blocksID.length >> 1) + 1];
for (int index = 0; index < blocksID.length; ++index) {
short block = blocksID[index];
if (block > 255) {
addBlocks[index >> 1] = (byte) (((index & 1) == 0) ? addBlocks[index >> 1] & 0xF0 | (block >> 8) & 0xF : addBlocks[index >> 1] & 0xF | ((block >> 8) & 0xF) << 4);
}
}
return addBlocks;
}
@Override
public void project(UWorld world, Posture posture, long seed, boolean isInsecure) throws IOException {
Random random = new Random(0);
for (int ix = 0; ix < getWidth(); ++ix) {
for (int iy = 0; iy < getHeight(); ++iy) {
for (int iz = 0; iz < getLength(); ++iz) {
project(world, posture, getIndex(ix, iy, iz), isInsecure, random);
}
}
}
world.notifyPosture(posture);
}
@Override
public void project(UWorld world, Posture posture, int index, boolean isInsecure, Random random) {
UBlock block = isInsecure ? UBlock.getById(getBlock(index)) : UBlocks.getBlockVanilla(getBlock(index));
if (block == null) {
return;
}
UBlockPos pos = posture.getWorldPos(index);
world.setBlockState(pos, block.getState(posture.getWorldMeta(block, getMeta(index))));
if (isInsecure) {
world.setTileTag(pos, getTile(index));
} else {
Tiles.load(world.getTileEntity(pos), getTile(index), random.nextLong());
}
}
protected short[] getBlocks() {
return blocks;
}
protected byte[] getMetas() {
return metas;
}
protected NBTTagCompound[] getTiles() {
return tiles;
}
protected short getBlock(int index) {
return this.blocks[index];
}
protected byte getMeta(int index) {
return this.metas[index];
}
/** @return the tile-entity NBT stored at the given linear index */
protected NBTTagCompound getTile(int index) {
    return this.tiles[index];
}
/** Replaces the backing block id array (stored by reference, no copy). */
protected void setBlocks(short[] blocks) {
    this.blocks = blocks;
}
/** Replaces the backing metadata array (stored by reference, no copy). */
protected void setMetas(byte[] meta) {
    this.metas = meta;
}
/** Replaces the backing tile-entity NBT array (stored by reference, no copy). */
protected void setTiles(NBTTagCompound[] tiles) {
    this.tiles = tiles;
}
/** Stores a 16-bit block id at the given linear index. */
protected void setBlock(short block, int index) {
    this.blocks[index] = block;
}
/** Stores block metadata at the given linear index. */
protected void setMeta(byte meta, int index) {
    this.metas[index] = meta;
}
/** Stores tile-entity NBT at the given linear index. */
protected void setTile(NBTTagCompound tile, int index) {
    this.tiles[index] = tile;
}
}
| |
/* jSSC (Java Simple Serial Connector) - serial port communication library.
* (C) Alexey Sokolov (scream3r), 2010-2014.
*
* Patched for Arduino by Cristian Maglie.
*
* This file is part of jSSC.
*
* jSSC is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* jSSC is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with jSSC. If not, see <http://www.gnu.org/licenses/>.
*
* If you use jSSC in public project you can inform me about this by e-mail,
* of course if you want it.
*
* e-mail: scream3r.org@gmail.com
* web-site: http://scream3r.org | http://code.google.com/p/java-simple-serial-connector/
*/
package ar.edu.utn.tc.alarma.arduino;
import java.io.File;
import java.util.Comparator;
import java.util.TreeSet;
import java.util.regex.Pattern;
import jssc.SerialNativeInterface;
/**
*
* @author scream3r
*/
/**
 * Enumerates serial ports known to the operating system.
 *
 * <p>Discovery is OS-specific: on unix-like systems a device directory is
 * scanned for device nodes whose names match a regexp; on Windows port names
 * come from the native layer and are only filtered by the regexp.
 *
 * @author scream3r
 */
public class SerialPortList {

    private static SerialNativeInterface serialInterface;
    // Per-OS defaults selected once at class load; null on an unknown OS.
    private static final Pattern PORTNAMES_REGEXP;
    private static final String PORTNAMES_PATH;

    static {
        serialInterface = new SerialNativeInterface();
        switch (SerialNativeInterface.getOsType()) {
            case SerialNativeInterface.OS_LINUX: {
                PORTNAMES_REGEXP = Pattern.compile("(ttyS|ttyUSB|ttyACM|ttyAMA|rfcomm|ttyO)[0-9]{1,3}");
                PORTNAMES_PATH = "/dev/";
                break;
            }
            case SerialNativeInterface.OS_SOLARIS: {
                PORTNAMES_REGEXP = Pattern.compile("[0-9]*|[a-z]*");
                PORTNAMES_PATH = "/dev/term/";
                break;
            }
            case SerialNativeInterface.OS_MAC_OS_X: {
                PORTNAMES_REGEXP = Pattern.compile("(tty|cu)\\..*");
                PORTNAMES_PATH = "/dev/";
                break;
            }
            case SerialNativeInterface.OS_WINDOWS: {
                PORTNAMES_REGEXP = Pattern.compile("");
                PORTNAMES_PATH = "";
                break;
            }
            default: {
                PORTNAMES_REGEXP = null;
                PORTNAMES_PATH = null;
                break;
            }
        }
    }

    //since 2.1.0 -> Fully rewritten port name comparator.
    // Compares port names case-insensitively, but treats embedded digit runs
    // as numbers so that e.g. "COM2" sorts before "COM10".
    private static final Comparator<String> PORTNAMES_COMPARATOR = new Comparator<String>() {

        @Override
        public int compare(String valueA, String valueB) {
            if (valueA.equalsIgnoreCase(valueB)) {
                // Same name ignoring case: fall back to case-sensitive order for stability.
                return valueA.compareTo(valueB);
            }
            int minLength = Math.min(valueA.length(), valueB.length());
            int shiftA = 0;
            int shiftB = 0;
            for (int i = 0; i < minLength; i++) {
                char charA = valueA.charAt(i - shiftA);
                char charB = valueB.charAt(i - shiftB);
                if (charA != charB) {
                    if (Character.isDigit(charA) && Character.isDigit(charB)) {
                        int[] resultsA = getNumberAndLastIndex(valueA, i - shiftA);
                        int[] resultsB = getNumberAndLastIndex(valueB, i - shiftB);
                        if (resultsA[0] != resultsB[0]) {
                            return resultsA[0] - resultsB[0];
                        }
                        // Equal numeric value, different digit-run lengths: realign the
                        // scan positions so the comparison continues after both runs.
                        if (valueA.length() < valueB.length()) {
                            i = resultsA[1];
                            shiftB = resultsA[1] - resultsB[1];
                        } else {
                            i = resultsB[1];
                            shiftA = resultsB[1] - resultsA[1];
                        }
                    } else {
                        if (Character.toLowerCase(charA) - Character.toLowerCase(charB) != 0) {
                            return Character.toLowerCase(charA) - Character.toLowerCase(charB);
                        }
                    }
                }
            }
            return valueA.compareToIgnoreCase(valueB);
        }

        /**
         * Evaluate port <b>index/number</b> from <b>startIndex</b> to the number end. For example: for port name
         * <b>serial-123-FF</b> you should invoke this method with <b>startIndex = 7</b>
         *
         * @return If port <b>index/number</b> correctly evaluated it value will be returned<br>
         * <b>returnArray[0] = index/number</b><br>
         * <b>returnArray[1] = stopIndex</b><br>
         *
         * If incorrect:<br>
         * <b>returnArray[0] = -1</b><br>
         * <b>returnArray[1] = startIndex</b><br>
         *
         * For this name <b>serial-123-FF</b> result is:
         * <b>returnArray[0] = 123</b><br>
         * <b>returnArray[1] = 10</b><br>
         */
        private int[] getNumberAndLastIndex(String str, int startIndex) {
            String numberValue = "";
            int[] returnValues = {-1, startIndex};
            for (int i = startIndex; i < str.length(); i++) {
                returnValues[1] = i;
                char c = str.charAt(i);
                if (Character.isDigit(c)) {
                    numberValue += c;
                } else {
                    break;
                }
            }
            try {
                returnValues[0] = Integer.valueOf(numberValue);
            } catch (Exception ex) {
                // Not a number (empty digit run): keep the {-1, startIndex} sentinel.
            }
            return returnValues;
        }
    };
    //<-since 2.1.0

    /**
     * Get sorted array of serial ports in the system using default settings:<br>
     *
     * <b>Search path</b><br>
     * Windows - ""(always ignored)<br>
     * Linux - "/dev/"<br>
     * Solaris - "/dev/term/"<br>
     * MacOSX - "/dev/"<br>
     *
     * <b>RegExp</b><br>
     * Windows - ""<br>
     * Linux - "(ttyS|ttyUSB|ttyACM|ttyAMA|rfcomm|ttyO)[0-9]{1,3}"<br>
     * Solaris - "[0-9]*|[a-z]*"<br>
     * MacOSX - "(tty|cu)\\..*"<br>
     *
     * @return String array. If there is no ports in the system String[] with <b>zero</b> length will be returned (since
     * jSSC-0.8 in previous versions null will be returned)
     */
    public static String[] getPortNames() {
        return getPortNames(PORTNAMES_PATH, PORTNAMES_REGEXP, PORTNAMES_COMPARATOR);
    }

    /**
     * Get sorted array of serial ports in the system located on searchPath
     *
     * @param searchPath Path for searching serial ports <b>(not null)</b><br>
     * The default search paths:<br>
     * Linux, MacOSX: <b>/dev/</b><br>
     * Solaris: <b>/dev/term/</b><br>
     * Windows: <b>this parameter ignored</b>
     *
     * @return String array. If there is no ports in the system String[] with <b>zero</b> length will be returned
     *
     * @since 2.3.0
     */
    public static String[] getPortNames(String searchPath) {
        return getPortNames(searchPath, PORTNAMES_REGEXP, PORTNAMES_COMPARATOR);
    }

    /**
     * Get sorted array of serial ports in the system matched pattern
     *
     * @param pattern RegExp pattern for matching port names <b>(not null)</b>
     *
     * @return String array. If there is no ports in the system String[] with <b>zero</b> length will be returned
     *
     * @since 2.3.0
     */
    public static String[] getPortNames(Pattern pattern) {
        return getPortNames(PORTNAMES_PATH, pattern, PORTNAMES_COMPARATOR);
    }

    /**
     * Get sorted array of serial ports in the system sorted by comparator
     *
     * @param comparator Comparator for sorting port names <b>(not null)</b>
     *
     * @return String array. If there is no ports in the system String[] with <b>zero</b> length will be returned
     *
     * @since 2.3.0
     */
    public static String[] getPortNames(Comparator<String> comparator) {
        return getPortNames(PORTNAMES_PATH, PORTNAMES_REGEXP, comparator);
    }

    /**
     * Get sorted array of serial ports in the system located on searchPath, matched pattern
     *
     * @param searchPath Path for searching serial ports <b>(not null)</b><br>
     * The default search paths:<br>
     * Linux, MacOSX: <b>/dev/</b><br>
     * Solaris: <b>/dev/term/</b><br>
     * Windows: <b>this parameter ignored</b>
     * @param pattern RegExp pattern for matching port names <b>(not null)</b>
     *
     * @return String array. If there is no ports in the system String[] with <b>zero</b> length will be returned
     *
     * @since 2.3.0
     */
    public static String[] getPortNames(String searchPath, Pattern pattern) {
        return getPortNames(searchPath, pattern, PORTNAMES_COMPARATOR);
    }

    /**
     * Get sorted array of serial ports in the system located on searchPath and sorted by comparator
     *
     * @param searchPath Path for searching serial ports <b>(not null)</b><br>
     * The default search paths:<br>
     * Linux, MacOSX: <b>/dev/</b><br>
     * Solaris: <b>/dev/term/</b><br>
     * Windows: <b>this parameter ignored</b>
     * @param comparator Comparator for sorting port names <b>(not null)</b>
     *
     * @return String array. If there is no ports in the system String[] with <b>zero</b> length will be returned
     *
     * @since 2.3.0
     */
    public static String[] getPortNames(String searchPath, Comparator<String> comparator) {
        return getPortNames(searchPath, PORTNAMES_REGEXP, comparator);
    }

    /**
     * Get sorted array of serial ports in the system matched pattern and sorted by comparator
     *
     * @param pattern RegExp pattern for matching port names <b>(not null)</b>
     * @param comparator Comparator for sorting port names <b>(not null)</b>
     *
     * @return String array. If there is no ports in the system String[] with <b>zero</b> length will be returned
     *
     * @since 2.3.0
     */
    public static String[] getPortNames(Pattern pattern, Comparator<String> comparator) {
        return getPortNames(PORTNAMES_PATH, pattern, comparator);
    }

    /**
     * Get sorted array of serial ports in the system located on searchPath, matched pattern and sorted by comparator
     *
     * @param searchPath Path for searching serial ports <b>(not null)</b><br>
     * The default search paths:<br>
     * Linux, MacOSX: <b>/dev/</b><br>
     * Solaris: <b>/dev/term/</b><br>
     * Windows: <b>this parameter ignored</b>
     * @param pattern RegExp pattern for matching port names <b>(not null)</b>
     * @param comparator Comparator for sorting port names <b>(not null)</b>
     *
     * @return String array. If there is no ports in the system String[] with <b>zero</b> length will be returned
     *
     * @since 2.3.0
     */
    public static String[] getPortNames(String searchPath, Pattern pattern, Comparator<String> comparator) {
        if (searchPath == null || pattern == null || comparator == null) {
            return new String[]{};
        }
        if (SerialNativeInterface.getOsType() == SerialNativeInterface.OS_WINDOWS) {
            return getWindowsPortNames(pattern, comparator);
        }
        return getUnixBasedPortNames(searchPath, pattern, comparator);
    }

    /**
     * Get serial port names in Windows
     *
     * @since 2.3.0
     */
    private static String[] getWindowsPortNames(Pattern pattern, Comparator<String> comparator) {
        String[] portNames = serialInterface.getSerialPortNames();
        if (portNames == null) {
            return new String[]{};
        }
        TreeSet<String> ports = new TreeSet<String>(comparator);
        for (String portName : portNames) {
            if (pattern.matcher(portName).find()) {
                ports.add(portName);
            }
        }
        return ports.toArray(new String[ports.size()]);
    }

    /**
     * Universal method for getting port names of _nix based systems
     */
    private static String[] getUnixBasedPortNames(String searchPath, Pattern pattern, Comparator<String> comparator) {
        // Normalize to a trailing slash so portName concatenation below is valid.
        searchPath = (searchPath.equals("") ? searchPath : (searchPath.endsWith("/") ? searchPath : searchPath + "/"));
        String[] returnArray = new String[]{};
        File dir = new File(searchPath);
        if (dir.exists() && dir.isDirectory()) {
            File[] files = dir.listFiles();
            // FIX: listFiles() may return null on an I/O error even when the
            // directory exists; guard against NPE before reading length.
            if (files != null && files.length > 0) {
                TreeSet<String> portsTree = new TreeSet<String>(comparator);
                for (File file : files) {
                    String fileName = file.getName();
                    // Device nodes are neither directories nor "normal" files,
                    // hence the double negation selects special files only.
                    if (!file.isDirectory() && !file.isFile() && pattern.matcher(fileName).find()) {
                        String portName = searchPath + fileName;
                        // For linux ttyS0..31 serial ports check existence by opening each of them
                        if (fileName.startsWith("ttyS")) {
                            long portHandle = serialInterface.openPort(portName, false);//Open port without TIOCEXCL
                            if (portHandle < 0 && portHandle != SerialNativeInterface.ERR_PORT_BUSY) {
                                continue;
                            } else if (portHandle != SerialNativeInterface.ERR_PORT_BUSY) {
                                serialInterface.closePort(portHandle);
                            }
                        }
                        portsTree.add(portName);
                    }
                }
                returnArray = portsTree.toArray(returnArray);
            }
        }
        return returnArray;
    }
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.fileEditor.impl;
import com.intellij.diagnostic.Activity;
import com.intellij.diagnostic.ActivityCategory;
import com.intellij.diagnostic.StartUpMeasurer;
import com.intellij.diagnostic.StartUpMeasurer.Activities;
import com.intellij.ide.ui.UISettings;
import com.intellij.ide.ui.UISettingsListener;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.ex.FileEditorManagerEx;
import com.intellij.openapi.fileEditor.impl.text.FileDropHandler;
import com.intellij.openapi.keymap.Keymap;
import com.intellij.openapi.keymap.KeymapManagerListener;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Splitter;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.*;
import com.intellij.openapi.wm.ex.IdeFocusTraversalPolicy;
import com.intellij.openapi.wm.ex.IdeFrameEx;
import com.intellij.openapi.wm.ex.WindowManagerEx;
import com.intellij.openapi.wm.impl.*;
import com.intellij.testFramework.LightVirtualFileBase;
import com.intellij.ui.ComponentUtil;
import com.intellij.ui.DirtyUI;
import com.intellij.ui.JBColor;
import com.intellij.ui.OnePixelSplitter;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.ui.docking.DockManager;
import com.intellij.ui.tabs.JBTabs;
import com.intellij.ui.tabs.impl.JBTabsImpl;
import com.intellij.ui.tabs.impl.tabsLayout.TabsLayoutInfo;
import com.intellij.util.Alarm;
import com.intellij.util.ObjectUtils;
import com.intellij.util.PathUtil;
import com.intellij.util.containers.ArrayListSet;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.StartupUiUtil;
import com.intellij.util.ui.UIUtil;
import gnu.trove.THashSet;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.datatransfer.DataFlavor;
import java.awt.datatransfer.Transferable;
import java.awt.event.ContainerEvent;
import java.beans.PropertyChangeListener;
import java.nio.file.InvalidPathException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.*;
import java.util.concurrent.CopyOnWriteArraySet;
import static com.intellij.openapi.wm.ToolWindowId.PROJECT_VIEW;
@DirtyUI
public class EditorsSplitters extends IdePanePanel implements UISettingsListener {
private static final Key<Activity> OPEN_FILES_ACTIVITY = Key.create("open.files.activity");
private static final Logger LOG = Logger.getInstance(EditorsSplitters.class);
// Attribute names used when persisting editor state (see writeComposite).
private static final String PINNED = "pinned";
private static final String CURRENT_IN_TAB = "current-in-tab";
private static final Key<Object> DUMMY_KEY = Key.create("EditorsSplitters.dummy.key");
private static final Key<Boolean> OPENED_IN_BULK = Key.create("EditorSplitters.opened.in.bulk");
// The currently focused editor window; null when no window is current.
private EditorWindow myCurrentWindow;
private final Set<EditorWindow> myWindows = new CopyOnWriteArraySet<>();
private final FileEditorManagerImpl myManager;
private Element mySplittersElement; // temporarily used during initialization
int myInsideChange;
private final MyFocusWatcher myFocusWatcher;
// Debounces file-icon refreshes; see updateFileIconLater().
private final Alarm myIconUpdaterAlarm;
final Disposable parentDisposable;
private final UIBuilder myUIBuilder = new UIBuilder();
/**
 * Creates a splitters panel owned by the given editor manager.
 *
 * @param manager owning file editor manager
 * @param createOwnDockableContainer when true, registers this splitter as its own docking container
 * @param parentDisposable parent for all listeners/alarms registered here
 */
EditorsSplitters(@NotNull FileEditorManagerImpl manager, boolean createOwnDockableContainer, @NotNull Disposable parentDisposable) {
    super(new BorderLayout());
    myIconUpdaterAlarm = new Alarm(parentDisposable);
    this.parentDisposable = parentDisposable;
    setBackground(JBColor.namedColor("Editor.background", IdeBackgroundUtil.getIdeBackgroundColor()));
    // Repaint when editor LaF colors change; this panel paints the empty-state background itself.
    PropertyChangeListener l = e -> {
        String propName = e.getPropertyName();
        if ("Editor.background".equals(propName) || "Editor.foreground".equals(propName) || "Editor.shortcutForeground".equals(propName)) {
            repaint();
        }
    };
    UIManager.getDefaults().addPropertyChangeListener(l);
    Disposer.register(parentDisposable, () -> UIManager.getDefaults().removePropertyChangeListener(l));
    myManager = manager;
    myFocusWatcher = new MyFocusWatcher();
    Disposer.register(parentDisposable, () -> {
        myFocusWatcher.deinstall(this);
    });
    setFocusTraversalPolicy(new MyFocusTraversalPolicy());
    setTransferHandler(new MyTransferHandler());
    clear();
    if (createOwnDockableContainer) {
        DockableEditorTabbedContainer dockable = new DockableEditorTabbedContainer(myManager.getProject(), this, false);
        DockManager.getInstance(manager.getProject()).register(dockable, parentDisposable);
    }
    // Refresh on keymap switches; NOTE(review): presumably the empty-state text shows shortcuts — confirm.
    ApplicationManager.getApplication().getMessageBus().connect(parentDisposable).subscribe(KeymapManagerListener.TOPIC, new KeymapManagerListener() {
        @Override
        public void activeKeymapChanged(@Nullable Keymap keymap) {
            invalidate();
            repaint();
        }
    });
}
/** @return the file editor manager that owns this splitters panel */
public FileEditorManagerImpl getManager() {
    return myManager;
}
/** Disposes every editor window and resets this panel to the empty state. */
public void clear() {
    for (EditorWindow window : myWindows) {
        window.dispose();
    }
    removeAll();
    myWindows.clear();
    setCurrentWindow(null);
    repaint (); // revalidate doesn't repaint correctly after "Close All"
}
/** Installs the focus watcher on this component tree; undone via parentDisposable. */
void startListeningFocus() {
    myFocusWatcher.install(this);
}
/** @return the file selected in the current window, or {@code null} when no window is current */
@Nullable
public VirtualFile getCurrentFile() {
    EditorWindow window = myCurrentWindow;
    return window == null ? null : window.getSelectedFile();
}
/** @return true when the empty-state background should be painted (no window or no files) */
private boolean showEmptyText() {
    return myCurrentWindow == null || myCurrentWindow.getFiles().length == 0;
}
/** Paints the frame background plus a subtle top separator line when no editors are open. */
@Override
protected void paintComponent(Graphics g) {
    if (showEmptyText()) {
        Graphics2D gg = IdeBackgroundUtil.withFrameBackground(g, this);
        super.paintComponent(gg);
        // Darcula uses the standard border color; light themes use translucent black.
        g.setColor(StartupUiUtil.isUnderDarcula() ? JBColor.border() : new Color(0, 0, 0, 50));
        g.drawLine(0, 0, getWidth(), 0);
    }
}
/**
 * Serializes the splitter tree into {@code element}. No-op when this panel
 * is empty. ProcessCanceledException is rethrown; all other failures are logged.
 */
public void writeExternal(@NotNull Element element) {
    if (getComponentCount() == 0) {
        return;
    }
    JPanel panel = (JPanel)getComponent(0);
    if (panel.getComponentCount() != 0) {
        try {
            element.addContent(writePanel(panel.getComponent(0)));
        }
        catch (ProcessCanceledException e) {
            throw e;
        }
        catch (Throwable e) {
            LOG.error(e);
        }
    }
}
/**
 * Recursively serializes one node of the splitter tree: a Splitter becomes a
 * "splitter" element with two children; a JBTabs leaf becomes a "leaf" element
 * with its window's files. Unknown component types are logged and yield null.
 */
private Element writePanel(@NotNull Component comp) {
    if (comp instanceof Splitter) {
        final Splitter splitter = (Splitter)comp;
        final Element res = new Element("splitter");
        res.setAttribute("split-orientation", splitter.getOrientation() ? "vertical" : "horizontal");
        res.setAttribute("split-proportion", Float.toString(splitter.getProportion()));
        final Element first = new Element("split-first");
        first.addContent(writePanel(splitter.getFirstComponent().getComponent(0)));
        final Element second = new Element("split-second");
        second.addContent(writePanel(splitter.getSecondComponent().getComponent(0)));
        res.addContent(first);
        res.addContent(second);
        return res;
    }
    else if (comp instanceof JBTabs) {
        Element result = new Element("leaf");
        // Persist the side-tabs width limit if one was set on this tabs component.
        Integer limit = ComponentUtil.getClientProperty(((JBTabs)comp).getComponent(), JBTabsImpl.SIDE_TABS_SIZE_LIMIT_KEY);
        if (limit != null) {
            result.setAttribute(JBTabsImpl.SIDE_TABS_SIZE_LIMIT_KEY.toString(), String.valueOf(limit));
        }
        writeWindow(result, findWindowWith(comp));
        return result;
    }
    else {
        LOG.error(comp.getClass().getName());
        return null;
    }
}
/** Appends one "file" element per editor of {@code window} (tab order) to {@code result}. */
private void writeWindow(@NotNull Element result, @Nullable EditorWindow window) {
    if (window != null) {
        EditorWithProviderComposite[] composites = window.getEditors();
        for (int i = 0; i < composites.length; i++) {
            VirtualFile file = window.getFileAt(i);
            result.addContent(writeComposite(composites[i], window.isFilePinned(file), window.getSelectedEditor()));
        }
    }
}
/** Serializes one open file: its history-entry state plus pinned/selected flags. */
@NotNull
private Element writeComposite(@NotNull EditorWithProviderComposite composite, boolean pinned, @Nullable EditorWithProviderComposite selectedEditor) {
    Element fileElement = new Element("file");
    composite.currentStateAsHistoryEntry().writeExternal(fileElement, getManager().getProject());
    fileElement.setAttribute(PINNED, Boolean.toString(pinned));
    fileElement.setAttribute(CURRENT_IN_TAB, Boolean.toString(composite.equals(selectedEditor)));
    return fileElement;
}
/**
 * Rebuilds the editor UI from the previously stored splitters element.
 * Returns null when there is nothing to restore; otherwise a Ref holding the
 * rebuilt panel (the Ref's value itself may be null if building failed).
 * Also starts startup-measurement activities consumed by stopOpenFilesActivity().
 */
@Nullable
Ref<JPanel> restoreEditors() {
    Element element = mySplittersElement;
    if (element == null) {
        return null;
    }
    myManager.getProject().putUserData(OPEN_FILES_ACTIVITY, StartUpMeasurer.startActivity(Activities.EDITOR_RESTORING_TILL_PAINT));
    Activity restoringEditors = StartUpMeasurer.startMainActivity(Activities.EDITOR_RESTORING);
    JPanel component = myUIBuilder.process(element, getTopPanel());
    if (component != null) {
        component.setFocusable(false);
    }
    restoringEditors.end();
    return new Ref<>(component);
}
/** Ends and clears the "open files" startup activity stored on the project, if any. */
public static void stopOpenFilesActivity(@NotNull Project project) {
    Activity opening = project.getUserData(OPEN_FILES_ACTIVITY);
    if (opening == null) {
        return;
    }
    opening.end();
    project.putUserData(OPEN_FILES_ACTIVITY, null);
}
/** Restores persisted editors (if any) and installs the rebuilt panel on the EDT. */
public void openFiles() {
    Ref<JPanel> componentRef = restoreEditors();
    if (componentRef == null) {
        return;
    }
    // doOpenFiles touches Swing components, so hop to the EDT in any modality.
    ApplicationManager.getApplication().invokeAndWait(() -> {
        doOpenFiles(componentRef.get());
    }, ModalityState.any());
}
/** Installs the restored component (if non-null) and removes windows left with no tabs. */
void doOpenFiles(@Nullable JPanel component) {
    if (component != null) {
        removeAll();
        add(component, BorderLayout.CENTER);
        // Restoration done; release the persisted element.
        mySplittersElement = null;
    }
    // clear empty splitters
    for (EditorWindow window : getWindows()) {
        if (window.getTabCount() == 0) {
            window.removeFromSplitter();
        }
    }
}
/** Stores the persisted splitters element; actual restoration happens in restoreEditors(). */
public void readExternal(@NotNull Element element) {
    mySplittersElement = element;
}
/** Collects every file open in any window of this splitter, without duplicates. */
public VirtualFile @NotNull [] getOpenFiles() {
    Set<VirtualFile> openFiles = new ArrayListSet<>();
    for (EditorWindow window : myWindows) {
        for (EditorWithProviderComposite composite : window.getEditors()) {
            openFiles.add(composite.getFile());
        }
    }
    return VfsUtilCore.toVirtualFileArray(openFiles);
}
/**
 * Returns the selected file of each window (deduplicated), with the current
 * window's file moved to index 0 when present.
 */
public VirtualFile @NotNull [] getSelectedFiles() {
    final Set<VirtualFile> files = new ArrayListSet<>();
    for (final EditorWindow window : myWindows) {
        final VirtualFile file = window.getSelectedFile();
        if (file != null) {
            files.add(file);
        }
    }
    final VirtualFile[] virtualFiles = VfsUtilCore.toVirtualFileArray(files);
    final VirtualFile currentFile = getCurrentFile();
    if (currentFile != null) {
        // Swap the current file to the front so callers can treat index 0 as "active".
        for (int i = 0; i != virtualFiles.length; ++i) {
            if (Comparing.equal(virtualFiles[i], currentFile)) {
                virtualFiles[i] = virtualFiles[0];
                virtualFiles[0] = currentFile;
                break;
            }
        }
    }
    return virtualFiles;
}
/** Returns the selected editor of every window (including the current one, if any). */
public FileEditor @NotNull [] getSelectedEditors() {
    Set<EditorWindow> windows = new THashSet<>(myWindows);
    final EditorWindow currentWindow = getCurrentWindow();
    if (currentWindow != null) {
        windows.add(currentWindow);
    }
    List<FileEditor> editors = new ArrayList<>();
    for (final EditorWindow window : windows) {
        final EditorWithProviderComposite composite = window.getSelectedEditor();
        if (composite != null) {
            editors.add(composite.getSelectedEditor());
        }
    }
    return editors.toArray(new FileEditor[0]);
}
/** Schedules a (debounced) icon refresh for the given file. */
public void updateFileIcon(@NotNull final VirtualFile file) {
    updateFileIconLater(file);
}
/** Refreshes the file's icon synchronously in every window containing it. */
void updateFileIconImmediately(final VirtualFile file) {
    final Collection<EditorWindow> windows = findWindows(file);
    for (EditorWindow window : windows) {
        window.updateFileIcon(file);
    }
}
// Files whose icons are pending refresh; flushed in one batch by the alarm below.
private final Set<VirtualFile> myFilesToUpdateIconsFor = new HashSet<>();

/** Queues an icon refresh; multiple requests within 200 ms are coalesced into one pass. */
private void updateFileIconLater(VirtualFile file) {
    myFilesToUpdateIconsFor.add(file);
    myIconUpdaterAlarm.cancelAllRequests();
    myIconUpdaterAlarm.addRequest(() -> {
        if (myManager.getProject().isDisposed()) return;
        for (VirtualFile file1 : myFilesToUpdateIconsFor) {
            updateFileIconImmediately(file1);
        }
        myFilesToUpdateIconsFor.clear();
    }, 200, ModalityState.stateForComponent(this));
}
/** Updates the tab foreground and "problem" wave color for the file in every window containing it. */
void updateFileColor(@NotNull final VirtualFile file) {
    final Collection<EditorWindow> windows = findWindows(file);
    for (final EditorWindow window : windows) {
        // findWindows guarantees the composite exists, so the index must be valid.
        final int index = window.findEditorIndex(window.findFileComposite(file));
        LOG.assertTrue(index != -1);
        window.setForegroundAt(index, getManager().getFileColor(file));
        window.setWaveColor(index, getManager().isProblem(file) ? JBColor.red : null);
    }
}
/** Asks each window to trim its tab count, keeping that window's selected file. */
public void trimToSize() {
    for (final EditorWindow window : myWindows) {
        window.trimToSize(window.getSelectedFile(), true);
    }
}
/** Applies the given tabs layout to every editor window. */
void updateTabsLayout(TabsLayoutInfo newTabsLayoutInfo) {
    for (EditorWindow window : getWindows()) {
        window.updateTabsLayout(newTabsLayoutInfo);
    }
}
/** Applies the given tab placement (top/bottom/left/right) to every editor window. */
public void setTabsPlacement(final int tabPlacement) {
    for (EditorWindow window : getWindows()) {
        window.setTabsPlacement(tabPlacement);
    }
}
/** Applies the given tab layout policy to every editor window. */
void setTabLayoutPolicy(int scrollTabLayout) {
    for (EditorWindow window : getWindows()) {
        window.setTabLayoutPolicy(scrollTabLayout);
    }
}
/**
 * Refreshes tab titles for files matching {@code updatedFile} by name (all
 * files when null), then updates the frame title for the current file.
 */
void updateFileName(@Nullable VirtualFile updatedFile) {
    for (EditorWindow window : getWindows()) {
        for (VirtualFile file : window.getFiles()) {
            // Name comparison (not identity) so same-named files stay consistent after renames.
            if (updatedFile == null || file.getName().equals(updatedFile.getName())) {
                window.updateFileName(file);
            }
        }
    }
    Project project = myManager.getProject();
    IdeFrameEx frame = getFrame(project);
    if (frame != null) {
        String fileTitle = null;
        Path ioFile = null;
        VirtualFile file = getCurrentFile();
        if (file != null) {
            try {
                ioFile = file instanceof LightVirtualFileBase ? null : Paths.get(file.getPresentableUrl());
            }
            catch (InvalidPathException error) {
                // Sometimes presentable URLs, designed for showing texts in UI, aren't valid local filesystem paths.
                // An error may happen not only for LightVirtualFile.
                LOG.info(
                    String.format("Presentable URL %s of file %s can't be mapped on the local filesystem.", file.getPresentableUrl(), file),
                    error);
            }
            fileTitle = FrameTitleBuilder.getInstance().getFileTitle(project, file);
        }
        frame.setFileTitle(fileTitle, ioFile);
    }
}
/** @return the IDE frame for the project; may be null only in unit-test mode */
@Nullable
protected IdeFrameEx getFrame(@NotNull Project project) {
    ProjectFrameHelper frame = WindowManagerEx.getInstanceEx().getFrameHelper(project);
    LOG.assertTrue(ApplicationManager.getApplication().isUnitTestMode() || frame != null);
    return frame;
}
/** @return true while a (possibly nested) structural change is in progress */
boolean isInsideChange() {
    return myInsideChange > 0;
}
/**
 * Sets the current window without any focus/selection side effects.
 * @throws IllegalArgumentException if the window does not belong to this container
 */
private void setCurrentWindow(@Nullable final EditorWindow currentWindow) {
    if (currentWindow != null && !myWindows.contains(currentWindow)) {
        throw new IllegalArgumentException(currentWindow + " is not a member of this container");
    }
    myCurrentWindow = currentWindow;
}
/** Refreshes the tab background color for the file in every editor window. */
void updateFileBackgroundColor(@NotNull VirtualFile file) {
    for (EditorWindow window : getWindows()) {
        window.updateFileBackgroundColor(file);
    }
}
/** @return number of leaf panes in this splitter tree, or 0 when empty */
int getSplitCount() {
    if (getComponentCount() > 0) {
        JPanel panel = (JPanel) getComponent(0);
        return getSplitCount(panel);
    }
    return 0;
}
/** Recursively counts leaf panes: a Splitter contributes both sides, any other child counts as one leaf. */
private static int getSplitCount(JComponent component) {
    if (component.getComponentCount() > 0) {
        final JComponent firstChild = (JComponent)component.getComponent(0);
        if (firstChild instanceof Splitter) {
            final Splitter splitter = (Splitter)firstChild;
            return getSplitCount(splitter.getFirstComponent()) + getSplitCount(splitter.getSecondComponent());
        }
        return 1;
    }
    return 0;
}
/** Hook invoked after a file is closed; default implementation does nothing. */
protected void afterFileClosed(@NotNull VirtualFile file) {
}
/** Hook invoked after a file is opened; default implementation does nothing. */
protected void afterFileOpen(@NotNull VirtualFile file) {
}
/** @return the deepest JBTabs component under the given screen point, or null if none */
@Nullable
JBTabs getTabsAt(RelativePoint point) {
    Point thisPoint = point.getPoint(this);
    Component c = SwingUtilities.getDeepestComponentAt(this, thisPoint.x, thisPoint.y);
    // Walk up from the deepest hit component until a JBTabs ancestor is found.
    while (c != null) {
        if (c instanceof JBTabs) {
            return (JBTabs)c;
        }
        c = c.getParent();
    }
    return null;
}
/** @return true only when every editor window reports itself as visibly empty */
boolean isEmptyVisible() {
    for (EditorWindow window : getWindows()) {
        if (!window.isEmptyVisible()) {
            return false;
        }
    }
    return true;
}
/** @return some open file different from {@code file}, or null if none exists */
@Nullable
private VirtualFile findNextFile(final VirtualFile file) {
    final EditorWindow[] windows = getWindows(); // TODO: use current file as base
    for (int i = 0; i != windows.length; ++i) {
        final VirtualFile[] files = windows[i].getFiles();
        for (final VirtualFile fileAt : files) {
            if (!Comparing.equal(fileAt, file)) {
                return fileAt;
            }
        }
    }
    return null;
}
/**
 * Closes the file in every window containing it. A window left empty is
 * re-filled with another open file (while the project is open); windows that
 * remain empty are unsplit afterwards.
 */
void closeFile(@NotNull VirtualFile file, boolean moveFocus) {
    List<EditorWindow> windows = findWindows(file);
    boolean isProjectOpen = myManager.getProject().isOpen();
    if (!windows.isEmpty()) {
        final VirtualFile nextFile = findNextFile(file);
        for (final EditorWindow window : windows) {
            LOG.assertTrue(window.getSelectedEditor() != null);
            window.closeFile(file, false, moveFocus);
            if (window.getTabCount() == 0 && nextFile != null && isProjectOpen) {
                EditorWithProviderComposite newComposite = myManager.newEditorComposite(nextFile);
                window.setEditor(newComposite, moveFocus); // newComposite can be null
            }
        }
        // cleanup windows with no tabs
        for (final EditorWindow window : windows) {
            if (!isProjectOpen || window.isDisposed()) {
                // call to window.unsplit() which might make its sibling disposed
                continue;
            }
            if (window.getTabCount() == 0) {
                window.unsplit(false);
            }
        }
    }
}
/** Reacts to UI settings changes: tab visibility per window, then per-file presentation refresh. */
@Override
public void uiSettingsChanged(@NotNull UISettings uiSettings) {
    for (EditorWindow window : myWindows) {
        window.updateTabsVisibility(uiSettings);
    }
    // Per-file refresh only makes sense while the project is still open.
    if (!myManager.getProject().isOpen()) {
        return;
    }
    for (VirtualFile file : getOpenFiles()) {
        updateFileBackgroundColor(file);
        updateFileIcon(file);
        updateFileColor(file);
    }
}
/** Routes default focus to the selected editor of the current window, falling back to this panel. */
private final class MyFocusTraversalPolicy extends IdeFocusTraversalPolicy {
    @Override
    public final Component getDefaultComponent(final Container focusCycleRoot) {
        if (myCurrentWindow != null) {
            final EditorWithProviderComposite selectedEditor = myCurrentWindow.getSelectedEditor();
            if (selectedEditor != null) {
                return IdeFocusTraversalPolicy.getPreferredFocusedComponent(selectedEditor.getComponent(), this);
            }
        }
        return IdeFocusTraversalPolicy.getPreferredFocusedComponent(EditorsSplitters.this, this);
    }
    @NotNull
    @Override
    protected Project getProject() {
        return myManager.getProject();
    }
}
/** @return the root panel of the splitter tree, or null when this container is empty */
@Nullable
public JPanel getTopPanel() {
    return getComponentCount() > 0 ? (JPanel)getComponent(0) : null;
}
/** @return the currently focused editor window; may be null */
public EditorWindow getCurrentWindow() {
    return myCurrentWindow;
}
/**
 * Ensures a current window exists, preferring (in order): a window already
 * containing {@code file}, any existing window, a freshly created one. If the
 * current window doesn't contain the file but another does, switches to it.
 */
public EditorWindow getOrCreateCurrentWindow(final VirtualFile file) {
    final List<EditorWindow> windows = findWindows(file);
    if (getCurrentWindow() == null) {
        final Iterator<EditorWindow> iterator = myWindows.iterator();
        if (!windows.isEmpty()) {
            setCurrentWindow(windows.get(0), false);
        }
        else if (iterator.hasNext()) {
            setCurrentWindow(iterator.next(), false);
        }
        else {
            createCurrentWindow();
        }
    }
    else if (!windows.isEmpty()) {
        if (!windows.contains(getCurrentWindow())) {
            setCurrentWindow(windows.get(0), false);
        }
    }
    return getCurrentWindow();
}
/** Creates the first editor window and adds its panel; must only be called when none exists. */
void createCurrentWindow() {
    LOG.assertTrue(myCurrentWindow == null);
    setCurrentWindow(createEditorWindow());
    add(myCurrentWindow.myPanel, BorderLayout.CENTER);
}
/** Factory for editor windows; overridable by subclasses. */
@NotNull
protected EditorWindow createEditorWindow() {
    return new EditorWindow(this, parentDisposable);
}
/**
 * sets the window passed as a current ('focused') window among all splitters. All file openings will be done inside this
 * current window
 * @param window a window to be set as current
 * @param requestFocus whether to request focus to the editor currently selected in this window
 */
void setCurrentWindow(@Nullable EditorWindow window, boolean requestFocus) {
    EditorWithProviderComposite newEditor = window == null ? null : window.getSelectedEditor();
    Runnable fireRunnable = () -> getManager().fireSelectionChanged(newEditor);
    setCurrentWindow(window);
    getManager().updateFileName(window == null ? null : window.getSelectedFile());
    if (window != null) {
        // Fire the selection event only when the window actually has a selected editor.
        EditorWithProviderComposite selectedEditor = window.getSelectedEditor();
        if (selectedEditor != null) {
            fireRunnable.run();
        }
        if (requestFocus) {
            window.requestFocus(true);
        }
    }
    else {
        // Window cleared: always notify listeners with a null editor.
        fireRunnable.run();
    }
}
/** Registers a window with this container. */
void addWindow(@NotNull EditorWindow window) {
    myWindows.add(window);
}
/** Unregisters a window, clearing the current-window reference if it matches. */
void removeWindow(EditorWindow window) {
    myWindows.remove(window);
    if (myCurrentWindow == window) {
        myCurrentWindow = null;
    }
}
/** @return true when the window belongs to this container */
boolean containsWindow(EditorWindow window) {
    return myWindows.contains(window);
}
/**
 * @deprecated Use {@link #getEditorComposites()}
 */
@Deprecated
public EditorWithProviderComposite @NotNull [] getEditorsComposites() {
    return getEditorComposites().toArray(new EditorWithProviderComposite[0]);
}
/** @return all editor composites from every window, in window iteration order */
@NotNull
public List<EditorWithProviderComposite> getEditorComposites() {
    List<EditorWithProviderComposite> composites = new ArrayList<>();
    for (EditorWindow window : myWindows) {
        Collections.addAll(composites, window.getEditors());
    }
    return composites;
}
//---------------------------------------------------------
/** @return the composites editing {@code file}, one per window containing it */
@NotNull
public List<EditorWithProviderComposite> findEditorComposites(@NotNull VirtualFile file) {
    List<EditorWithProviderComposite> found = new ArrayList<>();
    for (EditorWindow window : myWindows) {
        EditorWithProviderComposite composite = window.findFileComposite(file);
        if (composite != null) {
            found.add(composite);
        }
    }
    return found;
}
/** Collects every window that currently shows {@code file}. */
@NotNull
private List<EditorWindow> findWindows(@NotNull VirtualFile file) {
  final List<EditorWindow> matches = new ArrayList<>();
  for (EditorWindow window : myWindows) {
    if (window.findFileComposite(file) == null) {
      continue;
    }
    matches.add(window);
  }
  return matches;
}
/** @return a fresh array snapshot of all registered windows */
public EditorWindow @NotNull [] getWindows() {
  return myWindows.toArray(new EditorWindow[0]);
}
/**
 * Returns all windows ordered by their position in the splitter tree
 * (depth-first; a splitter's first component before its second).
 */
@NotNull
List<EditorWindow> getOrderedWindows() {
  List<EditorWindow> result = new ArrayList<>();
  // Collector for windows in tree ordering:
  class Inner {
    private void collect(final JPanel panel){
      final Component comp = panel.getComponent(0);
      if (comp instanceof Splitter) {
        // Interior node: recurse into both halves in order.
        final Splitter splitter = (Splitter)comp;
        collect((JPanel)splitter.getFirstComponent());
        collect((JPanel)splitter.getSecondComponent());
      }
      else if (comp instanceof JPanel || comp instanceof JBTabs) {
        // Leaf node: resolve the component back to its owning window.
        final EditorWindow window = findWindowWith(comp);
        if (window != null) {
          result.add(window);
        }
      }
    }
  }
  // get root component and traverse splitters tree:
  if (getComponentCount() != 0) {
    final Component comp = getComponent(0);
    LOG.assertTrue(comp instanceof JPanel);
    final JPanel panel = (JPanel)comp;
    if (panel.getComponentCount() != 0) {
      new Inner().collect (panel);
    }
  }
  // Sanity check: the traversal must have found every registered window.
  LOG.assertTrue(result.size() == myWindows.size());
  return result;
}
/**
 * Resolves a Swing component to the window whose panel contains it,
 * or {@code null} when no registered window owns it.
 */
@Nullable
private EditorWindow findWindowWith(@Nullable Component component) {
  if (component == null) {
    return null;
  }
  for (EditorWindow window : myWindows) {
    if (SwingUtilities.isDescendingFrom(component, window.myPanel)) {
      return window;
    }
  }
  return null;
}
/** @return whether these splitters live in a floating frame; this base implementation says no */
public boolean isFloating() {
  return false;
}
/** @return whether these splitters belong to a preview container; this base implementation says no */
public boolean isPreview() {
  return false;
}
/** @return whether {@code file} carries the OPENED_IN_BULK marker, i.e. is being reopened during bulk restore */
public static boolean isOpenedInBulk(@NotNull VirtualFile file) {
  return file.getUserData(OPENED_IN_BULK) != null;
}
/**
 * Follows Swing focus changes and keeps the splitters' notion of the current window in sync.
 * Focus is only observed, never requested back (requestFocus=false).
 */
private final class MyFocusWatcher extends FocusWatcher {
  @Override
  protected void focusedComponentChanged(Component component, AWTEvent cause) {
    EditorWindow newWindow = null;
    if (component != null) {
      newWindow = findWindowWith(component);
    }
    else if (cause instanceof ContainerEvent && cause.getID() == ContainerEvent.COMPONENT_REMOVED) {
      // do not change current window in case of child removal as in JTable.removeEditor
      // otherwise Escape in a toolwindow will not focus editor with JTable content
      return;
    }
    // One call is sufficient: setCurrentWindow(window, requestFocus) already invokes the
    // one-arg setCurrentWindow(window) internally before firing selection-changed, so the
    // extra plain setCurrentWindow(newWindow) the original made first was redundant.
    setCurrentWindow(newWindow, false);
  }
}
/** Accepts file drag-and-drop onto the splitters and opens the dropped files in the current window. */
private final class MyTransferHandler extends TransferHandler {
  private final FileDropHandler myFileDropHandler = new FileDropHandler(null);
  @Override
  public boolean importData(JComponent comp, Transferable t) {
    // Reject anything the file drop handler cannot process.
    if (!myFileDropHandler.canHandleDrop(t.getTransferDataFlavors())) {
      return false;
    }
    myFileDropHandler.handleDrop(t, myManager.getProject(), myCurrentWindow);
    return true;
  }
  @Override
  public boolean canImport(JComponent comp, DataFlavor[] transferFlavors) {
    return myFileDropHandler.canHandleDrop(transferFlavors);
  }
}
/**
 * Template-method reader for the persisted splitter-layout XML: recursively walks the element
 * tree, delegating "splitter" nodes and "leaf" (window) nodes to the subclass.
 *
 * @param <T> context/result type threaded through the traversal (e.g. JPanel for UI building)
 */
private abstract static class ConfigTreeReader<T> {
  @Nullable
  public final T process(@NotNull Element element, @Nullable T context) {
    Element splitterElement = element.getChild("splitter");
    if (splitterElement != null) {
      Element first = splitterElement.getChild("split-first");
      Element second = splitterElement.getChild("split-second");
      return processSplitter(splitterElement, first, second, context);
    }
    Element leaf = element.getChild("leaf");
    if (leaf == null) {
      // Neither a splitter nor a leaf: nothing to restore.
      return null;
    }
    List<Element> fileElements = leaf.getChildren("file");
    List<Element> children;
    if (fileElements.isEmpty()) {
      children = Collections.emptyList();
    }
    else {
      children = new ArrayList<>(fileElements.size());
      // trim to EDITOR_TAB_LIMIT, ignoring CLOSE_NON_MODIFIED_FILES_FIRST policy
      int toRemove = fileElements.size() - EditorWindow.getTabLimit();
      for (Element fileElement : fileElements) {
        // Pinned tabs are always kept; the first 'toRemove' unpinned entries are dropped.
        if (toRemove <= 0 || Boolean.parseBoolean(fileElement.getAttributeValue(PINNED))) {
          children.add(fileElement);
        }
        else {
          toRemove--;
        }
      }
    }
    // -1 signals "no side-tabs size limit was persisted" to the subclass.
    return processFiles(children, StringUtil.parseInt(leaf.getAttributeValue(JBTabsImpl.SIDE_TABS_SIZE_LIMIT_KEY.toString()), -1), context);
  }
  /** Restores one leaf: the file entries of a single editor window. */
  @Nullable
  abstract T processFiles(@NotNull List<? extends Element> fileElements, int tabSizeLimit, @Nullable T context);
  /** Restores one splitter node with its two (possibly null) child subtrees. */
  @Nullable
  abstract T processSplitter(@NotNull Element element, @Nullable Element firstChild, @Nullable Element secondChild, @Nullable T context);
}
/**
 * ConfigTreeReader that rebuilds the actual splitter/window UI from the persisted layout.
 * Swing work runs on the EDT via invokeAndWait; file opening happens on the calling thread.
 */
private final class UIBuilder extends ConfigTreeReader<JPanel> {
  /**
   * Restores one editor window: creates (or finds) the window on the EDT, reopens every
   * persisted file entry, then re-establishes the selected tab or falls back to the
   * project view tool window when nothing was marked current.
   */
  @Override
  protected JPanel processFiles(@NotNull List<? extends Element> fileElements, int tabSizeLimit, JPanel context) {
    Ref<EditorWindow> windowRef = new Ref<>();
    ApplicationManager.getApplication().invokeAndWait(() -> {
      // A null context means a brand-new window; otherwise reuse the window owning the given panel.
      EditorWindow editorWindow = context == null ? createEditorWindow() : findWindowWith(context);
      windowRef.set(editorWindow);
      if (editorWindow != null) {
        if (tabSizeLimit != 1) {
          ComponentUtil.putClientProperty(editorWindow.getTabbedPane().getComponent(), JBTabsImpl.SIDE_TABS_SIZE_LIMIT_KEY, tabSizeLimit);
        }
      }
    });
    EditorWindow window = windowRef.get();
    LOG.assertTrue(window != null);
    VirtualFile focusedFile = null;
    FileEditorManagerImpl fileEditorManager = getManager();
    for (int i = 0; i < fileElements.size(); i++) {
      Element file = fileElements.get(i);
      // NOTE(review): assumes every <file> element has a history child — TODO confirm; a missing
      // child would NPE on the next line rather than be reported as InvalidDataException.
      Element historyElement = file.getChild(HistoryEntry.TAG);
      String fileName = historyElement.getAttributeValue(HistoryEntry.FILE_ATTR);
      Activity activity = StartUpMeasurer.startActivity(PathUtil.getFileName(fileName), ActivityCategory.REOPENING_EDITOR);
      VirtualFile virtualFile = null;
      try {
        HistoryEntry entry = HistoryEntry.createLight(fileEditorManager.getProject(), historyElement);
        virtualFile = entry.getFile();
        if (virtualFile == null) {
          throw new InvalidDataException("No file exists: " + entry.getFilePointer().getUrl());
        }
        // Mark the file so isOpenedInBulk() reports true while it is being restored.
        virtualFile.putUserData(OPENED_IN_BULK, Boolean.TRUE);
        VirtualFile finalVirtualFile = virtualFile;
        Document document = ReadAction.compute(() -> {
          return finalVirtualFile.isValid() ? FileDocumentManager.getInstance().getDocument(finalVirtualFile) : null;
        });
        boolean isCurrentTab = Boolean.parseBoolean(file.getAttributeValue(CURRENT_IN_TAB));
        FileEditorOpenOptions openOptions = new FileEditorOpenOptions()
          .withPin(Boolean.valueOf(file.getAttributeValue(PINNED)))
          .withIndex(i)
          .withReopeningEditorsOnStartup();
        fileEditorManager.openFileImpl4(window, virtualFile, entry, openOptions);
        if (isCurrentTab) {
          focusedFile = virtualFile;
        }
        if (document != null) {
          // This is just to make sure document reference is kept on stack till this point
          // so that document is available for folding state deserialization in HistoryEntry constructor
          // and that document will be created only once during file opening
          document.putUserData(DUMMY_KEY, null);
        }
      }
      catch (InvalidDataException e) {
        // Broken persisted entries are skipped silently in production but surfaced in tests.
        if (ApplicationManager.getApplication().isUnitTestMode()) {
          LOG.error(e);
        }
      }
      finally {
        // Always clear the bulk marker, even when restoration failed.
        if (virtualFile != null) {
          virtualFile.putUserData(OPENED_IN_BULK, null);
        }
      }
      activity.end();
    }
    if (focusedFile == null) {
      // No tab was marked current: fall back to activating the project view tool window.
      ToolWindowManager manager = ToolWindowManager.getInstance(getManager().getProject());
      manager.invokeLater(() -> {
        if (manager.getActiveToolWindowId() == null) {
          ToolWindow toolWindow = manager.getToolWindow(PROJECT_VIEW);
          if (toolWindow != null) {
            toolWindow.activate(null);
          }
        }
      });
    }
    else {
      fileEditorManager.addSelectionRecord(focusedFile, window);
      VirtualFile finalFocusedFile = focusedFile;
      UIUtil.invokeLaterIfNeeded(() -> {
        EditorWithProviderComposite editor = window.findFileComposite(finalFocusedFile);
        if (editor != null) {
          window.setEditor(editor, true, true);
        }
      });
    }
    return window.myPanel;
  }
  /**
   * Restores one splitter node. Without a context a fresh panel + Splitter pair is built on
   * the EDT from the recursively restored children; with a context the existing component
   * tree is reused in place.
   */
  @Override
  protected JPanel processSplitter(@NotNull Element splitterElement, Element firstChild, Element secondChild, JPanel context) {
    if (context == null) {
      boolean orientation = "vertical".equals(splitterElement.getAttributeValue("split-orientation"));
      float proportion = Float.parseFloat(splitterElement.getAttributeValue("split-proportion"));
      JPanel firstComponent = process(firstChild, null);
      JPanel secondComponent = process(secondChild, null);
      Ref<JPanel> panelRef = new Ref<>();
      ApplicationManager.getApplication().invokeAndWait(() -> {
        JPanel panel = new JPanel(new BorderLayout());
        panel.setOpaque(false);
        Splitter splitter = new OnePixelSplitter(orientation, proportion, 0.1f, 0.9f);
        panel.add(splitter, BorderLayout.CENTER);
        splitter.setFirstComponent(firstComponent);
        splitter.setSecondComponent(secondComponent);
        panelRef.set(panel);
      });
      return panelRef.get();
    }
    Ref<JPanel> firstComponent = new Ref<>();
    Ref<JPanel> secondComponent = new Ref<>();
    ApplicationManager.getApplication().invokeAndWait(() -> {
      if (context.getComponent(0) instanceof Splitter) {
        Splitter splitter = (Splitter)context.getComponent(0);
        firstComponent.set((JPanel)splitter.getFirstComponent());
        secondComponent.set((JPanel)splitter.getSecondComponent());
      }
      else {
        // No splitter at this level yet: both halves resolve to the same existing panel.
        firstComponent.set(context);
        secondComponent.set(context);
      }
    });
    process(firstChild, firstComponent.get());
    process(secondChild, secondComponent.get());
    return context;
  }
}
/**
 * Resolves which {@link EditorsSplitters} should receive focus based on the most recently
 * focused window, with special handling for floating tool-window decorators and child frames.
 * Returns {@code null} when no suitable splitters can be determined.
 */
@Nullable
private static EditorsSplitters getSplittersToFocus(@Nullable Project project) {
  Window activeWindow = WindowManagerEx.getInstanceEx().getMostRecentFocusedWindow();
  if (activeWindow instanceof FloatingDecorator) {
    // A floating tool window is in front: fall back to the frame focused before it.
    IdeFrame lastFocusedFrame = IdeFocusManager.findInstanceByComponent(activeWindow).getLastFocusedFrame();
    JComponent frameComponent = lastFocusedFrame != null ? lastFocusedFrame.getComponent() : null;
    Window lastFocusedWindow = frameComponent == null ? null : SwingUtilities.getWindowAncestor(frameComponent);
    activeWindow = ObjectUtils.notNull(lastFocusedWindow, activeWindow);
    if (project == null) {
      project = lastFocusedFrame == null ? null : lastFocusedFrame.getProject();
    }
    FileEditorManagerEx fileEditorManager = project == null || project.isDisposed() ? null : FileEditorManagerEx.getInstanceEx(project);
    if (fileEditorManager == null) {
      return null;
    }
    EditorsSplitters splitters = fileEditorManager.getSplittersFor(activeWindow);
    return splitters != null ? splitters : fileEditorManager.getSplitters();
  }
  if (activeWindow instanceof IdeFrame.Child) {
    // Child frame (e.g. detached editor): resolve via the owning project's main frame.
    if (project == null) {
      project = ((IdeFrame.Child)activeWindow).getProject();
    }
    return getSplittersForProject(WindowManager.getInstance().getFrame(project), project);
  }
  // Only honor the last focused IDE frame while it is actually active.
  IdeFrame frame = FocusManagerImpl.getInstance().getLastFocusedFrame();
  if (frame instanceof IdeFrameImpl && ((IdeFrameImpl)frame).isActive()) {
    return getSplittersForProject(activeWindow, frame.getProject());
  }
  return null;
}
/**
 * Looks up the splitters of {@code project} for the given window, falling back to the
 * project's main splitters; {@code null} when the project is absent/disposed or has no
 * file editor manager.
 */
@Nullable
private static EditorsSplitters getSplittersForProject(@Nullable Window activeWindow, @Nullable Project project) {
  if (project == null || project.isDisposed()) {
    return null;
  }
  final FileEditorManagerEx fileEditorManager = FileEditorManagerEx.getInstanceEx(project);
  if (fileEditorManager == null) {
    return null;
  }
  final EditorsSplitters splitters = activeWindow != null ? fileEditorManager.getSplittersFor(activeWindow) : null;
  return splitters != null ? splitters : fileEditorManager.getSplitters();
}
/**
 * Finds the component that should get focus by default inside the splitters resolved for
 * {@code project}: the preferred focused component of the current window's selected editor,
 * or {@code null} when any link of that chain is missing.
 */
@Nullable
public static JComponent findDefaultComponentInSplitters(@Nullable Project project) {
  final EditorsSplitters splitters = getSplittersToFocus(project);
  if (splitters == null) {
    return null;
  }
  final EditorWindow currentWindow = splitters.getCurrentWindow();
  if (currentWindow == null) {
    return null;
  }
  final EditorWithProviderComposite editor = currentWindow.getSelectedEditor();
  return editor == null ? null : editor.getPreferredFocusedComponent();
}
/**
 * Focuses the default editor component of the project's splitters, if one can be resolved.
 *
 * @return {@code true} when focus was requested, {@code false} when there was no target
 */
public static boolean focusDefaultComponentInSplittersIfPresent(@NotNull Project project) {
  final JComponent target = findDefaultComponentInSplitters(project);
  if (target == null) {
    return false;
  }
  // not requestFocus because if floating or windowed tool window is deactivated (or, ESC pressed to focus editor), then we should focus our window
  target.requestFocus();
  return true;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.security.Key;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import javax.crypto.spec.SecretKeySpec;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Waiter;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.CompactionState;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
import org.apache.hadoop.hbase.io.crypto.aes.AES;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.security.EncryptionUtil;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
/**
 * Verifies HFile encryption key rotation:
 * the per-column-family data key (via online schema change followed by a major compaction)
 * and the cluster master key (via restart with an alternate master key configured).
 */
@Category({RegionServerTests.class, MediumTests.class})
public class TestEncryptionKeyRotation {
  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestEncryptionKeyRotation.class);
  private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
  private static final Configuration conf = TEST_UTIL.getConfiguration();
  // Two distinct CF data keys so a rotation is observable in store file metadata.
  private static final Key initialCFKey;
  private static final Key secondCFKey;
  @Rule
  public TestName name = new TestName();
  static {
    // Create the test encryption keys
    SecureRandom rng = new SecureRandom();
    byte[] keyBytes = new byte[AES.KEY_LENGTH];
    rng.nextBytes(keyBytes);
    String algorithm =
      conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
    initialCFKey = new SecretKeySpec(keyBytes, algorithm);
    rng.nextBytes(keyBytes);
    secondCFKey = new SecretKeySpec(keyBytes, algorithm);
  }
  @BeforeClass
  public static void setUp() throws Exception {
    // HFile v3 is required for encryption support.
    conf.setInt("hfile.format.version", 3);
    conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
    conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
    // Start the minicluster
    TEST_UTIL.startMiniCluster(1);
  }
  @AfterClass
  public static void tearDown() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }
  /**
   * Rotates the CF data key and checks that, after a major compaction, live store files use
   * the new key while compacted-away files still carry the initial one.
   */
  @Test
  public void testCFKeyRotation() throws Exception {
    // Create the table schema
    TableDescriptorBuilder tableDescriptorBuilder =
      TableDescriptorBuilder.newBuilder(TableName.valueOf("default", name.getMethodName()));
    ColumnFamilyDescriptorBuilder columnFamilyDescriptorBuilder =
      ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf"));
    String algorithm =
      conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
    columnFamilyDescriptorBuilder.setEncryptionType(algorithm);
    columnFamilyDescriptorBuilder.setEncryptionKey(EncryptionUtil.wrapKey(conf, "hbase",
      initialCFKey));
    tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptorBuilder.build());
    TableDescriptor tableDescriptor = tableDescriptorBuilder.build();
    // Create the table and some on disk files
    createTableAndFlush(tableDescriptor);
    // Verify we have store file(s) with the initial key
    final List<Path> initialPaths = findStorefilePaths(tableDescriptor.getTableName());
    assertTrue(initialPaths.size() > 0);
    for (Path path: initialPaths) {
      assertTrue("Store file " + path + " has incorrect key",
        Bytes.equals(initialCFKey.getEncoded(), extractHFileKey(path)));
    }
    // Update the schema with a new encryption key
    columnFamilyDescriptorBuilder.setEncryptionKey(EncryptionUtil.wrapKey(conf,
      conf.get(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, User.getCurrent().getShortName()),
      secondCFKey));
    TEST_UTIL.getAdmin().modifyColumnFamily(tableDescriptor.getTableName(),
      columnFamilyDescriptorBuilder.build());
    Thread.sleep(5000); // Need a predicate for online schema change
    // And major compact
    TEST_UTIL.getAdmin().majorCompact(tableDescriptor.getTableName());
    // waiting for the major compaction to complete
    TEST_UTIL.waitFor(30000, new Waiter.Predicate<IOException>() {
      @Override
      public boolean evaluate() throws IOException {
        return TEST_UTIL.getAdmin().getCompactionState(tableDescriptor
          .getTableName()) == CompactionState.NONE;
      }
    });
    // Live store files must have been rewritten with the new key.
    List<Path> pathsAfterCompaction = findStorefilePaths(tableDescriptor.getTableName());
    assertTrue(pathsAfterCompaction.size() > 0);
    for (Path path: pathsAfterCompaction) {
      assertTrue("Store file " + path + " has incorrect key",
        Bytes.equals(secondCFKey.getEncoded(), extractHFileKey(path)));
    }
    // Compacted-away files were written before the rotation and must still carry the old key.
    List<Path> compactedPaths = findCompactedStorefilePaths(tableDescriptor.getTableName());
    assertTrue(compactedPaths.size() > 0);
    for (Path path: compactedPaths) {
      // Fixed message: JUnit prints it on FAILURE, i.e. when the file does NOT have the
      // initial key — the old text ("retains initial key") described success instead.
      assertTrue("Store file " + path + " does not have the initial key",
        Bytes.equals(initialCFKey.getEncoded(), extractHFileKey(path)));
    }
  }
  /**
   * Rotates the master (wrapping) key by restarting the cluster with a new master key and the
   * old one configured as the alternate, then verifies the store files remain readable.
   */
  @Test
  public void testMasterKeyRotation() throws Exception {
    // Create the table schema
    TableDescriptorBuilder tableDescriptorBuilder =
      TableDescriptorBuilder.newBuilder(TableName.valueOf("default", name.getMethodName()));
    ColumnFamilyDescriptorBuilder columnFamilyDescriptorBuilder =
      ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf"));
    String algorithm =
      conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
    columnFamilyDescriptorBuilder.setEncryptionType(algorithm);
    columnFamilyDescriptorBuilder.setEncryptionKey(
      EncryptionUtil.wrapKey(conf, "hbase", initialCFKey));
    tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptorBuilder.build());
    TableDescriptor tableDescriptor = tableDescriptorBuilder.build();
    // Create the table and some on disk files
    createTableAndFlush(tableDescriptor);
    // Verify we have store file(s) with the initial key
    List<Path> storeFilePaths = findStorefilePaths(tableDescriptor.getTableName());
    assertTrue(storeFilePaths.size() > 0);
    for (Path path: storeFilePaths) {
      assertTrue("Store file " + path + " has incorrect key",
        Bytes.equals(initialCFKey.getEncoded(), extractHFileKey(path)));
    }
    // Now shut down the HBase cluster
    TEST_UTIL.shutdownMiniHBaseCluster();
    // "Rotate" the master key
    conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "other");
    conf.set(HConstants.CRYPTO_MASTERKEY_ALTERNATE_NAME_CONF_KEY, "hbase");
    // Start the cluster back up
    TEST_UTIL.startMiniHBaseCluster();
    // Verify the table can still be loaded
    TEST_UTIL.waitTableAvailable(tableDescriptor.getTableName(), 5000);
    // Double check that the store file keys can be unwrapped
    storeFilePaths = findStorefilePaths(tableDescriptor.getTableName());
    assertTrue(storeFilePaths.size() > 0);
    for (Path path: storeFilePaths) {
      assertTrue("Store file " + path + " has incorrect key",
        Bytes.equals(initialCFKey.getEncoded(), extractHFileKey(path)));
    }
  }
  /** Collects the paths of all live store files of every store of the table's regions. */
  private static List<Path> findStorefilePaths(TableName tableName) throws Exception {
    List<Path> paths = new ArrayList<>();
    for (Region region : TEST_UTIL.getRSForFirstRegionInTable(tableName)
      .getRegions(tableName)) {
      for (HStore store : ((HRegion) region).getStores()) {
        for (HStoreFile storefile : store.getStorefiles()) {
          paths.add(storefile.getPath());
        }
      }
    }
    return paths;
  }
  /** Collects the paths of store files already compacted away but not yet archived. */
  private static List<Path> findCompactedStorefilePaths(TableName tableName) throws Exception {
    List<Path> paths = new ArrayList<>();
    for (Region region : TEST_UTIL.getRSForFirstRegionInTable(tableName)
      .getRegions(tableName)) {
      for (HStore store : ((HRegion) region).getStores()) {
        Collection<HStoreFile> compactedfiles =
          store.getStoreEngine().getStoreFileManager().getCompactedfiles();
        if (compactedfiles != null) {
          for (HStoreFile storefile : compactedfiles) {
            paths.add(storefile.getPath());
          }
        }
      }
    }
    return paths;
  }
  /** Creates the table, writes one row, and flushes so at least one store file exists on disk. */
  private void createTableAndFlush(TableDescriptor tableDescriptor) throws Exception {
    ColumnFamilyDescriptor cfd = tableDescriptor.getColumnFamilies()[0];
    // Create the test table
    TEST_UTIL.getAdmin().createTable(tableDescriptor);
    TEST_UTIL.waitTableAvailable(tableDescriptor.getTableName(), 5000);
    // Create a store file
    Table table = TEST_UTIL.getConnection().getTable(tableDescriptor.getTableName());
    try {
      table.put(new Put(Bytes.toBytes("testrow"))
        .addColumn(cfd.getName(), Bytes.toBytes("q"), Bytes.toBytes("value")));
    } finally {
      table.close();
    }
    TEST_UTIL.getAdmin().flush(tableDescriptor.getTableName());
  }
  /** Reads a store file's trailer and returns the raw encoded data key from its crypto context. */
  private static byte[] extractHFileKey(Path path) throws Exception {
    HFile.Reader reader = HFile.createReader(TEST_UTIL.getTestFileSystem(), path,
      new CacheConfig(conf), true, conf);
    try {
      Encryption.Context cryptoContext = reader.getFileContext().getEncryptionContext();
      assertNotNull("Reader has a null crypto context", cryptoContext);
      Key key = cryptoContext.getKey();
      assertNotNull("Crypto context has no key", key);
      return key.getEncoded();
    } finally {
      reader.close();
    }
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.service;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.cluster.AckedClusterStateTaskListener;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterState.Builder;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.ClusterStateTaskConfig;
import org.elasticsearch.cluster.ClusterStateTaskExecutor;
import org.elasticsearch.cluster.ClusterStateTaskListener;
import org.elasticsearch.cluster.ClusterStateUpdateTask;
import org.elasticsearch.cluster.LocalNodeMasterListener;
import org.elasticsearch.cluster.NodeConnectionsService;
import org.elasticsearch.cluster.TimeoutClusterStateListener;
import org.elasticsearch.cluster.block.ClusterBlock;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.metadata.ProcessClusterEventTimeoutException;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.OperationRouting;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.util.concurrent.CountDown;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.common.util.concurrent.PrioritizedEsThreadPoolExecutor;
import org.elasticsearch.common.util.concurrent.PrioritizedRunnable;
import org.elasticsearch.common.util.iterable.Iterables;
import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.threadpool.ThreadPool;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Queue;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.Executor;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.BiConsumer;
import java.util.stream.Collectors;
import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory;
public class ClusterService extends AbstractLifecycleComponent {
    // Dynamic threshold above which a cluster-state update task is reported as slow.
    public static final Setting<TimeValue> CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING =
        Setting.positiveTimeSetting("cluster.service.slow_task_logging_threshold", TimeValue.timeValueSeconds(30),
            Property.Dynamic, Property.NodeScope);
    public static final String UPDATE_THREAD_NAME = "clusterService#updateTask";
    private final ThreadPool threadPool;
    private final ClusterName clusterName;
    // Publishes committed cluster-state changes; injected via setClusterStatePublisher before start.
    private BiConsumer<ClusterChangedEvent, Discovery.AckListener> clusterStatePublisher;
    private final OperationRouting operationRouting;
    private final ClusterSettings clusterSettings;
    private TimeValue slowTaskLoggingThreshold;
    // Single-threaded prioritized executor for applying state updates; created in doStart.
    private volatile PrioritizedEsThreadPoolExecutor updateTasksExecutor;
    /**
     * Those 3 state listeners are changing infrequently - CopyOnWriteArrayList is just fine
     */
    private final Collection<ClusterStateListener> priorityClusterStateListeners = new CopyOnWriteArrayList<>();
    private final Collection<ClusterStateListener> clusterStateListeners = new CopyOnWriteArrayList<>();
    private final Collection<ClusterStateListener> lastClusterStateListeners = new CopyOnWriteArrayList<>();
    private final Map<ClusterStateTaskExecutor, List<UpdateTask>> updateTasksPerExecutor = new HashMap<>();
    // TODO this is rather frequently changing I guess a Synced Set would be better here and a dedicated remove API
    private final Collection<ClusterStateListener> postAppliedListeners = new CopyOnWriteArrayList<>();
    // Notification order: priority listeners first, then regular, then "last" listeners.
    private final Iterable<ClusterStateListener> preAppliedListeners = Iterables.concat(priorityClusterStateListeners,
        clusterStateListeners, lastClusterStateListeners);
    private final LocalNodeMasterListeners localNodeMasterListeners;
    private final Queue<NotifyTimeout> onGoingTimeouts = ConcurrentCollections.newQueue();
    // Last known cluster state; replaced on doStart and on every applied update.
    private volatile ClusterState clusterState;
    // Blocks accumulated before start; applied to the first cluster state in doStart.
    private final ClusterBlocks.Builder initialBlocks;
    private NodeConnectionsService nodeConnectionsService;
    /**
     * Creates the cluster service.
     *
     * @param settings        node settings
     * @param clusterSettings registry of dynamic cluster-wide settings
     * @param threadPool      thread pool used for listener notification and timeouts
     */
    public ClusterService(Settings settings,
                          ClusterSettings clusterSettings, ThreadPool threadPool) {
        super(settings);
        this.operationRouting = new OperationRouting(settings, clusterSettings);
        this.threadPool = threadPool;
        this.clusterSettings = clusterSettings;
        this.clusterName = ClusterName.CLUSTER_NAME_SETTING.get(settings);
        // will be replaced on doStart.
        this.clusterState = ClusterState.builder(clusterName).build();
        // Keep slowTaskLoggingThreshold in sync with dynamic settings updates.
        this.clusterSettings.addSettingsUpdateConsumer(CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING,
            this::setSlowTaskLoggingThreshold);
        this.slowTaskLoggingThreshold = CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING.get(settings);
        localNodeMasterListeners = new LocalNodeMasterListeners(threadPool);
        initialBlocks = ClusterBlocks.builder();
    }
    // Callback target for the dynamic slow-task-logging threshold setting.
    private void setSlowTaskLoggingThreshold(TimeValue slowTaskLoggingThreshold) {
        this.slowTaskLoggingThreshold = slowTaskLoggingThreshold;
    }
    /** Injects the publisher used to broadcast committed cluster-state changes; required before start. */
    public synchronized void setClusterStatePublisher(BiConsumer<ClusterChangedEvent, Discovery.AckListener> publisher) {
        clusterStatePublisher = publisher;
    }
    /** Registers the local node exactly once; required before start. */
    public synchronized void setLocalNode(DiscoveryNode localNode) {
        assert clusterState.nodes().getLocalNodeId() == null : "local node is already set";
        DiscoveryNodes.Builder nodeBuilder = DiscoveryNodes.builder(clusterState.nodes()).add(localNode).localNodeId(localNode.getId());
        this.clusterState = ClusterState.builder(clusterState).nodes(nodeBuilder).build();
    }
    /** Injects the node connections service exactly once; required before start. */
    public synchronized void setNodeConnectionsService(NodeConnectionsService nodeConnectionsService) {
        assert this.nodeConnectionsService == null : "nodeConnectionsService is already set";
        this.nodeConnectionsService = nodeConnectionsService;
    }
    /**
     * Adds an initial block to be set on the first cluster state created.
     *
     * @throws IllegalStateException if the service has already been started
     */
    public synchronized void addInitialStateBlock(ClusterBlock block) throws IllegalStateException {
        if (lifecycle.started()) {
            throw new IllegalStateException("can't set initial block when started");
        }
        initialBlocks.addGlobalBlock(block);
    }
    /**
     * Remove an initial block to be set on the first cluster state created.
     *
     * @throws IllegalStateException if the service has already been started
     */
    public synchronized void removeInitialStateBlock(ClusterBlock block) throws IllegalStateException {
        removeInitialStateBlock(block.id());
    }
/**
* Remove an initial block to be set on the first cluster state created.
*/
public synchronized void removeInitialStateBlock(int blockId) throws IllegalStateException {
if (lifecycle.started()) {
throw new IllegalStateException("can't set initial block when started");
}
initialBlocks.removeGlobalBlock(blockId);
}
@Override
protected synchronized void doStart() {
Objects.requireNonNull(clusterStatePublisher, "please set a cluster state publisher before starting");
Objects.requireNonNull(clusterState.nodes().getLocalNode(), "please set the local node before starting");
Objects.requireNonNull(nodeConnectionsService, "please set the node connection service before starting");
add(localNodeMasterListeners);
this.clusterState = ClusterState.builder(clusterState).blocks(initialBlocks).build();
this.updateTasksExecutor = EsExecutors.newSinglePrioritizing(UPDATE_THREAD_NAME, daemonThreadFactory(settings, UPDATE_THREAD_NAME),
threadPool.getThreadContext());
this.clusterState = ClusterState.builder(clusterState).blocks(initialBlocks).build();
}
@Override
protected synchronized void doStop() {
    // cancel pending timeout callbacks and notify their listeners that we are closing;
    // the original code called cancel() twice per timeout — the duplicate call was removed
    for (NotifyTimeout onGoingTimeout : onGoingTimeouts) {
        onGoingTimeout.cancel();
        try {
            onGoingTimeout.listener.onClose();
        } catch (Exception ex) {
            // best effort: a misbehaving listener must not prevent shutdown of the others
            logger.debug("failed to notify listeners on shutdown", ex);
        }
    }
    ThreadPool.terminate(updateTasksExecutor, 10, TimeUnit.SECONDS);
    // close timeout listeners that did not have an ongoing timeout
    postAppliedListeners
        .stream()
        .filter(listener -> listener instanceof TimeoutClusterStateListener)
        .map(listener -> (TimeoutClusterStateListener) listener)
        .forEach(TimeoutClusterStateListener::onClose);
    remove(localNodeMasterListeners);
}
@Override
protected synchronized void doClose() {
// intentionally empty: all shutdown work (timeouts, executor, listeners) happens in doStop()
}
/**
 * Returns the {@link DiscoveryNode} representing this node in the current cluster state.
 *
 * @throws IllegalStateException if the cluster state does not yet contain a local node
 *         (i.e. the node has not been started)
 */
public DiscoveryNode localNode() {
    final DiscoveryNode node = clusterState.getNodes().getLocalNode();
    if (node == null) {
        throw new IllegalStateException("No local node found. Is the node started?");
    }
    return node;
}
/** Returns the {@link OperationRouting} used to resolve shard routing for operations. */
public OperationRouting operationRouting() {
    return this.operationRouting;
}
/**
 * Returns the most recently applied cluster state.
 */
public ClusterState state() {
    return clusterState;
}
/**
 * Registers a high-priority listener that is notified of cluster state changes
 * before the regular listeners.
 */
public void addFirst(ClusterStateListener listener) {
    this.priorityClusterStateListeners.add(listener);
}
/**
 * Registers a listener that is notified of cluster state changes after the
 * regular listeners.
 */
public void addLast(ClusterStateListener listener) {
    this.lastClusterStateListeners.add(listener);
}
/**
 * Registers a listener that is notified of cluster state changes.
 */
public void add(ClusterStateListener listener) {
    this.clusterStateListeners.add(listener);
}
/**
 * Deregisters a cluster state listener from every listener collection it may have
 * been added to, and cancels any pending timeout associated with it.
 */
public void remove(ClusterStateListener listener) {
    clusterStateListeners.remove(listener);
    priorityClusterStateListeners.remove(listener);
    lastClusterStateListeners.remove(listener);
    postAppliedListeners.remove(listener);
    // also cancel and drop any scheduled timeout bound to this listener
    final Iterator<NotifyTimeout> it = onGoingTimeouts.iterator();
    while (it.hasNext()) {
        final NotifyTimeout timeout = it.next();
        if (timeout.listener.equals(listener)) {
            timeout.cancel();
            it.remove();
        }
    }
}
/**
 * Registers a listener for local-node-becomes-master / stops-being-master events.
 */
public void add(LocalNodeMasterListener listener) {
    this.localNodeMasterListeners.add(listener);
}
/**
 * Deregisters the given local-node master listener.
 */
public void remove(LocalNodeMasterListener listener) {
    this.localNodeMasterListeners.remove(listener);
}
/**
 * Adds a cluster state listener that will timeout after the provided timeout,
 * and is executed after the clusterstate has been successfully applied ie. is
 * in state {@link org.elasticsearch.cluster.ClusterState.ClusterStateStatus#APPLIED}
 * NOTE: a {@code null} timeout means that the listener will never be removed
 * automatically
 */
public void add(@Nullable final TimeValue timeout, final TimeoutClusterStateListener listener) {
// service already shutting down: notify the listener immediately instead of registering it
if (lifecycle.stoppedOrClosed()) {
listener.onClose();
return;
}
// call the post added notification on the same event thread
try {
// registration runs on the single update thread so it cannot race with state application
updateTasksExecutor.execute(new SourcePrioritizedRunnable(Priority.HIGH, "_add_listener_") {
@Override
public void run() {
if (timeout != null) {
// schedule the timeout callback and remember it so remove()/doStop() can cancel it
NotifyTimeout notifyTimeout = new NotifyTimeout(listener, timeout);
notifyTimeout.future = threadPool.schedule(timeout, ThreadPool.Names.GENERIC, notifyTimeout);
onGoingTimeouts.add(notifyTimeout);
}
postAppliedListeners.add(listener);
listener.postAdded();
}
});
} catch (EsRejectedExecutionException e) {
// the executor rejects work when shut down; treat that as a close, otherwise rethrow
if (lifecycle.stoppedOrClosed()) {
listener.onClose();
} else {
throw e;
}
}
}
/**
 * Submits a cluster state update task; unlike {@link #submitStateUpdateTask(String, Object, ClusterStateTaskConfig,
 * ClusterStateTaskExecutor, ClusterStateTaskListener)}, submitted updates will not be batched.
 *
 * @param source the source of the cluster state update task
 * @param updateTask the full context for the cluster state update
 * task
 *
 */
public void submitStateUpdateTask(final String source, final ClusterStateUpdateTask updateTask) {
// a ClusterStateUpdateTask is simultaneously its own task state, config, executor and listener
submitStateUpdateTask(source, updateTask, updateTask, updateTask, updateTask);
}
/**
 * Submits a cluster state update task; submitted updates will be
 * batched across the same instance of executor. The exact batching
 * semantics depend on the underlying implementation but a rough
 * guideline is that if the update task is submitted while there
 * are pending update tasks for the same executor, these update
 * tasks will all be executed on the executor in a single batch
 *
 * @param source the source of the cluster state update task
 * @param task the state needed for the cluster state update task
 * @param config the cluster state update task configuration
 * @param executor the cluster state update task executor; tasks
 * that share the same executor will be executed
 * batches on this executor
 * @param listener callback after the cluster state update task
 * completes
 * @param <T> the type of the cluster state update task state
 *
 */
public <T> void submitStateUpdateTask(final String source, final T task,
final ClusterStateTaskConfig config,
final ClusterStateTaskExecutor<T> executor,
final ClusterStateTaskListener listener) {
// single-task convenience wrapper around the batch submission API
submitStateUpdateTasks(source, Collections.singletonMap(task, listener), config, executor);
}
/**
 * Submits a batch of cluster state update tasks; submitted updates are guaranteed to be processed together,
 * potentially with more tasks of the same executor.
 *
 * @param source the source of the cluster state update task
 * @param tasks a map of update tasks and their corresponding listeners
 * @param config the cluster state update task configuration
 * @param executor the cluster state update task executor; tasks
 * that share the same executor will be executed
 * batches on this executor
 * @param <T> the type of the cluster state update task state
 *
 */
public <T> void submitStateUpdateTasks(final String source,
final Map<T, ClusterStateTaskListener> tasks, final ClusterStateTaskConfig config,
final ClusterStateTaskExecutor<T> executor) {
// silently drop submissions while not running; callers are expected to tolerate this
if (!lifecycle.started()) {
return;
}
if (tasks.isEmpty()) {
return;
}
try {
// convert to an identity map to check for dups based on update tasks semantics of using identity instead of equal
final IdentityHashMap<T, ClusterStateTaskListener> tasksIdentity = new IdentityHashMap<>(tasks);
// wrap every (task, listener) pair; safe(...) shields us from listener exceptions
final List<UpdateTask<T>> updateTasks = tasksIdentity.entrySet().stream().map(
entry -> new UpdateTask<>(source, entry.getKey(), config, executor, safe(entry.getValue(), logger))
).collect(Collectors.toList());
synchronized (updateTasksPerExecutor) {
// reject resubmission of a task instance that is already queued for this executor
List<UpdateTask> existingTasks = updateTasksPerExecutor.computeIfAbsent(executor, k -> new ArrayList<>());
for (@SuppressWarnings("unchecked") UpdateTask<T> existing : existingTasks) {
if (tasksIdentity.containsKey(existing.task)) {
throw new IllegalStateException("task [" + executor.describeTasks(Collections.singletonList(existing.task)) +
"] with source [" + source + "] is already queued");
}
}
existingTasks.addAll(updateTasks);
}
// only the first task is submitted to the executor; when it runs, runTasksForExecutor
// drains and processes the whole batch registered above
final UpdateTask<T> firstTask = updateTasks.get(0);
if (config.timeout() != null) {
// on timeout, fail every task in the batch that has not been processed yet
updateTasksExecutor.execute(firstTask, threadPool.scheduler(), config.timeout(), () -> threadPool.generic().execute(() -> {
for (UpdateTask<T> task : updateTasks) {
if (task.processed.getAndSet(true) == false) {
logger.debug("cluster state update task [{}] timed out after [{}]", source, config.timeout());
task.listener.onFailure(source, new ProcessClusterEventTimeoutException(config.timeout(), source));
}
}
}));
} else {
updateTasksExecutor.execute(firstTask);
}
} catch (EsRejectedExecutionException e) {
// ignore cases where we are shutting down..., there is really nothing interesting
// to be done here...
if (!lifecycle.stoppedOrClosed()) {
throw e;
}
}
}
/**
 * Returns a snapshot of the update tasks currently queued on the cluster state
 * update executor, in pending order.
 */
public List<PendingClusterTask> pendingTasks() {
    final PrioritizedEsThreadPoolExecutor.Pending[] allPending = updateTasksExecutor.getPending();
    final List<PendingClusterTask> result = new ArrayList<>(allPending.length);
    for (PrioritizedEsThreadPoolExecutor.Pending pending : allPending) {
        // we have to capture the task as it will be nulled after execution and we don't want to change while we check things here.
        final Object task = pending.task;
        if (task == null) {
            continue;
        }
        final String source;
        final long timeInQueue;
        if (task instanceof SourcePrioritizedRunnable) {
            final SourcePrioritizedRunnable runnable = (SourcePrioritizedRunnable) task;
            source = runnable.source();
            timeInQueue = runnable.getAgeInMillis();
        } else {
            // all tasks submitted through this service are SourcePrioritizedRunnable; anything
            // else indicates a bug, but we still report it rather than fail
            assert false : "expected SourcePrioritizedRunnable got " + task.getClass();
            source = "unknown [" + task.getClass() + "]";
            timeInQueue = 0;
        }
        result.add(
            new PendingClusterTask(pending.insertionOrder, pending.priority, new Text(source), timeInQueue, pending.executing));
    }
    return result;
}
/**
 * Returns how many update tasks are currently queued on the update executor.
 */
public int numberOfPendingTasks() {
    return this.updateTasksExecutor.getNumberOfPendingTasks();
}
/**
 * Returns the maximum wait time for tasks in the queue
 *
 * @return A zero time value if the queue is empty, otherwise the time value oldest task waiting in the queue
 */
public TimeValue getMaxTaskWaitTime() {
    return this.updateTasksExecutor.getMaxTaskWaitTime();
}
/**
 * Asserts (only when assertions are enabled) that the caller is running on the
 * dedicated cluster state update thread. Always returns {@code true} so it can be
 * used inside an {@code assert} statement at call sites.
 */
public static boolean assertClusterStateThread() {
    final String threadName = Thread.currentThread().getName();
    assert threadName.contains(ClusterService.UPDATE_THREAD_NAME) :
        "not called from the cluster state update thread";
    return true;
}
/** Returns the name of the cluster this service belongs to. */
public ClusterName getClusterName() {
    return this.clusterName;
}
// Base class for runnables queued on the update executor: a prioritized runnable that
// additionally carries the human-readable "source" string describing who submitted it,
// used for logging and for the pending-tasks API.
abstract static class SourcePrioritizedRunnable extends PrioritizedRunnable {
protected final String source;
public SourcePrioritizedRunnable(Priority priority, String source) {
super(priority);
this.source = source;
}
// the description of the task's origin, e.g. "shard-started" or "_add_listener_"
public String source() {
return source;
}
}
// Drains and runs all tasks queued for the given executor as one batch on the single
// cluster state update thread: computes a new cluster state, publishes it (when master),
// applies it locally, and notifies all listeners. Statement order here is significant
// (publish-before-apply, pre/post listener ordering), so read sequentially.
<T> void runTasksForExecutor(ClusterStateTaskExecutor<T> executor) {
final ArrayList<UpdateTask<T>> toExecute = new ArrayList<>();
final Map<String, ArrayList<T>> processTasksBySource = new HashMap<>();
// atomically claim all pending tasks for this executor; tasks already marked processed
// (e.g. by the timeout callback) are skipped
synchronized (updateTasksPerExecutor) {
List<UpdateTask> pending = updateTasksPerExecutor.remove(executor);
if (pending != null) {
for (UpdateTask<T> task : pending) {
if (task.processed.getAndSet(true) == false) {
logger.trace("will process {}", task.toString(executor));
toExecute.add(task);
processTasksBySource.computeIfAbsent(task.source, s -> new ArrayList<>()).add(task.task);
} else {
logger.trace("skipping {}, already processed", task.toString(executor));
}
}
}
}
if (toExecute.isEmpty()) {
return;
}
// build a human-readable summary like "source1[t1, t2], source2[t3]" for logging
final String tasksSummary = processTasksBySource.entrySet().stream().map(entry -> {
String tasks = executor.describeTasks(entry.getValue());
return tasks.isEmpty() ? entry.getKey() : entry.getKey() + "[" + tasks + "]";
}).reduce((s1, s2) -> s1 + ", " + s2).orElse("");
if (!lifecycle.started()) {
logger.debug("processing [{}]: ignoring, cluster_service not started", tasksSummary);
return;
}
logger.debug("processing [{}]: execute", tasksSummary);
ClusterState previousClusterState = clusterState;
// most executors only make sense on the elected master; fail their tasks if we lost mastership
if (!previousClusterState.nodes().isLocalNodeElectedMaster() && executor.runOnlyOnMaster()) {
logger.debug("failing [{}]: local node is no longer master", tasksSummary);
toExecute.stream().forEach(task -> task.listener.onNoLongerMaster(task.source));
return;
}
// run the executor over the whole batch to compute the candidate new cluster state
ClusterStateTaskExecutor.BatchResult<T> batchResult;
long startTimeNS = currentTimeInNanos();
try {
List<T> inputs = toExecute.stream().map(tUpdateTask -> tUpdateTask.task).collect(Collectors.toList());
batchResult = executor.execute(previousClusterState, inputs);
} catch (Exception e) {
// executor blew up: mark every task in the batch as failed and keep the previous state
TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(currentTimeInNanos() - startTimeNS)));
if (logger.isTraceEnabled()) {
logger.trace(
(Supplier<?>) () -> new ParameterizedMessage(
"failed to execute cluster state update in [{}], state:\nversion [{}], source [{}]\n{}{}{}",
executionTime,
previousClusterState.version(),
tasksSummary,
previousClusterState.nodes().prettyPrint(),
previousClusterState.routingTable().prettyPrint(),
previousClusterState.getRoutingNodes().prettyPrint()),
e);
}
warnAboutSlowTaskIfNeeded(executionTime, tasksSummary);
batchResult = ClusterStateTaskExecutor.BatchResult.<T>builder()
.failures(toExecute.stream().map(updateTask -> updateTask.task)::iterator, e)
.build(previousClusterState);
}
// sanity-check that the executor produced exactly one result per submitted task
assert batchResult.executionResults != null;
assert batchResult.executionResults.size() == toExecute.size()
: String.format(Locale.ROOT, "expected [%d] task result%s but was [%d]", toExecute.size(),
toExecute.size() == 1 ? "" : "s", batchResult.executionResults.size());
boolean assertsEnabled = false;
assert (assertsEnabled = true);
if (assertsEnabled) {
for (UpdateTask<T> updateTask : toExecute) {
assert batchResult.executionResults.containsKey(updateTask.task) :
"missing task result for " + updateTask.toString(executor);
}
}
ClusterState newClusterState = batchResult.resultingState;
final ArrayList<UpdateTask<T>> proccessedListeners = new ArrayList<>();
// fail all tasks that have failed and extract those that are waiting for results
for (UpdateTask<T> updateTask : toExecute) {
assert batchResult.executionResults.containsKey(updateTask.task) : "missing " + updateTask.toString(executor);
final ClusterStateTaskExecutor.TaskResult executionResult =
batchResult.executionResults.get(updateTask.task);
executionResult.handle(
() -> proccessedListeners.add(updateTask),
ex -> {
logger.debug(
(Supplier<?>)
() -> new ParameterizedMessage("cluster state update task {} failed", updateTask.toString(executor)), ex);
updateTask.listener.onFailure(updateTask.source, ex);
}
);
}
// short-circuit: identical state object means nothing changed, so skip publication entirely
if (previousClusterState == newClusterState) {
for (UpdateTask<T> task : proccessedListeners) {
if (task.listener instanceof AckedClusterStateTaskListener) {
//no need to wait for ack if nothing changed, the update can be counted as acknowledged
((AckedClusterStateTaskListener) task.listener).onAllNodesAcked(null);
}
task.listener.clusterStateProcessed(task.source, previousClusterState, newClusterState);
}
TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(currentTimeInNanos() - startTimeNS)));
logger.debug("processing [{}]: took [{}] no change in cluster_state", tasksSummary, executionTime);
warnAboutSlowTaskIfNeeded(executionTime, tasksSummary);
return;
}
try {
ArrayList<Discovery.AckListener> ackListeners = new ArrayList<>();
if (newClusterState.nodes().isLocalNodeElectedMaster()) {
// only the master controls the version numbers
Builder builder = ClusterState.builder(newClusterState).incrementVersion();
if (previousClusterState.routingTable() != newClusterState.routingTable()) {
builder.routingTable(RoutingTable.builder(newClusterState.routingTable())
.version(newClusterState.routingTable().version() + 1).build());
}
if (previousClusterState.metaData() != newClusterState.metaData()) {
builder.metaData(MetaData.builder(newClusterState.metaData()).version(newClusterState.metaData().version() + 1));
}
newClusterState = builder.build();
// set up ack tracking for listeners that asked for acknowledgement from other nodes
for (UpdateTask<T> task : proccessedListeners) {
if (task.listener instanceof AckedClusterStateTaskListener) {
final AckedClusterStateTaskListener ackedListener = (AckedClusterStateTaskListener) task.listener;
if (ackedListener.ackTimeout() == null || ackedListener.ackTimeout().millis() == 0) {
ackedListener.onAckTimeout();
} else {
try {
ackListeners.add(new AckCountDownListener(ackedListener, newClusterState.version(), newClusterState.nodes(),
threadPool));
} catch (EsRejectedExecutionException ex) {
if (logger.isDebugEnabled()) {
logger.debug("Couldn't schedule timeout thread - node might be shutting down", ex);
}
//timeout straightaway, otherwise we could wait forever as the timeout thread has not started
ackedListener.onAckTimeout();
}
}
}
}
}
final Discovery.AckListener ackListener = new DelegetingAckListener(ackListeners);
newClusterState.status(ClusterState.ClusterStateStatus.BEING_APPLIED);
if (logger.isTraceEnabled()) {
logger.trace("cluster state updated, source [{}]\n{}", tasksSummary, newClusterState.prettyPrint());
} else if (logger.isDebugEnabled()) {
logger.debug("cluster state updated, version [{}], source [{}]", newClusterState.version(), tasksSummary);
}
ClusterChangedEvent clusterChangedEvent = new ClusterChangedEvent(tasksSummary, newClusterState, previousClusterState);
// new cluster state, notify all listeners
final DiscoveryNodes.Delta nodesDelta = clusterChangedEvent.nodesDelta();
if (nodesDelta.hasChanges() && logger.isInfoEnabled()) {
String summary = nodesDelta.shortSummary();
if (summary.length() > 0) {
logger.info("{}, reason: {}", summary, tasksSummary);
}
}
// connect to newly added nodes BEFORE publishing, so the transport layer is ready for them
nodeConnectionsService.connectToAddedNodes(clusterChangedEvent);
// if we are the master, publish the new state to all nodes
// we publish here before we send a notification to all the listeners, since if it fails
// we don't want to notify
if (newClusterState.nodes().isLocalNodeElectedMaster()) {
logger.debug("publishing cluster state version [{}]", newClusterState.version());
try {
clusterStatePublisher.accept(clusterChangedEvent, ackListener);
} catch (Discovery.FailedToCommitClusterStateException t) {
// publication failed: do NOT apply locally; fail all tasks and bail out
final long version = newClusterState.version();
logger.warn(
(Supplier<?>) () -> new ParameterizedMessage(
"failing [{}]: failed to commit cluster state version [{}]", tasksSummary, version),
t);
proccessedListeners.forEach(task -> task.listener.onFailure(task.source, t));
return;
}
}
// update the current cluster state
clusterState = newClusterState;
logger.debug("set local cluster state to version {}", newClusterState.version());
try {
// nothing to do until we actually recover from the gateway or any other block indicates we need to disable persistency
if (clusterChangedEvent.state().blocks().disableStatePersistence() == false && clusterChangedEvent.metaDataChanged()) {
final Settings incomingSettings = clusterChangedEvent.state().metaData().settings();
clusterSettings.applySettings(incomingSettings);
}
} catch (Exception ex) {
logger.warn("failed to apply cluster settings", ex);
}
// notify pre-applied listeners, then disconnect removed nodes, mark APPLIED, then post-applied
for (ClusterStateListener listener : preAppliedListeners) {
try {
listener.clusterChanged(clusterChangedEvent);
} catch (Exception ex) {
logger.warn("failed to notify ClusterStateListener", ex);
}
}
nodeConnectionsService.disconnectFromRemovedNodes(clusterChangedEvent);
newClusterState.status(ClusterState.ClusterStateStatus.APPLIED);
for (ClusterStateListener listener : postAppliedListeners) {
try {
listener.clusterChanged(clusterChangedEvent);
} catch (Exception ex) {
logger.warn("failed to notify ClusterStateListener", ex);
}
}
//manual ack only from the master at the end of the publish
if (newClusterState.nodes().isLocalNodeElectedMaster()) {
try {
ackListener.onNodeAck(newClusterState.nodes().getLocalNode(), null);
} catch (Exception e) {
final DiscoveryNode localNode = newClusterState.nodes().getLocalNode();
logger.debug(
(Supplier<?>) () -> new ParameterizedMessage("error while processing ack for master node [{}]", localNode),
e);
}
}
// finally notify every successful task's listener that its update has been fully processed
for (UpdateTask<T> task : proccessedListeners) {
task.listener.clusterStateProcessed(task.source, previousClusterState, newClusterState);
}
try {
executor.clusterStatePublished(clusterChangedEvent);
} catch (Exception e) {
logger.error(
(Supplier<?>) () -> new ParameterizedMessage(
"exception thrown while notifying executor of new cluster state publication [{}]",
tasksSummary),
e);
}
TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(currentTimeInNanos() - startTimeNS)));
logger.debug("processing [{}]: took [{}] done applying updated cluster_state (version: {}, uuid: {})", tasksSummary,
executionTime, newClusterState.version(), newClusterState.stateUUID());
warnAboutSlowTaskIfNeeded(executionTime, tasksSummary);
} catch (Exception e) {
// applying the new state failed part-way; log extensively for diagnosis
TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(currentTimeInNanos() - startTimeNS)));
final long version = newClusterState.version();
final String stateUUID = newClusterState.stateUUID();
final String prettyPrint = newClusterState.prettyPrint();
logger.warn(
(Supplier<?>) () -> new ParameterizedMessage(
"failed to apply updated cluster state in [{}]:\nversion [{}], uuid [{}], source [{}]\n{}",
executionTime,
version,
stateUUID,
tasksSummary,
prettyPrint),
e);
// TODO: do we want to call updateTask.onFailure here?
}
}
/** Current time in nanoseconds; overridden in tests so that time can be controlled. */
protected long currentTimeInNanos() {
    return System.nanoTime();
}
/**
 * Wraps the given listener in an exception-safe adapter, preserving the acked
 * variant when the listener supports acknowledgements.
 */
private static SafeClusterStateTaskListener safe(ClusterStateTaskListener listener, Logger logger) {
    if (listener instanceof AckedClusterStateTaskListener) {
        return new SafeAckedClusterStateTaskListener((AckedClusterStateTaskListener) listener, logger);
    }
    return new SafeClusterStateTaskListener(listener, logger);
}
// Decorator around a ClusterStateTaskListener that catches and logs any exception
// thrown by the delegate, so a misbehaving listener cannot break the cluster state
// update thread.
private static class SafeClusterStateTaskListener implements ClusterStateTaskListener {
private final ClusterStateTaskListener listener;
private final Logger logger;
public SafeClusterStateTaskListener(ClusterStateTaskListener listener, Logger logger) {
this.listener = listener;
this.logger = logger;
}
@Override
public void onFailure(String source, Exception e) {
try {
listener.onFailure(source, e);
} catch (Exception inner) {
// keep the original failure attached to the listener's own exception
inner.addSuppressed(e);
logger.error(
(Supplier<?>) () -> new ParameterizedMessage(
"exception thrown by listener notifying of failure from [{}]", source), inner);
}
}
@Override
public void onNoLongerMaster(String source) {
try {
listener.onNoLongerMaster(source);
} catch (Exception e) {
logger.error(
(Supplier<?>) () -> new ParameterizedMessage(
"exception thrown by listener while notifying no longer master from [{}]", source), e);
}
}
@Override
public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
try {
listener.clusterStateProcessed(source, oldState, newState);
} catch (Exception e) {
// log both states to make post-mortem analysis possible
logger.error(
(Supplier<?>) () -> new ParameterizedMessage(
"exception thrown by listener while notifying of cluster state processed from [{}], old cluster state:\n" +
"{}\nnew cluster state:\n{}",
source,
oldState.prettyPrint(),
newState.prettyPrint()),
e);
}
}
}
// Acked variant of SafeClusterStateTaskListener: additionally shields the
// acknowledgement callbacks (mustAck / onAllNodesAcked / onAckTimeout) from
// listener exceptions.
private static class SafeAckedClusterStateTaskListener extends SafeClusterStateTaskListener implements AckedClusterStateTaskListener {
    private final AckedClusterStateTaskListener listener;
    private final Logger logger;
    public SafeAckedClusterStateTaskListener(AckedClusterStateTaskListener listener, Logger logger) {
        super(listener, logger);
        this.listener = listener;
        this.logger = logger;
    }
    @Override
    public boolean mustAck(DiscoveryNode discoveryNode) {
        return listener.mustAck(discoveryNode);
    }
    @Override
    public void onAllNodesAcked(@Nullable Exception e) {
        try {
            listener.onAllNodesAcked(e);
        } catch (Exception inner) {
            // e is @Nullable (it IS null when the update is acked without failures);
            // Throwable.addSuppressed(null) throws NPE, which would mask the listener's
            // own exception — guard against that (bug fix)
            if (e != null) {
                inner.addSuppressed(e);
            }
            logger.error("exception thrown by listener while notifying on all nodes acked", inner);
        }
    }
    @Override
    public void onAckTimeout() {
        try {
            listener.onAckTimeout();
        } catch (Exception e) {
            logger.error("exception thrown by listener while notifying on ack timeout", e);
        }
    }
    @Override
    public TimeValue ackTimeout() {
        return listener.ackTimeout();
    }
}
// A single queued cluster state update: pairs the caller-supplied task state with its
// config, executor and (safe-wrapped) listener. The `processed` flag guards against
// double execution between the batch drain and the timeout callback.
class UpdateTask<T> extends SourcePrioritizedRunnable {
public final T task;
public final ClusterStateTaskConfig config;
public final ClusterStateTaskExecutor<T> executor;
public final ClusterStateTaskListener listener;
public final AtomicBoolean processed = new AtomicBoolean();
UpdateTask(String source, T task, ClusterStateTaskConfig config, ClusterStateTaskExecutor<T> executor,
ClusterStateTaskListener listener) {
super(config.priority(), source);
this.task = task;
this.config = config;
this.executor = executor;
this.listener = listener;
}
@Override
public void run() {
// if this task is already processed, the executor shouldn't execute other tasks (that arrived later),
// to give other executors a chance to execute their tasks.
if (processed.get() == false) {
runTasksForExecutor(executor);
}
}
// logging helper: "[source]" or "[source[task description]]"
public String toString(ClusterStateTaskExecutor<T> executor) {
String taskDescription = executor.describeTasks(Collections.singletonList(task));
if (taskDescription.isEmpty()) {
return "[" + source + "]";
} else {
return "[" + source + "[" + taskDescription + "]]";
}
}
}
/**
 * Emits a warning when a cluster state update took longer than the configured
 * slow-task threshold.
 */
private void warnAboutSlowTaskIfNeeded(TimeValue executionTime, String source) {
    final long thresholdMillis = slowTaskLoggingThreshold.getMillis();
    if (executionTime.getMillis() > thresholdMillis) {
        logger.warn("cluster state update task [{}] took [{}] above the warn threshold of {}", source, executionTime,
            slowTaskLoggingThreshold);
    }
}
// Scheduled callback that fires when a TimeoutClusterStateListener's timeout elapses;
// notifies the listener of either the timeout or service shutdown, unless cancelled first.
class NotifyTimeout implements Runnable {
final TimeoutClusterStateListener listener;
final TimeValue timeout;
// set after construction by the code that schedules this runnable; volatile because
// cancel() and run() may happen on different threads
volatile ScheduledFuture future;
NotifyTimeout(TimeoutClusterStateListener listener, TimeValue timeout) {
this.listener = listener;
this.timeout = timeout;
}
public void cancel() {
FutureUtils.cancel(future);
}
@Override
public void run() {
// cancelled between scheduling and execution: nothing to do
if (future != null && future.isCancelled()) {
return;
}
if (lifecycle.stoppedOrClosed()) {
listener.onClose();
} else {
listener.onTimeout(this.timeout);
}
// note, we rely on the listener to remove itself in case of timeout if needed
}
}
// Tracks whether the local node is currently the elected master and dispatches
// onMaster/offMaster notifications (on each listener's preferred executor) whenever
// that status flips. Registered as a regular ClusterStateListener in doStart().
private static class LocalNodeMasterListeners implements ClusterStateListener {
private final List<LocalNodeMasterListener> listeners = new CopyOnWriteArrayList<>();
private final ThreadPool threadPool;
// last observed master status; only mutated from the cluster state update thread
private volatile boolean master = false;
private LocalNodeMasterListeners(ThreadPool threadPool) {
this.threadPool = threadPool;
}
@Override
public void clusterChanged(ClusterChangedEvent event) {
// transition: not master -> master
if (!master && event.localNodeMaster()) {
master = true;
for (LocalNodeMasterListener listener : listeners) {
Executor executor = threadPool.executor(listener.executorName());
executor.execute(new OnMasterRunnable(listener));
}
return;
}
// transition: master -> not master
if (master && !event.localNodeMaster()) {
master = false;
for (LocalNodeMasterListener listener : listeners) {
Executor executor = threadPool.executor(listener.executorName());
executor.execute(new OffMasterRunnable(listener));
}
}
}
private void add(LocalNodeMasterListener listener) {
listeners.add(listener);
}
private void remove(LocalNodeMasterListener listener) {
listeners.remove(listener);
}
private void clear() {
listeners.clear();
}
}
// Adapter that delivers an onMaster() notification on an arbitrary executor.
private static class OnMasterRunnable implements Runnable {
private final LocalNodeMasterListener listener;
private OnMasterRunnable(LocalNodeMasterListener listener) {
this.listener = listener;
}
@Override
public void run() {
listener.onMaster();
}
}
// Adapter that delivers an offMaster() notification on an arbitrary executor.
private static class OffMasterRunnable implements Runnable {
private final LocalNodeMasterListener listener;
private OffMasterRunnable(LocalNodeMasterListener listener) {
this.listener = listener;
}
@Override
public void run() {
listener.offMaster();
}
}
// Fan-out ack listener that forwards each node ack to every per-task AckCountDownListener.
// NOTE(review): class name is a typo for "DelegatingAckListener"; left as-is because it is
// referenced elsewhere in this file.
private static class DelegetingAckListener implements Discovery.AckListener {
private final List<Discovery.AckListener> listeners;
private DelegetingAckListener(List<Discovery.AckListener> listeners) {
this.listeners = listeners;
}
@Override
public void onNodeAck(DiscoveryNode node, @Nullable Exception e) {
for (Discovery.AckListener listener : listeners) {
listener.onNodeAck(node, e);
}
}
@Override
public void onTimeout() {
// timeouts are handled individually by each AckCountDownListener's scheduled callback
throw new UnsupportedOperationException("no timeout delegation");
}
}
// Counts down node acknowledgements for a single cluster state update and fires the
// wrapped listener's onAllNodesAcked / onAckTimeout exactly once, whichever comes first.
private static class AckCountDownListener implements Discovery.AckListener {
private static final Logger logger = Loggers.getLogger(AckCountDownListener.class);
private final AckedClusterStateTaskListener ackedTaskListener;
private final CountDown countDown;
private final DiscoveryNodes nodes;
private final long clusterStateVersion;
private final Future<?> ackTimeoutCallback;
// last failure reported by any acking node; passed to onAllNodesAcked
private Exception lastFailure;
AckCountDownListener(AckedClusterStateTaskListener ackedTaskListener, long clusterStateVersion, DiscoveryNodes nodes,
ThreadPool threadPool) {
this.ackedTaskListener = ackedTaskListener;
this.clusterStateVersion = clusterStateVersion;
this.nodes = nodes;
// count only the nodes whose ack the listener actually requires
int countDown = 0;
for (DiscoveryNode node : nodes) {
if (ackedTaskListener.mustAck(node)) {
countDown++;
}
}
//we always wait for at least 1 node (the master)
countDown = Math.max(1, countDown);
logger.trace("expecting {} acknowledgements for cluster_state update (version: {})", countDown, clusterStateVersion);
this.countDown = new CountDown(countDown);
// NOTE(review): the timeout is scheduled inside the constructor; if it fired immediately
// it would run onTimeout() before construction completes — presumably safe because
// ackTimeout() is checked to be non-zero by the caller, but worth confirming
this.ackTimeoutCallback = threadPool.schedule(ackedTaskListener.ackTimeout(), ThreadPool.Names.GENERIC, new Runnable() {
@Override
public void run() {
onTimeout();
}
});
}
@Override
public void onNodeAck(DiscoveryNode node, @Nullable Exception e) {
if (!ackedTaskListener.mustAck(node)) {
//we always wait for the master ack anyway
if (!node.equals(nodes.getMasterNode())) {
return;
}
}
if (e == null) {
logger.trace("ack received from node [{}], cluster_state update (version: {})", node, clusterStateVersion);
} else {
// remember the failure but keep counting; the listener decides what a partial failure means
this.lastFailure = e;
logger.debug(
(Supplier<?>) () -> new ParameterizedMessage(
"ack received from node [{}], cluster_state update (version: {})", node, clusterStateVersion),
e);
}
if (countDown.countDown()) {
logger.trace("all expected nodes acknowledged cluster_state update (version: {})", clusterStateVersion);
// all acks in before the timeout: cancel the pending timeout callback
FutureUtils.cancel(ackTimeoutCallback);
ackedTaskListener.onAllNodesAcked(lastFailure);
}
}
@Override
public void onTimeout() {
// fastForward() returns true only for the first caller, guaranteeing single notification
if (countDown.fastForward()) {
logger.trace("timeout waiting for acknowledgement for cluster_state update (version: {})", clusterStateVersion);
ackedTaskListener.onAckTimeout();
}
}
}
/** Returns the dynamic cluster-wide settings service. */
public ClusterSettings getClusterSettings() {
    return this.clusterSettings;
}
/** Returns the node-level settings this service was created with. */
public Settings getSettings() {
    return this.settings;
}
}
| |
package net.engio.mbassy;
import net.engio.mbassy.bus.MBassador;
import net.engio.mbassy.bus.SyncMessageBus;
import net.engio.mbassy.bus.common.GenericMessagePublicationSupport;
import net.engio.mbassy.bus.config.BusConfiguration;
import net.engio.mbassy.bus.config.Feature;
import net.engio.mbassy.bus.config.IBusConfiguration;
import net.engio.mbassy.bus.error.IPublicationErrorHandler;
import net.engio.mbassy.bus.error.PublicationError;
import net.engio.mbassy.common.ConcurrentExecutor;
import net.engio.mbassy.common.ListenerFactory;
import net.engio.mbassy.common.MessageBusTest;
import net.engio.mbassy.common.TestUtil;
import net.engio.mbassy.listener.Handler;
import net.engio.mbassy.listeners.CustomInvocationListener;
import net.engio.mbassy.listeners.ExceptionThrowingListener;
import net.engio.mbassy.listeners.IMessageListener;
import net.engio.mbassy.listeners.MessagesListener;
import net.engio.mbassy.messages.MessageTypes;
import net.engio.mbassy.messages.MultipartMessage;
import net.engio.mbassy.messages.StandardMessage;
import org.junit.Test;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Test synchronous and asynchronous dispatch in single and multi-threaded scenario.
*
* @author bennidi
* Date: 2/8/12
*/
public abstract class SyncBusTest extends MessageBusTest {
/** Creates the bus implementation under test, routing publication errors to the given handler. */
protected abstract GenericMessagePublicationSupport getSyncMessageBus(boolean failOnException, IPublicationErrorHandler errorHandler);
/** Creates the bus implementation under test with default publication error handling. */
protected abstract GenericMessagePublicationSupport getSyncMessageBus(boolean failOnException);
@Test
public void testSynchronousMessagePublication() throws Exception {
final GenericMessagePublicationSupport bus = getSyncMessageBus(true);
// subscribe a mix of enabled and disabled listeners (Object.class instances act as noise)
ListenerFactory listeners = new ListenerFactory()
.create(InstancesPerListener, IMessageListener.DefaultListener.class)
.create(InstancesPerListener, IMessageListener.DisabledListener.class)
.create(InstancesPerListener, MessagesListener.DefaultListener.class)
.create(InstancesPerListener, MessagesListener.DisabledListener.class)
.create(InstancesPerListener, Object.class);
// subscription itself is exercised concurrently
ConcurrentExecutor.runConcurrent(TestUtil.subscriber(bus, listeners), ConcurrentUnits);
Runnable publishAndCheck = new Runnable() {
@Override
public void run() {
StandardMessage standardMessage = new StandardMessage();
MultipartMessage multipartMessage = new MultipartMessage();
bus.post(standardMessage).now();
bus.post(multipartMessage).now();
bus.post(MessageTypes.Simple).now();
bus.post(MessageTypes.Multipart).now();
// per-message instances are fresh each run, so counts are checked inside the runnable
assertEquals(InstancesPerListener, standardMessage.getTimesHandled(IMessageListener.DefaultListener.class));
assertEquals(InstancesPerListener, multipartMessage.getTimesHandled(IMessageListener.DefaultListener.class));
}
};
// single threaded
ConcurrentExecutor.runConcurrent(publishAndCheck, 1);
// multi threaded
MessageTypes.resetAll();
ConcurrentExecutor.runConcurrent(publishAndCheck, ConcurrentUnits);
// shared enum messages accumulate across threads: expect one handling per listener instance per thread
assertEquals(InstancesPerListener * ConcurrentUnits, MessageTypes.Simple.getTimesHandled(IMessageListener.DefaultListener.class));
assertEquals(InstancesPerListener * ConcurrentUnits, MessageTypes.Multipart.getTimesHandled(IMessageListener.DefaultListener.class));
assertEquals(InstancesPerListener * ConcurrentUnits, MessageTypes.Simple.getTimesHandled(MessagesListener.DefaultListener.class));
assertEquals(InstancesPerListener * ConcurrentUnits, MessageTypes.Multipart.getTimesHandled(MessagesListener.DefaultListener.class));
}
@Test
public void testExceptionInHandlerInvocation(){
final AtomicInteger exceptionCount = new AtomicInteger(0);
IPublicationErrorHandler ExceptionCounter = new IPublicationErrorHandler() {
@Override
public void handleError(PublicationError error) {
exceptionCount.incrementAndGet();
}
};
//DS: modified to pass ExceptionCounter via the configuration object
final GenericMessagePublicationSupport bus = getSyncMessageBus(false,ExceptionCounter);
ListenerFactory listeners = new ListenerFactory()
.create(InstancesPerListener, ExceptionThrowingListener.class);
ConcurrentExecutor.runConcurrent(TestUtil.subscriber(bus, listeners), ConcurrentUnits);
Runnable publish = new Runnable() {
@Override
public void run() {
bus.post(new StandardMessage()).now();
}
};
// single threaded
ConcurrentExecutor.runConcurrent(publish, 1);
exceptionCount.set(0);
// multi threaded
ConcurrentExecutor.runConcurrent(publish, ConcurrentUnits);
assertEquals(InstancesPerListener * ConcurrentUnits, exceptionCount.get());
}
@Test
public void testCustomHandlerInvocation(){
final GenericMessagePublicationSupport bus = getSyncMessageBus(true);
ListenerFactory listeners = new ListenerFactory()
.create(InstancesPerListener, CustomInvocationListener.class)
.create(InstancesPerListener, Object.class);
ConcurrentExecutor.runConcurrent(TestUtil.subscriber(bus, listeners), ConcurrentUnits);
Runnable publishAndCheck = new Runnable() {
@Override
public void run() {
StandardMessage standardMessage = new StandardMessage();
MultipartMessage multipartMessage = new MultipartMessage();
bus.post(standardMessage).now();
bus.post(multipartMessage).now();
bus.post(MessageTypes.Simple).now();
assertEquals(InstancesPerListener * 2, standardMessage.getTimesHandled(CustomInvocationListener.class));
assertEquals(0, multipartMessage.getTimesHandled(CustomInvocationListener.class));
assertEquals(0, MessageTypes.Simple.getTimesHandled(CustomInvocationListener.class));
}
};
// single threaded
ConcurrentExecutor.runConcurrent(publishAndCheck, 1);
// multi threaded
ConcurrentExecutor.runConcurrent(publishAndCheck, ConcurrentUnits);
}
@Test
public void testHandlerPriorities(){
final GenericMessagePublicationSupport bus = getSyncMessageBus(true);
ListenerFactory listeners = new ListenerFactory()
.create(InstancesPerListener, PrioritizedListener.class)
.create(InstancesPerListener, Object.class);
ConcurrentExecutor.runConcurrent(TestUtil.subscriber(bus, listeners), ConcurrentUnits);
Runnable publishAndCheck = new Runnable() {
@Override
public void run() {
bus.post(new IncrementingMessage()).now();
}
};
// single threaded
ConcurrentExecutor.runConcurrent(publishAndCheck, 1);
// multi threaded
ConcurrentExecutor.runConcurrent(publishAndCheck, ConcurrentUnits);
}
public static class MBassadorTest extends SyncBusTest {
//DS: added errorHandler parameter to allow adding handler from caller
@Override
protected GenericMessagePublicationSupport getSyncMessageBus(boolean failOnException, IPublicationErrorHandler errorHandler) {
IBusConfiguration asyncFIFOConfig = new BusConfiguration().addPublicationErrorHandler(new AssertionErrorHandler(failOnException));
asyncFIFOConfig.addFeature(Feature.SyncPubSub.Default());
asyncFIFOConfig.addFeature(Feature.AsynchronousHandlerInvocation.Default(1, 1));
asyncFIFOConfig.addFeature(Feature.AsynchronousMessageDispatch.Default().setNumberOfMessageDispatchers(1));
if (errorHandler != null) {
asyncFIFOConfig.addPublicationErrorHandler(errorHandler);
}
return new MBassador(asyncFIFOConfig);
}
@Override
protected GenericMessagePublicationSupport getSyncMessageBus(boolean failOnException) {
return getSyncMessageBus(failOnException, null);
}
}
public static class SyncMessageBusTest extends SyncBusTest {
@Override
protected GenericMessagePublicationSupport getSyncMessageBus(boolean failOnException, IPublicationErrorHandler errorHandler) {
IBusConfiguration syncPubSubCfg = new BusConfiguration().addPublicationErrorHandler(new AssertionErrorHandler(failOnException));
syncPubSubCfg.addFeature(Feature.SyncPubSub.Default());
if (errorHandler != null) {
syncPubSubCfg.addPublicationErrorHandler(errorHandler);
}
return new SyncMessageBus(syncPubSubCfg);
}
@Override
protected GenericMessagePublicationSupport getSyncMessageBus(boolean failOnException) {
return getSyncMessageBus(failOnException, null);
}
}
static class IncrementingMessage{
private int count = 1;
public void markHandled(int newVal){
// only transitions by the next handler are allowed
if(count == newVal || count + 1 == newVal) count = newVal;
else throw new RuntimeException("Message was handled out of order");
}
}
public static class PrioritizedListener{
@Handler(priority = Integer.MIN_VALUE)
public void handle1(IncrementingMessage message) {
message.markHandled(4);
}
@Handler(priority = -2)
public void handle2(IncrementingMessage message) {
message.markHandled(3);
}
@Handler
public void handle3(IncrementingMessage message) {
message.markHandled(2);
}
@Handler(priority = Integer.MAX_VALUE)
public void handle4(IncrementingMessage message) {
message.markHandled(1);
}
}
}
| |
/*
* Copyright (c) 2016, Salesforce.com, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of Salesforce.com nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package com.salesforce.dva.argus.entity;
import java.io.Serializable;
import java.text.MessageFormat;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;
import static com.salesforce.dva.argus.system.SystemAssert.requireArgument;
/**
* Time series annotation entity object. Encapsulates all information needed to represent an annotation.
*
* @author Tom Valine (tvaline@salesforce.com), Bhinav Sura (bhinav.sura@salesforce.com)
*/
@SuppressWarnings("serial")
public class Annotation extends TSDBEntity implements Serializable {

    //~ Instance fields ******************************************************************************************************************************

    private String _type;
    // User-defined key/value payload; deliberately excluded from equals/hashCode.
    private Map<String, String> _fields;
    private Long _timestamp;
    private String _source;
    private String _id;

    //~ Constructors *********************************************************************************************************************************

    /**
     * Creates a new Annotation object. If metric is null, the annotation is global within the specified scope, type and tags. Uniqueness of an
     * annotation within the context of a scope, metric, tags and type is determined by the timestamp, source and ID fields. Multiple annotations can
     * be stored at a single timestamp if the source and ID are unique. If there is an existing annotation having the same source ID at a given
     * timestamp, it will be overwritten with the new value.
     *
     * @param source Used to describe the data source the annotation was collected from (e.g. SPLUNK). Cannot be null or empty.
     * @param id The data source specific ID of the annotation (e.g. ID-82140). Cannot be null or empty.
     * @param type Describes the category for an annotation (e.g. ERELEASE). Cannot be null or empty.
     * @param scope The scope of the annotation object (e.g. NA1). Cannot be null or empty.
     * @param metric The metric name associated with the metric. If not null, cannot be empty.
     * @param timestamp The timestamp at which the annotated event occurs. Can not be null.
     */
    public Annotation(String source, String id, String type, String scope, String metric, Long timestamp) {
        this();
        setSource(source);
        setId(id);
        setTimestamp(timestamp);
        setMetric(metric);
        setScope(scope);
        setType(type);
    }

    /** Creates a new Metric object. */
    protected Annotation() {
        super(null, null);
        _fields = new HashMap<>();
    }

    //~ Methods **************************************************************************************************************************************

    /**
     * Returns the size of the annotation in bytes.
     *
     * <p>NOTE(review): string lengths are counted as one byte per char (UTF-16 code unit),
     * so this is an estimate rather than an exact encoded size — confirm against callers.</p>
     *
     * @return The size in bytes.
     */
    public int computeSizeBytes() {
        int size = computeLength(_source);
        size += computeLength(_id);
        size += computeLength(_type);
        size += computeLength(getScope());
        size += Long.BYTES; // size of timestamp field
        for (Map.Entry<String, String> e : _fields.entrySet()) {
            size += e.getKey().length();
            size += e.getValue().length();
        }
        for (Map.Entry<String, String> e : getTags().entrySet()) {
            size += e.getKey().length();
            size += e.getValue().length();
        }
        size += computeLength(getUid());
        size += computeLength(getMetric());
        return size;
    }

    /** Null-safe length helper for {@link #computeSizeBytes()}. */
    private int computeLength(String s) {
        return s != null ? s.length() : 0;
    }

    /**
     * Returns the source of the annotation.
     *
     * @return The source of the annotation. Will never be null.
     */
    public String getSource() {
        return _source;
    }

    /**
     * Returns the ID of the annotation as indicated by the data source.
     *
     * @return The ID of the annotation. Will never be null.
     */
    public String getId() {
        return _id;
    }

    /**
     * Returns the category of the metric.
     *
     * @return The category of the metric. Will never be null.
     */
    public String getType() {
        return _type;
    }

    /**
     * Returns the user defined fields associated with the annotation. This information can be used to relate information about the annotation such as
     * the event name, the associated user or any other relevant information.
     *
     * @return The user defined fields for the annotation. Will never be null but may be empty.
     */
    public Map<String, String> getFields() {
        return Collections.unmodifiableMap(_fields);
    }

    /**
     * Returns the time stamp of the annotation.
     *
     * @return The time stamp of the annotation. Will never be null.
     */
    public Long getTimestamp() {
        return _timestamp;
    }

    /**
     * Returns the hash code for the annotation, based on the scope, metric, tags, type, source, ID and timestamp.
     * The user defined fields are intentionally not part of the hash code, consistent with {@link #equals(Object)}.
     *
     * @return The hash code for the annotation.
     */
    @Override
    public int hashCode() {
        int hash = 7;

        hash = 71 * hash + super.hashCode();
        hash = 71 * hash + (_type != null ? _type.hashCode() : 0);
        hash = 71 * hash + (_timestamp != null ? _timestamp.hashCode() : 0);
        hash = 71 * hash + (_source != null ? _source.hashCode() : 0);
        hash = 71 * hash + (_id != null ? _id.hashCode() : 0);
        return hash;
    }

    /**
     * Determines if another object is equivalent to this annotation.
     *
     * @param obj The object with which to compare. Can be null.
     *
     * @return True if the object is an annotation having the same scope, metric, tags, type, source, ID and timestamp.
     */
    @Override
    public boolean equals(Object obj) {
        // Identity fast-path; also keeps the method obviously reflexive.
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }

        final Annotation other = (Annotation) obj;

        if (!super.equals(other)) {
            return false;
        }
        if ((_type == null) ? (other._type != null) : !_type.equals(other._type)) {
            return false;
        }
        if ((_timestamp == null) ? (other._timestamp != null) : !_timestamp.equals(other._timestamp)) {
            return false;
        }
        if ((_source == null) ? (other._source != null) : !_source.equals(other._source)) {
            return false;
        }
        if ((_id == null) ? (other._id != null) : !_id.equals(other._id)) {
            return false;
        }
        return true;
    }

    /**
     * Sets the scope of the collection of the annotation.
     *
     * @param scope The scope of the collection. Cannot be null or empty.
     */
    @Override
    public void setScope(String scope) {
        requireArgument(scope != null && !scope.trim().isEmpty(), "Scope cannot be null or empty.");
        super.setScope(scope);
    }

    /**
     * Sets the metric with which the annotation is associated.
     *
     * @param metric The metric with which the annotation is associated. If not null, it cannot be empty.
     */
    @Override
    public void setMetric(String metric) {
        requireArgument(metric == null || !metric.trim().isEmpty(), "Metric can be null, but if specified, cannot be empty");
        super.setMetric(metric);
    }

    /**
     * Sets the category of the metric.
     *
     * @param type The category of the metric. Cannot be null or empty.
     */
    private void setType(String type) {
        requireArgument(type != null && !type.trim().isEmpty(), "Type cannot be null or empty.");
        _type = type;
    }

    /**
     * Sets the time stamp at which the annotation exists.
     *
     * @param timestamp The time stamp for the annotation. Cannot be null.
     */
    public void setTimestamp(Long timestamp) {
        requireArgument(timestamp != null, "Timestamp cannot be null.");
        _timestamp = timestamp;
    }

    /**
     * Sets the source of the annotation.
     *
     * @param source The source of the annotation. Cannot be null or empty.
     */
    private void setSource(String source) {
        requireArgument(source != null && !source.trim().isEmpty(), "Source cannot be null or empty.");
        _source = source;
    }

    /**
     * Sets the ID of the annotation as indicated by the data source.
     *
     * @param id The ID of the annotation. Will never be null.
     */
    private void setId(String id) {
        requireArgument(id != null && !id.trim().isEmpty(), "ID cannot be null or empty.");
        _id = id;
    }

    /**
     * Replaces the user defined fields associated with the annotation. This information can be used to store information about the annotation such as
     * the event name, the associated user or any other relevant information. Existing fields will always be deleted.
     *
     * @param fields The user defined fields. May be null.
     */
    public void setFields(Map<String, String> fields) {
        _fields.clear();
        if (fields != null) {
            _fields.putAll(fields);
        }
    }

    @Override
    public String toString() {
        Object[] params = { getTimestamp(), getScope(), getMetric(), getTags(), getType(), getSource(), getId(), getFields() };
        String format = "timestamp=>{0,number,#}, scope=>{1}, metric=>{2}, tags=>{3}, type=>{4}, source=>{5}, sourceId=>{6}, fields=>{7}";

        return MessageFormat.format(format, params);
    }

    /** Returns a colon-separated identity string: scope:metric:tags:type:timestamp. */
    public static String getIdentifierFieldsAsString(Annotation annotation) {
        return new StringBuilder(annotation.getScope()).append(":").append(annotation.getMetric()).append(":")
                .append(annotation.getTags().toString()).append(":").append(annotation.getType()).append(":").append(annotation.getTimestamp()).toString();
    }
}
/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */
| |
package dshell.internal.lib;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import dshell.lang.Errno;
/**
 * Infers the root cause of a finished child process from its trace log.
 * Implementations return a three-element list: [function name, arguments, errno name].
 */
public interface CauseInferencer {
    // proc supplies the log file path; implementations may also update proc.retValue.
    public ArrayList<String> doInference(SubProc proc);
}
/**
 * CauseInferencer backed by ltrace output. Parses the per-line log produced by
 * "ltrace -S" (library calls plus SYS_* syscalls), rebuilds the call tree under
 * __libc_start_main, and walks it backwards to find the failing syscall that
 * best explains a non-zero exit status.
 */
class CauseInferencer_ltrace implements CauseInferencer {
    private final String mainName = "__libc_start_main";
    private final String unfinished = "<unfinished ...>";
    // ltrace line shapes: "SYS_name(args) = ret", "SYS_name(args <no return ...>",
    // "name(args <unfinished ...>", "name(args) = ret", "<... name resumed> ) = ret"
    private final Pattern syscallPattern = Pattern.compile("(SYS_)(.+)(\\(.*\\))( +)(=)( +)(.+)");
    private final Pattern noreturnSyscallPattern = Pattern.compile("(SYS_)(.+)(\\()(.+)( +)(<no return+)");
    private final Pattern unfinishedFuncPattern = Pattern.compile("(.+)(\\(.*)( +)(" + unfinished + ")");
    private final Pattern funcPattern = Pattern.compile("(.+)(\\(.*\\))( +)(=)( +)(.+)");
    private final Pattern resumedPattern = Pattern.compile("(<.+)( +)(.+)( +)(resumed>.+\\))( +)(=)( +)(.+)");

    /** Library functions whose internal failures should not be reported as the cause. */
    private static enum IgnoreFunction {
        setlocale,
        dcgettext,
        error,
        error_at_line,
        warn,
        vwarn,
        warnx,
        vwarnx,
        err,
        verr,
        errx,
        verrx;

        /** Returns true if funcName exactly matches one of the ignored functions. */
        public static boolean match(String funcName) {
            IgnoreFunction[] values = IgnoreFunction.values();
            for(IgnoreFunction value : values) {
                if(value.name().equals(funcName)) {
                    return true;
                }
            }
            return false;
        }
    }

    private CauseInferencer_ltrace() {  // do nothing
    }

    /**
     * Parses the process's ltrace log, sets proc.retValue to the inferred exit
     * status, and returns [function name, arguments, errno name] for the cause.
     */
    public ArrayList<String> doInference(SubProc proc) {
        String logFilePath = proc.getLogFilePath();
        ArrayList<String[]> lineList = new ArrayList<String[]>();
        try {
            // NOTE(review): not closed on the error paths below, but both paths
            // call System.exit, so no descriptor outlives the failure.
            BufferedReader br = new BufferedReader(new FileReader(logFilePath));
            String line;
            boolean foundMain = false;
            // Skip everything before __libc_start_main; keep the rest as [pid, body] pairs.
            while((line = br.readLine()) != null) {
                String[] splittedLine = this.splitLine(line);
                if(foundMain || splittedLine[1].startsWith(mainName)) {
                    foundMain = true;
                    lineList.add(splittedLine);
                }
            }
            br.close();
        }
        catch (FileNotFoundException e) {
            e.printStackTrace();
            System.exit(1);
        }
        catch (IOException e) {
            e.printStackTrace();
            System.exit(1);
        }
        FunctionContext topLevelContext = createTopLevelFuncContext(lineList);
        proc.retValue = Integer.parseInt(topLevelContext.getRetValue());
        return this.findCauseInfo(topLevelContext);
    }

    /**
     * Splits a raw ltrace line into {pid, rest}. The line must begin with the
     * pid digits followed by a space; anything else is fatal.
     */
    private String[] splitLine(String line) {
        StringBuilder pidBuilder = new StringBuilder();
        int startIndex = 0;
        int size = line.length();
        for(int i = 0; i < size; i++) {
            char ch = line.charAt(i);
            if(Character.isDigit(ch)) {
                startIndex++;
                pidBuilder.append(ch);
            }
            else if(ch == ' ') {
                startIndex++;
                break;
            }
            else {
                Utils.fatal(1, "invalid line: " + line);
            }
        }
        return new String[] {pidBuilder.toString(), line.substring(startIndex, size)};
    }

    /**
     * Builds the root FunctionContext for __libc_start_main and consumes lines
     * until its return value (the process exit status) is known.
     */
    private FunctionContext createTopLevelFuncContext(final ArrayList<String[]> lineList) {
        if(lineList.size() == 0) {
            Utils.fatal(1, "empty lineList");
        }
        String[] parsedInfo = lineList.get(0);
        int pid = Integer.parseInt(parsedInfo[0]);
        FunctionContext context = new FunctionContext(mainName, pid, null);
        int index = 1;
        do {
            index = this.createFuncContext(lineList, context, index);
        } while(index != -1 && context.getRetValue() == null);
        return context;
    }

    /**
     * Consumes one logical entry at lineList[index], attaching the resulting
     * context to parentContext. Returns the next index to read, or -1 on a
     * parse dead end. Recurses for "<unfinished ...>" calls until the matching
     * "resumed" line supplies their return value.
     */
    private int createFuncContext(final ArrayList<String[]> lineList, final FunctionContext parentContext, final int index) {
        if(index >= lineList.size()) {
            Utils.fatal(1, "index = " + index + ", size = " + lineList.size());
        }
        String[] parsedInfo = lineList.get(index);
        String calledFunc = parsedInfo[1];
        if(calledFunc.startsWith("SYS_")) {
            if(this.isExitSyscall(calledFunc)) {
                // exit_group carries the process exit status as its argument
                SyscallContext exitContext = this.matchNoReturnSyscall(parsedInfo);
                if(parentContext.funcName.equals(mainName)) {
                    parentContext.setRetValue(exitContext.param);
                    return index;
                }
                Utils.fatal(1, "invalid funcname: " + parentContext.funcName + ", " + calledFunc);
            }
            parentContext.appendFuncContext(this.matchSyscall(parsedInfo));
            return index + 1;
        }
        if(calledFunc.startsWith("--")) {
            // signal / event marker line: skip
            return index + 1;
        }
        if(calledFunc.startsWith("++")) {
            Utils.fatal(1, "match: " + calledFunc);
        }
        if(!calledFunc.startsWith("<")) {
            if(this.isExitFunction(calledFunc)) {
                FunctionContext exitContext = this.matchUnfinishedFunc(parsedInfo);
                if(parentContext.funcName.equals(mainName)) {
                    if(calledFunc.startsWith("exit")) {
                        parentContext.setRetValue(exitContext.param);
                    }
                    else {
                        // error()/err() family: the first argument is the exit status
                        parentContext.setRetValue(exitContext.param.split(", ")[0]);
                    }
                    return index;
                }
                Utils.fatal(1, "invalid funcname: " + parentContext.funcName + ", " + calledFunc);
            }
            if(calledFunc.endsWith(unfinished)) {
                FunctionContext unfinishedContext = this.matchUnfinishedFunc(parsedInfo);
                int localIndex = index + 1;
                do {
                    localIndex = this.createFuncContext(lineList, unfinishedContext, localIndex);
                } while(localIndex != -1 && unfinishedContext.getRetValue() == null);
                parentContext.appendFuncContext(unfinishedContext);
                return localIndex;
            }
            parentContext.appendFuncContext(this.matchFunc(parsedInfo));
            return index + 1;
        }
        else {
            // "<... name resumed> ...) = ret" closes the pending parent call
            Matcher matcher = resumedPattern.matcher(calledFunc);
            if(!matcher.find()) {
                Utils.fatal(1, "not match: " + calledFunc);
            }
            if(matcher.group(3).equals(parentContext.funcName)) {
                String ret = matcher.group(9);
                parentContext.setRetValue(ret);
                return index + 1;
            }
        }
        Utils.fatal(1, "not match: " + calledFunc);
        return -1;
    }

    /** Parses a completed "SYS_name(args) = ret" line. */
    private SyscallContext matchSyscall(String[] parsedInfo) {
        int pid = Integer.parseInt(parsedInfo[0]);
        Matcher matcher = syscallPattern.matcher(parsedInfo[1]);
        if(!matcher.find()) {
            Utils.fatal(1, "not match: " + parsedInfo[1]);
        }
        String syscallName = matcher.group(2);
        String param = matcher.group(3);
        String actualParam = param.substring(1, param.length() - 1);  // strip surrounding parens
        SyscallContext context = new SyscallContext(syscallName, pid, actualParam);
        context.setRetValue(matcher.group(7));
        return context;
    }

    /** Parses a "SYS_name(args <no return ...>" line (e.g. exit_group). */
    private SyscallContext matchNoReturnSyscall(String[] parsedInfo) {
        int pid = Integer.parseInt(parsedInfo[0]);
        Matcher matcher = noreturnSyscallPattern.matcher(parsedInfo[1]);
        if(!matcher.find()) {
            Utils.fatal(1, "not match: " + parsedInfo[1]);
        }
        String syscallName = matcher.group(2);
        String param = matcher.group(4);
        SyscallContext context = new SyscallContext(syscallName, pid, param);
        return context;
    }

    /** Parses a "name(args <unfinished ...>" line into a still-open FunctionContext. */
    private FunctionContext matchUnfinishedFunc(String[] parsedInfo) {
        int pid = Integer.parseInt(parsedInfo[0]);
        Matcher matcher = unfinishedFuncPattern.matcher(parsedInfo[1]);
        if(!matcher.find()) {
            Utils.fatal(1, "not match: " + parsedInfo[1]);
        }
        String funcName = matcher.group(1);
        String param = matcher.group(2).substring(1);  // drop leading '('
        return new FunctionContext(funcName, pid, param);
    }

    /** Parses a completed "name(args) = ret" line. */
    private FunctionContext matchFunc(String[] parsedInfo) {
        int pid = Integer.parseInt(parsedInfo[0]);
        Matcher matcher = funcPattern.matcher(parsedInfo[1]);
        if(!matcher.find()) {
            Utils.fatal(1, "not match: " + parsedInfo[1]);
        }
        String funcName = matcher.group(1);
        String param = matcher.group(2);
        String actualParam = param.substring(1, param.length() - 1);
        String ret = matcher.group(6);
        FunctionContext context = new FunctionContext(funcName, pid, actualParam);
        context.setRetValue(ret);
        return context;
    }

    /**
     * Converts the call tree into the three-element cause description:
     * exit status 0 -> SUCCESS, no failing syscall found -> LAST_ELEMENT,
     * otherwise the failing syscall's name, arguments and errno.
     */
    private ArrayList<String> findCauseInfo(FunctionContext context) {
        ArrayList<String> causeInfo = new ArrayList<String>();
        if(context.getRetValue().equals("0")) {
            causeInfo.add("empty");
            causeInfo.add("empty");
            causeInfo.add(Errno.SUCCESS.name());
        }
        else {
            SyscallContext causedContext = this.findCausedContext(context);
            if(causedContext == null) {
                causeInfo.add("empty");
                causeInfo.add("empty");
                causeInfo.add(Errno.LAST_ELEMENT.name());
            }
            else {
                causeInfo.add(causedContext.funcName);
                causeInfo.add(causedContext.param);
                // syscalls report failure as -errno; negate to recover the errno value
                String errnoString = Errno.toErrno((int)(-1 * causedContext.getExitStatus())).name();
                causeInfo.add(errnoString);
            }
        }
        return causeInfo;
    }

    /**
     * Walks the children of parentContext from last to first and returns the most
     * recent failing syscall, descending into failing (non-ignored) function calls.
     */
    private SyscallContext findCausedContext(FunctionContext parentContext) {
        int size = parentContext.getFuncContextList().size();
        for(int i = size - 1; i > -1; i--) {
            FuncContextStub localContext = parentContext.getFuncContext(i);
            if(localContext.failed) {
                if(localContext instanceof SyscallContext) {
                    return (SyscallContext)localContext;
                }
                if(localContext instanceof FunctionContext && !IgnoreFunction.match(localContext.funcName)) {
                    return this.findCausedContext((FunctionContext)localContext);
                }
            }
        }
        return null;
    }

    /**
     * Returns true if the traced call terminates the process: exit*, or a member
     * of the error()/err() family invoked with a non-zero status argument.
     *
     * FIX: the prefix checks are ordered longest-first. Previously
     * startsWith("error") shadowed "error_at_line", and startsWith("err")
     * shadowed "errx" (likewise "verr" shadowed "verrx"), so the "(0" status
     * exemption for the longer names could never be applied.
     */
    private boolean isExitFunction(String calledFunc) {
        if(calledFunc.startsWith("exit")) {
            return true;
        }
        if(calledFunc.startsWith("error_at_line")) {
            return !calledFunc.startsWith("error_at_line(0");
        }
        if(calledFunc.startsWith("error")) {
            return !calledFunc.startsWith("error(0");
        }
        if(calledFunc.startsWith("verrx")) {
            return !calledFunc.startsWith("verrx(0");
        }
        if(calledFunc.startsWith("verr")) {
            return !calledFunc.startsWith("verr(0");
        }
        if(calledFunc.startsWith("errx")) {
            return !calledFunc.startsWith("errx(0");
        }
        if(calledFunc.startsWith("err")) {
            return !calledFunc.startsWith("err(0");
        }
        return false;
    }

    /** Returns true for the process-terminating exit_group syscall. */
    private boolean isExitSyscall(String calledFunc) {
        if(calledFunc.startsWith("SYS_exit_group")) {
            return true;
        }
        return false;
    }

    /** Lazy-initialization holder for the singleton instance. */
    private static class Holder {
        private static final CauseInferencer inferencer = new CauseInferencer_ltrace();
    }

    public static CauseInferencer getInferencer() {
        return Holder.inferencer;
    }
}
/**
 * Base record of a single traced call: which function ran, in which process,
 * with what arguments, and (once known) what it returned.
 */
class FuncContextStub {
    public final String funcName;
    public final int pid;
    public final String param;

    /** Return value as reported by the tracer; null until the call completes. */
    private String retValue = null;

    /** Marks the call as failed; subclasses and owners may set this. */
    public boolean failed = false;

    public FuncContextStub(String name, int processId, String arguments) {
        funcName = name;
        pid = processId;
        param = arguments;
    }

    public void setRetValue(String value) {
        retValue = value;
    }

    public String getRetValue() {
        return retValue;
    }

    @Override
    public String toString() {
        return funcName;
    }
}
/**
 * A traced library-function call that may contain nested calls
 * (both syscalls and other library functions).
 */
class FunctionContext extends FuncContextStub {
    private final ArrayList<FuncContextStub> funcContextList = new ArrayList<FuncContextStub>();

    public FunctionContext(String funcName, int pid, String param) {
        super(funcName, pid, param);
    }

    /** Records a nested call; a failing child marks this call as failed too. */
    public void appendFuncContext(FuncContextStub child) {
        funcContextList.add(child);
        failed = failed || child.failed;
    }

    public ArrayList<FuncContextStub> getFuncContextList() {
        return funcContextList;
    }

    public FuncContextStub getFuncContext(int index) {
        return funcContextList.get(index);
    }
}
/**
 * A traced system call. The return value is normalised to a decimal long;
 * a negative decimal return marks the call as failed (-errno convention).
 */
class SyscallContext extends FuncContextStub {

    public SyscallContext(String funcName, int pid, String param) {
        super(funcName, pid, param);
    }

    @Override
    public void setRetValue(String retValue) {
        final long value;
        if(retValue.startsWith("0x")) {
            // hex returns (pointers etc.) are parsed but never treated as failures
            value = Long.parseLong(retValue.substring(2), 16);
        }
        else {
            if(retValue.startsWith("-")) {
                failed = true;
            }
            value = Long.parseLong(retValue);
        }
        super.setRetValue(String.valueOf(value));
    }

    public String getSyscallName() {
        return funcName;
    }

    public long getExitStatus() {
        return Long.parseLong(getRetValue());
    }

    @Override
    public String toString() {
        return "SYS_" + super.toString() + (failed ? " :" + getExitStatus() : "");
    }
}
| |
/*
* The MIT License (MIT)
*
* Copyright (c) 2007-2015 Broad Institute
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package org.broad.igv.ui.util;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.broad.igv.Globals;
import org.broad.igv.ui.IGV;
import javax.swing.*;
import java.awt.*;
import java.lang.reflect.InvocationTargetException;
/**
* Provides thread-safe, Swing-safe, utilities for interacting with JOptionPane. Accounts for
* (1) Swing is not thread safe => synchronize access
* (2) JOptionPane methods must be invoked on event dispatch thread
*
* @author jrobinso
*/
public class MessageUtils {
private static Logger log = Logger.getLogger(MessageUtils.class);
// Somewhat silly class, needed to pass values between threads
static class ValueHolder {
    // Result written by the EDT runnable and read back by the calling thread.
    Object value;
}
/**
 * Log the exception and show {@code message} to the user.
 *
 * @param message text shown to the user (also logged at ERROR level)
 * @param e       the exception logged alongside the message
 */
public static void showErrorMessage(String message, Exception e) {
    log.error(message, e);
    showMessage(Level.ERROR, message);
}
/** Shows {@code message} to the user, logging it at INFO level. */
public static void showMessage(String message) {
    showMessage(Level.INFO, message);
}
/**
 * Logs {@code message} at the given level and, unless running headless, in batch
 * mode, testing, or with messages suppressed, shows it in a dialog on the EDT.
 */
public static synchronized void showMessage(Level level, String message) {
    log.log(level, message);
    if (Globals.isHeadless() || Globals.isSuppressMessages() || Globals.isTesting() || Globals.isBatch()) {
        return;
    }
    UIUtilities.invokeOnEventThread(() -> {
        // Always use HTML for message displays, but first remove any embedded <html> tags.
        String htmlMessage = "<html>" + message.replaceAll("<html>", "");
        Frame owner = IGV.hasInstance() ? IGV.getMainFrame() : null;
        Color bg = owner == null ? Color.lightGray : owner.getBackground();
        // JEditorPane so users can select/copy the text
        JEditorPane editor = new JEditorPane();
        editor.setContentType("text/html");
        editor.setText(htmlMessage);
        editor.setBackground(bg);
        editor.setEditable(false);
        Component body = editor;
        // Really long messages should be scrollable
        if (htmlMessage.length() > 200) {
            editor.setPreferredSize(new Dimension(1000, editor.getHeight() + 100));
            body = new JScrollPane(editor);
        }
        JOptionPane.showMessageDialog(owner, body);
    });
}
/** Logs {@code message} at DEBUG and, if the IGV UI exists, shows it in the status bar. */
public static void setStatusBarMessage(final String message) {
    log.debug("Status bar: " + message);
    if (IGV.hasInstance()) {
        IGV.getInstance().setStatusBarMessage(message);
    }
}
/**
 * Shows a yes/no confirmation dialog parented to the main IGV frame.
 * Headless and batch runs auto-confirm; headless additionally logs the prompt.
 *
 * @param message the question to confirm
 * @return true if the user chose yes (always true when headless or in batch mode)
 */
public static synchronized boolean confirm(final String message) {
    if(Globals.isHeadless()){
        log.error("Attempted to confirm while running headless with the following message:\n" + message);
        return true;
    }
    if(Globals.isBatch()) {
        return true;
    }
    final Frame parent = IGV.hasInstance() ? IGV.getMainFrame() : null;
    return confirm(parent, message);
}
/**
 * Show a yes/no confirmation dialog. Safe to call from any thread; when called
 * off the EDT the dialog is shown via {@code SwingUtilities.invokeAndWait}.
 *
 * @param component parent component for the dialog; may be null
 * @param message   the question to confirm
 * @return true if the user chose yes (always true when headless or in batch mode)
 */
public static synchronized boolean confirm(final Component component, final String message) {
    if(Globals.isHeadless() || Globals.isBatch()) {
        return true;
    }
    if (SwingUtilities.isEventDispatchThread()) {
        int opt = JOptionPane.showConfirmDialog(component, message, "Confirm", JOptionPane.YES_NO_OPTION);
        return opt == JOptionPane.YES_OPTION;
    } else {
        final ValueHolder returnValue = new ValueHolder();
        Runnable runnable = new Runnable() {
            public void run() {
                int opt = JOptionPane.showConfirmDialog(component, message, "Confirm", JOptionPane.YES_NO_OPTION);
                returnValue.value = (opt == JOptionPane.YES_OPTION);
            }
        };
        try {
            SwingUtilities.invokeAndWait(runnable);
        } catch (InterruptedException e) {
            // Restore the interrupt status so callers can still observe it.
            Thread.currentThread().interrupt();
            log.error("Error in confirm", e);  // was "Error in showMessage" (copy-paste)
            throw new RuntimeException(e);
        } catch (InvocationTargetException e) {
            log.error("Error in confirm", e);  // was "Error in showMessage" (copy-paste)
            throw new RuntimeException(e.getCause());
        }
        return (Boolean) (returnValue.value);
    }
}
/**
 * Shows an input dialog with a default value, parented to the main IGV frame.
 * Safe to call from any thread.
 *
 * @param message      prompt text; padded with spaces to at least the width of defaultValue
 * @param defaultValue initial value shown in the input field
 * @return the entered string, or null if the dialog was cancelled
 */
public static String showInputDialog(String message, final String defaultValue) {
    final Frame parent = IGV.hasInstance() ? IGV.getMainFrame() : null;

    //Pad message with spaces so it's as wide as the defaultValue
    if(message.length() < defaultValue.length()){
        message = String.format("%-" + defaultValue.length() + "s", message);
    }
    final String actMsg = message;

    if (SwingUtilities.isEventDispatchThread()) {
        String val = JOptionPane.showInputDialog(parent, actMsg, defaultValue);
        return val;
    } else {
        final ValueHolder returnValue = new ValueHolder();
        Runnable runnable = new Runnable() {
            public void run() {
                String val = JOptionPane.showInputDialog(parent, actMsg, defaultValue);
                returnValue.value = val;
            }
        };
        try {
            SwingUtilities.invokeAndWait(runnable);
        } catch (InterruptedException e) {
            // Restore the interrupt status so callers can still observe it.
            Thread.currentThread().interrupt();
            log.error("Error in showInputDialog", e);  // was "Error in showMessage" (copy-paste)
            throw new RuntimeException(e);
        } catch (InvocationTargetException e) {
            log.error("Error in showInputDialog", e);  // was "Error in showMessage" (copy-paste)
            throw new RuntimeException(e.getCause());
        }
        return (String) (returnValue.value);
    }
}
/**
 * Prompt the user for a string value. Safe to call from any thread: when
 * invoked off the EDT the dialog is displayed via {@code SwingUtilities.invokeAndWait}.
 *
 * @param message prompt text
 * @return the entered value, or {@code null} if the dialog was cancelled
 */
public static String showInputDialog(final String message) {
    final Frame parent = IGV.hasInstance() ? IGV.getMainFrame() : null;

    if (SwingUtilities.isEventDispatchThread()) {
        return JOptionPane.showInputDialog(parent, message);
    } else {
        final ValueHolder returnValue = new ValueHolder();
        Runnable runnable = new Runnable() {
            public void run() {
                returnValue.value = JOptionPane.showInputDialog(parent, message);
            }
        };
        try {
            SwingUtilities.invokeAndWait(runnable);
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can still observe the interruption.
            Thread.currentThread().interrupt();
            log.error("Error in showInputDialog", e);
            throw new RuntimeException(e);
        } catch (InvocationTargetException e) {
            // Unwrap: the cause is the exception thrown inside the dialog runnable.
            log.error("Error in showInputDialog", e);
            throw new RuntimeException(e.getCause());
        }
        return (String) (returnValue.value);
    }
}
/**
 * Test program - exercises every dialog helper from both the main thread and
 * the Swing event dispatch thread.
 *
 * @param args unused
 * @throws Exception on unexpected failure
 */
public static void main(String[] args) throws Exception {
    Runnable exerciseAll = () -> {
        showMessage("showMessage");
        confirm("confirm");
        confirm(null, "confirm with parent");
        showInputDialog("showInputDialog", "default");
        showInputDialog("showInputDialog");
    };

    // First from the calling (main) thread...
    exerciseAll.run();

    // ...then from the Swing event dispatch thread.
    SwingUtilities.invokeLater(exerciseAll);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.spi.discovery.tcp.ipfinder.cloud;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Properties;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import com.google.common.base.Charsets;
import com.google.common.base.Predicate;
import com.google.common.base.Supplier;
import com.google.common.io.Files;
import org.apache.ignite.internal.IgniteInterruptedCheckedException;
import org.apache.ignite.internal.util.tostring.GridToStringExclude;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.spi.IgniteSpiConfiguration;
import org.apache.ignite.spi.IgniteSpiException;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinderAdapter;
import org.jclouds.Constants;
import org.jclouds.ContextBuilder;
import org.jclouds.compute.ComputeService;
import org.jclouds.compute.ComputeServiceContext;
import org.jclouds.compute.domain.ComputeMetadata;
import org.jclouds.compute.domain.NodeMetadata;
import org.jclouds.domain.Credentials;
import org.jclouds.domain.Location;
import org.jclouds.googlecloud.GoogleCredentialsFromJson;
import org.jclouds.location.reference.LocationConstants;
/**
* IP finder for automatic lookup of nodes running in a cloud.
* <p>
* Implementation is based on Apache jclouds multi-cloud toolkit.
* For information about jclouds visit <a href="https://jclouds.apache.org/">jclouds.apache.org</a>.
* <h1 class="header">Configuration</h1>
* <h2 class="header">Mandatory</h2>
* <ul>
* <li>Cloud provider (see {@link #setProvider(String)})</li>
* <li>Identity (see {@link #setIdentity(String)})</li>
* </ul>
* <h2 class="header">Optional</h2>
* <ul>
* <li>Credential (see {@link #setCredential(String)})</li>
* <li>Credential path (see {@link #setCredentialPath(String)}</li>
* <li>Regions (see {@link #setRegions(Collection)})</li>
* <li>Zones (see {@link #setZones(Collection)}</li>
* </ul>
* </p>
* <p>
* The finder forms nodes addresses, that possibly running Ignite, by getting private and public IPs of all
* VMs in a cloud and adding a port number to them.
* The port is either the one that is set with {@link TcpDiscoverySpi#setLocalPort(int)} or
* {@link TcpDiscoverySpi#DFLT_PORT}.
* Make sure that all VMs start Ignite instances on the same port, otherwise they will not be able to discover each
* other using this IP finder.
* </p>
* <p>
* Both {@link #registerAddresses(Collection)} and {@link #unregisterAddresses(Collection)} have no effect.
* </p>
* <p>
* Note, this finder is only workable when it used directly by cloud VM.
* Choose another implementation of {@link org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder} for local
* or home network tests.
* </p>
* <h2 class="header">Java Example</h2>
* <pre name="code" class="java">
* String accountId = "your_account_id";
* String accountKey = "your_account_key";
*
* TcpDiscoveryCloudIpFinder ipFinder = new TcpDiscoveryCloudIpFinder();
*
* ipFinder.setProvider("aws-ec2");
* ipFinder.setIdentity(accountId);
* ipFinder.setCredential(accountKey);
* ipFinder.setRegions(Arrays.asList("us-east-1"));
* ipFinder.setZones(Arrays.asList("us-east-1b", "us-east-1e"));
* </pre>
* <h2 class="header">Spring Example</h2>
* TcpDiscoveryCloudIpFinder can be configured from Spring XML configuration file:
* <pre name="code" class="xml">
* <bean id="grid.custom.cfg" class="org.apache.ignite.configuration.IgniteConfiguration" singleton="true">
* ...
* <property name="discoverySpi">
* <bean class="org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi">
* <property name="ipFinder">
* <bean class="org.apache.ignite.spi.discovery.tcp.ipfinder.cloud.TcpDiscoveryCloudIpFinder">
* <property name="provider" value="google-compute-engine"/>
* <property name="identity" value="your_service_account_email"/>
* <property name="credentialPath" value="path_to_json_key"/>
* <property name="zones">
* <list>
* <value>us-central1-a</value>
* <value>asia-east1-a</value>
* </list>
* </property>
* </bean>
* </property>
*
* <property name="socketTimeout" value="400"/>
* </bean>
* </property>
* ...
* </bean>
* </pre>
* <p>
* <img src="http://ignite.apache.org/images/spring-small.png">
* <br>
* For information about Spring framework visit <a href="http://www.springframework.org/">www.springframework.org</a>
*/
public class TcpDiscoveryCloudIpFinder extends TcpDiscoveryIpFinderAdapter {
    /** JCloud default connection timeout. */
    private static final String JCLOUD_CONNECTION_TIMEOUT = "10000"; //10 secs

    /** Cloud provider. */
    private String provider;

    /** Cloud specific identity (user name, email address, etc.). */
    private String identity;

    /** Cloud specific credential (password, access key, etc.). Excluded from toString to avoid leaking secrets. */
    @GridToStringExclude
    private String credential;

    /** Path to a cloud specific credential. Excluded from toString to avoid leaking secrets. */
    @GridToStringExclude
    private String credentialPath;

    /** Regions where VMs are located. */
    private TreeSet<String> regions;

    /** Zones where VMs are located. */
    private TreeSet<String> zones;

    /** Nodes filter by regions and zones. Built lazily in {@link #initComputeService()}; null when no filter configured. */
    private Predicate<ComputeMetadata> nodesFilter;

    /** Init guard. The thread winning this CAS performs one-time initialization. */
    @GridToStringExclude
    private final AtomicBoolean initGuard = new AtomicBoolean();

    /** Init latch. Released when initialization finishes (successfully or not). */
    @GridToStringExclude
    private final CountDownLatch initLatch = new CountDownLatch(1);

    /** JCloud compute service. */
    private ComputeService computeService;

    /**
     * Constructor. The finder is shared by default.
     */
    public TcpDiscoveryCloudIpFinder() {
        setShared(true);
    }

    /** {@inheritDoc} */
    @Override public Collection<InetSocketAddress> getRegisteredAddresses() throws IgniteSpiException {
        initComputeService();

        Collection<InetSocketAddress> addresses = new LinkedList<>();

        try {
            Set<NodeMetadata> nodes;

            // When a region/zone filter was configured, let jclouds filter during the listing;
            // otherwise fetch detailed metadata for every node individually.
            if (nodesFilter != null)
                nodes = (Set<NodeMetadata>)computeService.listNodesDetailsMatching(nodesFilter);
            else {
                nodes = new HashSet<>();

                for (ComputeMetadata metadata : computeService.listNodes())
                    nodes.add(computeService.getNodeMetadata(metadata.getId()));
            }

            for (NodeMetadata metadata : nodes) {
                // Only running VMs can host a reachable Ignite node.
                if (metadata.getStatus() != NodeMetadata.Status.RUNNING)
                    continue;

                // Port 0 is a placeholder: the discovery SPI substitutes its configured
                // local port or the default port (see class-level javadoc).
                for (String addr : metadata.getPrivateAddresses())
                    addresses.add(new InetSocketAddress(addr, 0));

                for (String addr : metadata.getPublicAddresses())
                    addresses.add(new InetSocketAddress(addr, 0));
            }
        }
        catch (Exception e) {
            throw new IgniteSpiException("Failed to get registered addresses for the provider: " + provider, e);
        }

        return addresses;
    }

    /** {@inheritDoc} */
    @Override public void registerAddresses(Collection<InetSocketAddress> addrs) throws IgniteSpiException {
        // No-op: addresses are discovered dynamically from the cloud provider.
    }

    /** {@inheritDoc} */
    @Override public void unregisterAddresses(Collection<InetSocketAddress> addrs) throws IgniteSpiException {
        // No-op: addresses are discovered dynamically from the cloud provider.
    }

    /**
     * Sets the cloud provider to use.
     *
     * <a href="https://jclouds.apache.org/reference/providers/#compute">Apache jclouds providers list</a> from
     * ComputeService section contains names of all supported providers.
     *
     * @param provider Provider name.
     * @return {@code this} for chaining.
     */
    @IgniteSpiConfiguration(optional = false)
    public TcpDiscoveryCloudIpFinder setProvider(String provider) {
        this.provider = provider;

        return this;
    }

    /**
     * Sets the identity that is used as a user name during a connection to the cloud.
     * Depending on a cloud platform it can be an email address, user name, etc.
     *
     * Refer to <a href="http://jclouds.apache.org/guides/">Apache jclouds guide</a> to get concrete information on
     * what is used as an identity for a particular cloud platform.
     *
     * @param identity Identity to use during authentication on the cloud.
     * @return {@code this} for chaining.
     */
    @IgniteSpiConfiguration(optional = false)
    public TcpDiscoveryCloudIpFinder setIdentity(String identity) {
        this.identity = identity;

        return this;
    }

    /**
     * Sets credential that is used during authentication on the cloud.
     * Depending on a cloud platform it can be a password or access key.
     *
     * Refer to <a href="http://jclouds.apache.org/guides/">Apache jclouds guide</a> to get concrete information on
     * what is used as a credential for a particular cloud platform.
     *
     * @param credential Credential to use during authentication on the cloud.
     * @return {@code this} for chaining.
     */
    @IgniteSpiConfiguration(optional = true)
    public TcpDiscoveryCloudIpFinder setCredential(String credential) {
        this.credential = credential;

        return this;
    }

    /**
     * Sets the path to a credential that is used during authentication on the cloud.
     *
     * This method should be used when an access key or private key is stored in a file.
     * Content of the file, referred by {@code credentialPath}, is fully read and used as an access key or private key
     * during authentication.
     *
     * Refer to <a href="http://jclouds.apache.org/guides/">Apache jclouds guide</a> to get concrete information on
     * what is used as a credential for a particular cloud platform.
     *
     * @param credentialPath Path to the credential to use during authentication on the cloud.
     * @return {@code this} for chaining.
     */
    @IgniteSpiConfiguration(optional = true)
    public TcpDiscoveryCloudIpFinder setCredentialPath(String credentialPath) {
        this.credentialPath = credentialPath;

        return this;
    }

    /**
     * Sets list of zones where VMs are located.
     *
     * If the zones are not set then every zone from regions, set by {@link #setRegions(Collection)}}, will be
     * taken into account.
     *
     * Note, that some cloud providers, like Rackspace, don't have a notion of a zone. For such
     * providers a call to this method is redundant.
     *
     * @param zones Zones where VMs are located or null if to take every zone into account.
     * @return {@code this} for chaining.
     */
    @IgniteSpiConfiguration(optional = true)
    public TcpDiscoveryCloudIpFinder setZones(Collection<String> zones) {
        // An empty/null collection leaves the field null, i.e. "no zone restriction".
        if (!F.isEmpty(zones))
            this.zones = new TreeSet<>(zones);

        return this;
    }

    /**
     * Sets list of regions where VMs are located.
     *
     * If the regions are not set then every region, that a cloud provider has, will be investigated. This could lead
     * to significant performance degradation.
     *
     * Note, that some cloud providers, like Google Compute Engine, don't have a notion of a region. For such
     * providers a call to this method is redundant.
     *
     * @param regions Regions where VMs are located or null if to check every region a provider has.
     * @return {@code this} for chaining.
     */
    @IgniteSpiConfiguration(optional = true)
    public TcpDiscoveryCloudIpFinder setRegions(Collection<String> regions) {
        // An empty/null collection leaves the field null, i.e. "no region restriction".
        if (!F.isEmpty(regions))
            this.regions = new TreeSet<>(regions);

        return this;
    }

    /**
     * Initializes Apache jclouds compute service.
     *
     * Thread-safe: the first caller (winning the {@code initGuard} CAS) performs the
     * initialization; all other callers block on {@code initLatch} until it completes.
     */
    private void initComputeService() {
        if (initGuard.compareAndSet(false, true))
            try {
                // Validate mandatory/conflicting configuration before touching jclouds.
                if (provider == null)
                    throw new IgniteSpiException("Cloud provider is not set.");

                if (identity == null)
                    throw new IgniteSpiException("Cloud identity is not set.");

                if (credential != null && credentialPath != null)
                    throw new IgniteSpiException("Both credential and credentialPath are set. Use only one method.");

                if (credentialPath != null)
                    credential = getCredentialFromFile();

                try {
                    ContextBuilder ctxBuilder = ContextBuilder.newBuilder(provider);

                    ctxBuilder.credentials(identity, credential);

                    Properties properties = new Properties();
                    properties.setProperty(Constants.PROPERTY_SO_TIMEOUT, JCLOUD_CONNECTION_TIMEOUT);
                    properties.setProperty(Constants.PROPERTY_CONNECTION_TIMEOUT, JCLOUD_CONNECTION_TIMEOUT);

                    // Narrow the lookup scope when regions/zones were configured.
                    if (!F.isEmpty(regions))
                        properties.setProperty(LocationConstants.PROPERTY_REGIONS, keysSetToStr(regions));

                    if (!F.isEmpty(zones))
                        properties.setProperty(LocationConstants.PROPERTY_ZONES, keysSetToStr(zones));

                    ctxBuilder.overrides(properties);

                    computeService = ctxBuilder.buildView(ComputeServiceContext.class).getComputeService();

                    if (!F.isEmpty(zones) || !F.isEmpty(regions)) {
                        // Accept a node unless its resolved region/zone explicitly contradicts the
                        // configured sets; a node whose region/zone cannot be determined is kept.
                        nodesFilter = new Predicate<ComputeMetadata>() {
                            @Override public boolean apply(ComputeMetadata computeMetadata) {
                                String region = null;
                                String zone = null;

                                // Walk up the location hierarchy to find the node's zone and region.
                                Location location = computeMetadata.getLocation();

                                while (location != null) {
                                    switch (location.getScope()) {
                                        case ZONE:
                                            zone = location.getId();
                                            break;

                                        case REGION:
                                            region = location.getId();
                                            break;
                                    }

                                    location = location.getParent();
                                }

                                if (regions != null && region != null && !regions.contains(region))
                                    return false;

                                if (zones != null && zone != null && !zones.contains(zone))
                                    return false;

                                return true;
                            }
                        };
                    }
                }
                catch (Exception e) {
                    throw new IgniteSpiException("Failed to connect to the provider: " + provider, e);
                }
            }
            finally {
                // Always release waiters, even when initialization failed.
                initLatch.countDown();
            }
        else {
            try {
                U.await(initLatch);
            }
            catch (IgniteInterruptedCheckedException e) {
                throw new IgniteSpiException("Thread has been interrupted.", e);
            }

            // Latch released but service still null => the initializing thread failed.
            if (computeService == null)
                throw new IgniteSpiException("Ip finder has not been initialized properly.");
        }
    }

    /**
     * Reads credential info from {@link #credentialPath} and returns in a string format.
     *
     * @return Credential in {@code String} representation.
     * @throws IgniteSpiException In case of error.
     */
    private String getCredentialFromFile() throws IgniteSpiException {
        try {
            String fileContents = Files.toString(new File(credentialPath), Charsets.UTF_8);

            // Google Compute Engine keys are JSON files that must be converted into a credential.
            if (provider.equals("google-compute-engine")) {
                Supplier<Credentials> credentialSupplier = new GoogleCredentialsFromJson(fileContents);

                return credentialSupplier.get().credential;
            }

            return fileContents;
        }
        catch (IOException e) {
            throw new IgniteSpiException("Failed to retrieve the private key from the file: " + credentialPath, e);
        }
    }

    /**
     * Converts set keys to string.
     *
     * @param set Set.
     * @return String where keys delimited by ','.
     */
    private String keysSetToStr(Set<String> set) {
        Iterator<String> iter = set.iterator();
        StringBuilder builder = new StringBuilder();

        while (iter.hasNext()) {
            builder.append(iter.next());

            if (iter.hasNext())
                builder.append(',');
        }

        return builder.toString();
    }

    /** {@inheritDoc} */
    @Override public TcpDiscoveryCloudIpFinder setShared(boolean shared) {
        super.setShared(shared);

        return this;
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(TcpDiscoveryCloudIpFinder.class, this);
    }
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.engine.impl.scripting;
import java.io.IOException;
import java.io.Reader;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import javax.script.AbstractScriptEngine;
import javax.script.Bindings;
import javax.script.Compilable;
import javax.script.CompiledScript;
import javax.script.ScriptContext;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineFactory;
import javax.script.ScriptException;
import javax.script.SimpleBindings;
import org.flowable.engine.common.api.FlowableException;
import org.flowable.engine.common.impl.de.odysseus.el.util.SimpleResolver;
import org.flowable.engine.common.impl.javax.el.ArrayELResolver;
import org.flowable.engine.common.impl.javax.el.BeanELResolver;
import org.flowable.engine.common.impl.javax.el.CompositeELResolver;
import org.flowable.engine.common.impl.javax.el.ELContext;
import org.flowable.engine.common.impl.javax.el.ELException;
import org.flowable.engine.common.impl.javax.el.ELResolver;
import org.flowable.engine.common.impl.javax.el.ExpressionFactory;
import org.flowable.engine.common.impl.javax.el.FunctionMapper;
import org.flowable.engine.common.impl.javax.el.ListELResolver;
import org.flowable.engine.common.impl.javax.el.MapELResolver;
import org.flowable.engine.common.impl.javax.el.ResourceBundleELResolver;
import org.flowable.engine.common.impl.javax.el.ValueExpression;
import org.flowable.engine.common.impl.javax.el.VariableMapper;
import org.flowable.engine.impl.bpmn.data.ItemInstance;
import org.flowable.engine.impl.el.DynamicBeanPropertyELResolver;
import org.flowable.engine.impl.el.ExpressionFactoryResolver;
import org.flowable.engine.impl.el.JsonNodeELResolver;
import org.flowable.engine.impl.util.ReflectUtil;
/**
 * ScriptEngine that uses JUEL for script evaluation and compilation (JSR-223).
 *
 * Uses EL 1.1 if available, to resolve expressions. Otherwise it reverts to EL 1.0, using {@link ExpressionFactoryResolver}.
 *
 * @author Frederik Heremans
 */
public class JuelScriptEngine extends AbstractScriptEngine implements Compilable {

    /** Factory that created this engine; lazily created in {@link #getFactory()} when constructed standalone. */
    private ScriptEngineFactory scriptEngineFactory;

    /** Factory used to parse script text into EL {@link ValueExpression}s. */
    private ExpressionFactory expressionFactory;

    public JuelScriptEngine(ScriptEngineFactory scriptEngineFactory) {
        this.scriptEngineFactory = scriptEngineFactory;
        // Resolve the ExpressionFactory (EL 1.1 if available, otherwise EL 1.0)
        expressionFactory = ExpressionFactoryResolver.resolveExpressionFactory();
    }

    public JuelScriptEngine() {
        this(null);
    }

    /** Compiles the given EL script against this engine's default context. */
    public CompiledScript compile(String script) throws ScriptException {
        ValueExpression expr = parse(script, context);
        return new JuelCompiledScript(expr);
    }

    public CompiledScript compile(Reader reader) throws ScriptException {
        // Create a String based on the reader and compile it
        return compile(readFully(reader));
    }

    public Object eval(String script, ScriptContext scriptContext) throws ScriptException {
        ValueExpression expr = parse(script, scriptContext);
        return evaluateExpression(expr, scriptContext);
    }

    public Object eval(Reader reader, ScriptContext scriptContext) throws ScriptException {
        return eval(readFully(reader), scriptContext);
    }

    public ScriptEngineFactory getFactory() {
        // Both the lazy creation AND the read stay inside the synchronized block:
        // the field is not volatile, so returning it outside the lock (as before)
        // did not guarantee safe publication of the lazily created factory.
        synchronized (this) {
            if (scriptEngineFactory == null) {
                scriptEngineFactory = new JuelScriptEngineFactory();
            }
            return scriptEngineFactory;
        }
    }

    public Bindings createBindings() {
        return new SimpleBindings();
    }

    /**
     * Evaluates a parsed expression against an ELContext derived from the given ScriptContext.
     *
     * @throws ScriptException wrapping any {@link ELException} raised during evaluation
     */
    private Object evaluateExpression(ValueExpression expr, ScriptContext ctx) throws ScriptException {
        try {
            return expr.getValue(createElContext(ctx));
        } catch (ELException elexp) {
            throw new ScriptException(elexp);
        }
    }

    /**
     * Builds the resolver chain used for property/variable resolution in scripts.
     * Order matters: more specific resolvers (arrays, lists, maps, JSON nodes) come
     * before the generic bean resolver.
     */
    private ELResolver createElResolver() {
        CompositeELResolver compositeResolver = new CompositeELResolver();
        compositeResolver.add(new ArrayELResolver());
        compositeResolver.add(new ListELResolver());
        compositeResolver.add(new MapELResolver());
        compositeResolver.add(new JsonNodeELResolver());
        compositeResolver.add(new ResourceBundleELResolver());
        compositeResolver.add(new DynamicBeanPropertyELResolver(ItemInstance.class, "getFieldValue", "setFieldValue"));
        compositeResolver.add(new BeanELResolver());
        return new SimpleResolver(compositeResolver);
    }

    /**
     * Reads the reader to exhaustion and returns its content as a single String.
     *
     * @throws ScriptException wrapping any {@link IOException}
     */
    private String readFully(Reader reader) throws ScriptException {
        char[] array = new char[8192];
        StringBuilder strBuffer = new StringBuilder();
        int count;
        try {
            while ((count = reader.read(array, 0, array.length)) > 0) {
                strBuffer.append(array, 0, count);
            }
        } catch (IOException exp) {
            throw new ScriptException(exp);
        }
        return strBuffer.toString();
    }

    /**
     * Parses the script into a {@link ValueExpression} using this engine's ExpressionFactory.
     *
     * @throws ScriptException wrapping any {@link ELException} raised during parsing
     */
    private ValueExpression parse(String script, ScriptContext scriptContext) throws ScriptException {
        try {
            return expressionFactory.createValueExpression(createElContext(scriptContext), script, Object.class);
        } catch (ELException ele) {
            throw new ScriptException(ele);
        }
    }

    /**
     * Returns the ELContext for the given ScriptContext, creating and caching it
     * (under the "elcontext" attribute) on first use. Also registers the built-in
     * functions {@code out:print} and, when no SecurityManager is installed,
     * {@code lang:import}.
     */
    private ELContext createElContext(final ScriptContext scriptCtx) {
        // Check if the ELContext is already stored on the ScriptContext
        Object existingELCtx = scriptCtx.getAttribute("elcontext");
        if (existingELCtx instanceof ELContext) {
            return (ELContext) existingELCtx;
        }

        // Expose the ScriptContext itself to scripts as "context".
        scriptCtx.setAttribute("context", scriptCtx, ScriptContext.ENGINE_SCOPE);

        // Built-in function are added to ScriptCtx
        scriptCtx.setAttribute("out:print", getPrintMethod(), ScriptContext.ENGINE_SCOPE);

        // lang:import allows arbitrary class loading, so only register it when
        // no SecurityManager restricts the environment.
        SecurityManager securityManager = System.getSecurityManager();
        if (securityManager == null) {
            scriptCtx.setAttribute("lang:import", getImportMethod(), ScriptContext.ENGINE_SCOPE);
        }

        ELContext elContext = new ELContext() {

            ELResolver resolver = createElResolver();
            VariableMapper varMapper = new ScriptContextVariableMapper(scriptCtx);
            FunctionMapper funcMapper = new ScriptContextFunctionMapper(scriptCtx);

            @Override
            public ELResolver getELResolver() {
                return resolver;
            }

            @Override
            public VariableMapper getVariableMapper() {
                return varMapper;
            }

            @Override
            public FunctionMapper getFunctionMapper() {
                return funcMapper;
            }
        };
        // Store the elcontext in the scriptContext to be able to reuse
        scriptCtx.setAttribute("elcontext", elContext, ScriptContext.ENGINE_SCOPE);
        return elContext;
    }

    private static Method getPrintMethod() {
        try {
            return JuelScriptEngine.class.getMethod("print", new Class[] { Object.class });
        } catch (Exception exp) {
            // Will never occur
            return null;
        }
    }

    /** Target of the {@code out:print} built-in function. */
    public static void print(Object object) {
        System.out.print(object);
    }

    private static Method getImportMethod() {
        try {
            return JuelScriptEngine.class.getMethod("importFunctions", new Class[] { ScriptContext.class, String.class, Object.class });
        } catch (Exception exp) {
            // Will never occur
            return null;
        }
    }

    /**
     * Target of the {@code lang:import} built-in function: registers every public
     * static method of the given class (or class name) under {@code namespace:methodName}
     * in the ScriptContext's engine scope.
     *
     * @param ctx       the script context to register into
     * @param namespace prefix used to qualify the imported function names
     * @param obj       a {@link Class} or a fully qualified class name
     * @throws ELException when obj is neither a Class nor a resolvable class name
     */
    public static void importFunctions(ScriptContext ctx, String namespace, Object obj) {
        Class<?> clazz = null;
        if (obj instanceof Class) {
            clazz = (Class<?>) obj;
        } else if (obj instanceof String) {
            try {
                clazz = ReflectUtil.loadClass((String) obj);
            } catch (FlowableException ae) {
                throw new ELException(ae);
            }
        } else {
            throw new ELException("Class or class name is missing");
        }
        Method[] methods = clazz.getMethods();
        for (Method m : methods) {
            int mod = m.getModifiers();
            if (Modifier.isStatic(mod) && Modifier.isPublic(mod)) {
                String name = namespace + ":" + m.getName();
                ctx.setAttribute(name, m, ScriptContext.ENGINE_SCOPE);
            }
        }
    }

    /**
     * Class representing a compiled script using JUEL.
     *
     * @author Frederik Heremans
     */
    private class JuelCompiledScript extends CompiledScript {

        private ValueExpression valueExpression;

        JuelCompiledScript(ValueExpression valueExpression) {
            this.valueExpression = valueExpression;
        }

        public ScriptEngine getEngine() {
            // Return outer class instance
            return JuelScriptEngine.this;
        }

        public Object eval(ScriptContext ctx) throws ScriptException {
            return evaluateExpression(valueExpression, ctx);
        }
    }

    /**
     * ValueMapper that uses the ScriptContext to get variable values or value expressions.
     *
     * @author Frederik Heremans
     */
    private class ScriptContextVariableMapper extends VariableMapper {

        private ScriptContext scriptContext;

        ScriptContextVariableMapper(ScriptContext scriptCtx) {
            this.scriptContext = scriptCtx;
        }

        @Override
        public ValueExpression resolveVariable(String variableName) {
            int scope = scriptContext.getAttributesScope(variableName);
            if (scope != -1) {
                Object value = scriptContext.getAttribute(variableName, scope);
                if (value instanceof ValueExpression) {
                    // Just return the existing ValueExpression
                    return (ValueExpression) value;
                } else {
                    // Create a new ValueExpression based on the variable value
                    return expressionFactory.createValueExpression(value, Object.class);
                }
            }
            return null;
        }

        @Override
        public ValueExpression setVariable(String name, ValueExpression value) {
            ValueExpression previousValue = resolveVariable(name);
            scriptContext.setAttribute(name, value, ScriptContext.ENGINE_SCOPE);
            return previousValue;
        }
    }

    /**
     * FunctionMapper that uses the ScriptContext to resolve functions in EL.
     * Static nested class: it uses no state from the enclosing engine.
     *
     * @author Frederik Heremans
     */
    private static class ScriptContextFunctionMapper extends FunctionMapper {

        private ScriptContext scriptContext;

        ScriptContextFunctionMapper(ScriptContext ctx) {
            this.scriptContext = ctx;
        }

        private String getFullFunctionName(String prefix, String localName) {
            return prefix + ":" + localName;
        }

        @Override
        public Method resolveFunction(String prefix, String localName) {
            String functionName = getFullFunctionName(prefix, localName);
            int scope = scriptContext.getAttributesScope(functionName);
            if (scope != -1) {
                // Methods are added as variables in the ScriptScope
                Object attributeValue = scriptContext.getAttribute(functionName);
                return (attributeValue instanceof Method) ? (Method) attributeValue : null;
            } else {
                return null;
            }
        }
    }
}
| |
package com.thinkbiganalytics.metadata.jpa.feed;
/*-
* #%L
* thinkbig-operational-metadata-jpa
* %%
* Copyright (C) 2017 ThinkBig Analytics
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.thinkbiganalytics.metadata.api.feed.FeedSummary;
import com.thinkbiganalytics.metadata.api.feed.OpsManagerFeed;
import com.thinkbiganalytics.metadata.api.jobrepo.ExecutionConstants;
import com.thinkbiganalytics.metadata.api.jobrepo.job.BatchJobExecution;
import org.hibernate.annotations.Immutable;
import org.hibernate.annotations.Parameter;
import org.hibernate.annotations.Type;
import org.joda.time.DateTime;
import java.io.Serializable;
import java.util.UUID;
import javax.persistence.Column;
import javax.persistence.Embeddable;
import javax.persistence.EmbeddedId;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.Table;
@Entity
@Table(name = "FEED_SUMMARY_VIEW")
@Immutable
public class JpaFeedSummary implements FeedSummary {
@EmbeddedId
private JpaFeedSummaryId feedSummaryId;
@Column(name = "FEED_ID", insertable = false, updatable = false)
private UUID feedId;
@Column(name = "FEED_NAME", insertable = false, updatable = false)
private String feedName;
@Enumerated(EnumType.STRING)
@Column(name = "FEED_TYPE")
private OpsManagerFeed.FeedType feedType = OpsManagerFeed.FeedType.FEED;
@Column(name = "IS_STREAM", length = 1)
@org.hibernate.annotations.Type(type = "yes_no")
private boolean isStream;
@Column(name = "JOB_EXECUTION_ID", insertable = false, updatable = false)
private Long jobExecutionId;
@Column(name = "JOB_INSTANCE_ID", insertable = false, updatable = false)
private Long jobInstanceId;
@Type(type = "com.thinkbiganalytics.jpa.PersistentDateTimeAsMillisLong")
@Column(name = "START_TIME")
private DateTime startTime;
@Type(type = "com.thinkbiganalytics.jpa.PersistentDateTimeAsMillisLong")
@Column(name = "END_TIME")
private DateTime endTime;
@Enumerated(EnumType.STRING)
@Column(name = "STATUS", length = 10, nullable = false)
private BatchJobExecution.JobStatus status = BatchJobExecution.JobStatus.UNKNOWN;
@Enumerated(EnumType.STRING)
@Column(name = "EXIT_CODE")
private ExecutionConstants.ExitCode exitCode = ExecutionConstants.ExitCode.UNKNOWN;
@Column(name = "EXIT_MESSAGE")
@Type(type = "com.thinkbiganalytics.jpa.TruncateStringUserType", parameters = {@Parameter(name = "length", value = "2500")})
private String exitMessage;
@Enumerated(EnumType.STRING)
@Column(name = "RUN_STATUS", insertable = false, updatable = false)
private RunStatus runStatus;
@Column(name = "ALL_COUNT")
private Long allCount;
@Column(name = "FAILED_COUNT")
private Long failedCount;
@Column(name = "COMPLETED_COUNT")
private Long completedCount;
@Column(name = "ABANDONED_COUNT")
private Long abandonedCount;
@Column(name = "RUNNING_COUNT", insertable = false, updatable = false)
private Long runningCount;
public JpaFeedSummaryId getFeedSummaryId() {
return feedSummaryId;
}
public void setFeedSummaryId(JpaFeedSummaryId feedSummaryId) {
this.feedSummaryId = feedSummaryId;
}
public void setFeedId(UUID feedId) {
this.feedId = feedId;
}
@Override
public UUID getFeedId() {
return feedId;
}
@Override
public String getFeedIdAsString() {
return feedId.toString();
}
public void setFeedId(String feedId) {
this.feedId = UUID.fromString(feedId);
}
@Override
public String getFeedName() {
return feedName;
}
public void setFeedName(String feedName) {
this.feedName = feedName;
}
@Override
public OpsManagerFeed.FeedType getFeedType() {
return feedType;
}
public void setFeedType(OpsManagerFeed.FeedType feedType) {
this.feedType = feedType;
}
@Override
public boolean isStream() {
return isStream;
}
public void setStream(boolean stream) {
isStream = stream;
}
@Override
public Long getJobExecutionId() {
return jobExecutionId;
}
public void setJobExecutionId(Long jobExecutionId) {
this.jobExecutionId = jobExecutionId;
}
@Override
public Long getJobInstanceId() {
return jobInstanceId;
}
public void setJobInstanceId(Long jobInstanceId) {
this.jobInstanceId = jobInstanceId;
}
// --- Execution window and outcome accessors ---

@Override
public DateTime getStartTime() {
    return startTime;
}

public void setStartTime(DateTime startTime) {
    this.startTime = startTime;
}

@Override
public DateTime getEndTime() {
    return endTime;
}

public void setEndTime(DateTime endTime) {
    this.endTime = endTime;
}

@Override
public BatchJobExecution.JobStatus getStatus() {
    return status;
}

public void setStatus(BatchJobExecution.JobStatus status) {
    this.status = status;
}

@Override
public ExecutionConstants.ExitCode getExitCode() {
    return exitCode;
}

public void setExitCode(ExecutionConstants.ExitCode exitCode) {
    this.exitCode = exitCode;
}

/** @return the (possibly truncated, see column mapping) exit message for the job */
@Override
public String getExitMessage() {
    return exitMessage;
}

public void setExitMessage(String exitMessage) {
    this.exitMessage = exitMessage;
}

// Note: the RUN_STATUS column is mapped read-only; this setter only mutates the
// in-memory field and is not persisted.
@Override
public RunStatus getRunStatus() {
    return runStatus;
}

public void setRunStatus(RunStatus runStatus) {
    this.runStatus = runStatus;
}
// --- Aggregate count accessors (values may be null when no jobs were recorded) ---

public Long getAllCount() {
    return allCount;
}

public void setAllCount(Long allCount) {
    this.allCount = allCount;
}

public Long getFailedCount() {
    return failedCount;
}

public void setFailedCount(Long failedCount) {
    this.failedCount = failedCount;
}

public Long getCompletedCount() {
    return completedCount;
}

public void setCompletedCount(Long completedCount) {
    this.completedCount = completedCount;
}

public Long getAbandonedCount() {
    return abandonedCount;
}

public void setAbandonedCount(Long abandonedCount) {
    this.abandonedCount = abandonedCount;
}

// Note: the RUNNING_COUNT column is mapped read-only; this setter only mutates the
// in-memory field and is not persisted.
public Long getRunningCount() {
    return runningCount;
}

public void setRunningCount(Long runningCount) {
    this.runningCount = runningCount;
}
/**
 * Embeddable primary key for the enclosing feed-summary entity: a single string
 * {@code ID} column. Equality and hash code are based solely on that id value;
 * two instances with null ids compare equal.
 */
@Embeddable
public static class JpaFeedSummaryId implements Serializable, FeedSummary.ID {

    private static final long serialVersionUID = 6017751710414995750L;

    /** Backing value of the ID column. */
    @Column(name = "ID")
    private String id;

    /** No-arg constructor required by JPA. */
    public JpaFeedSummaryId() {
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || o.getClass() != getClass()) {
            return false;
        }
        final JpaFeedSummaryId other = (JpaFeedSummaryId) o;
        return (id == null) ? (other.id == null) : id.equals(other.id);
    }

    @Override
    public int hashCode() {
        return (id == null) ? 0 : id.hashCode();
    }
}
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver13;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
/**
 * Wire (de)serializer for the Big Switch Networks "known multicast rate" TLV,
 * OpenFlow 1.3 encoding: 2-byte type (0x5b) + 2-byte length (8) + 4-byte unsigned value.
 *
 * NOTE: this class is LoxiGen-generated ("Do not modify" in the file header);
 * hand edits will be lost on the next code generation run.
 */
class OFBsnTlvKnownMulticastRateVer13 implements OFBsnTlvKnownMulticastRate {
    private static final Logger logger = LoggerFactory.getLogger(OFBsnTlvKnownMulticastRateVer13.class);
    // version: 1.3
    final static byte WIRE_VERSION = 4;
    // Fixed on-wire size: type(2) + length(2) + value(4).
    final static int LENGTH = 8;

    private final static long DEFAULT_VALUE = 0x0L;

    // OF message fields
    private final long value;
    //
    // Immutable default instance
    final static OFBsnTlvKnownMulticastRateVer13 DEFAULT = new OFBsnTlvKnownMulticastRateVer13(
        DEFAULT_VALUE
    );

    // package private constructor - used by readers, builders, and factory
    OFBsnTlvKnownMulticastRateVer13(long value) {
        // U32.normalize clamps the long into the unsigned 32-bit range of the wire field.
        this.value = U32.normalize(value);
    }

    // Accessors for OF message fields
    @Override
    public int getType() {
        return 0x5b;
    }

    @Override
    public long getValue() {
        return value;
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_13;
    }

    /** Returns a builder pre-populated from this instance (unset fields fall back to this message). */
    public OFBsnTlvKnownMulticastRate.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    /** Builder that inherits any field not explicitly set from a parent message. */
    static class BuilderWithParent implements OFBsnTlvKnownMulticastRate.Builder {
        final OFBsnTlvKnownMulticastRateVer13 parentMessage;

        // OF message fields
        private boolean valueSet;
        private long value;

        BuilderWithParent(OFBsnTlvKnownMulticastRateVer13 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public int getType() {
            return 0x5b;
        }

        @Override
        public long getValue() {
            return value;
        }

        @Override
        public OFBsnTlvKnownMulticastRate.Builder setValue(long value) {
            this.value = value;
            this.valueSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_13;
        }

        @Override
        public OFBsnTlvKnownMulticastRate build() {
            // Fall back to the parent's value when the builder was never given one.
            long value = this.valueSet ? this.value : parentMessage.value;
            //
            return new OFBsnTlvKnownMulticastRateVer13(
                value
            );
        }
    }

    /** Stand-alone builder; unset fields fall back to the declared defaults. */
    static class Builder implements OFBsnTlvKnownMulticastRate.Builder {
        // OF message fields
        private boolean valueSet;
        private long value;

        @Override
        public int getType() {
            return 0x5b;
        }

        @Override
        public long getValue() {
            return value;
        }

        @Override
        public OFBsnTlvKnownMulticastRate.Builder setValue(long value) {
            this.value = value;
            this.valueSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_13;
        }
        //
        @Override
        public OFBsnTlvKnownMulticastRate build() {
            long value = this.valueSet ? this.value : DEFAULT_VALUE;
            return new OFBsnTlvKnownMulticastRateVer13(
                value
            );
        }
    }

    final static Reader READER = new Reader();

    /** Decodes one TLV from the buffer; returns null (with reader index reset) on partial data. */
    static class Reader implements OFMessageReader<OFBsnTlvKnownMulticastRate> {
        @Override
        public OFBsnTlvKnownMulticastRate readFrom(ByteBuf bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property type == 0x5b
            short type = bb.readShort();
            if(type != (short) 0x5b)
                throw new OFParseError("Wrong type: Expected=0x5b(0x5b), got="+type);
            int length = U16.f(bb.readShort());
            if(length != 8)
                throw new OFParseError("Wrong length: Expected=8(8), got="+length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            long value = U32.f(bb.readInt());
            OFBsnTlvKnownMulticastRateVer13 bsnTlvKnownMulticastRateVer13 = new OFBsnTlvKnownMulticastRateVer13(
                value
            );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", bsnTlvKnownMulticastRateVer13);
            return bsnTlvKnownMulticastRateVer13;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFBsnTlvKnownMulticastRateVer13Funnel FUNNEL = new OFBsnTlvKnownMulticastRateVer13Funnel();

    /** Feeds the message's canonical byte representation into a Guava hashing sink. */
    static class OFBsnTlvKnownMulticastRateVer13Funnel implements Funnel<OFBsnTlvKnownMulticastRateVer13> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFBsnTlvKnownMulticastRateVer13 message, PrimitiveSink sink) {
            // fixed value property type = 0x5b
            sink.putShort((short) 0x5b);
            // fixed value property length = 8
            sink.putShort((short) 0x8);
            // NOTE(review): value is funneled as 8 bytes here although the wire encoding
            // below writes only 4; presumably intentional in the generator — leave as-is.
            sink.putLong(message.value);
        }
    }

    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    /** Encodes the fixed 8-byte TLV: type, length, then the 32-bit truncated value. */
    static class Writer implements OFMessageWriter<OFBsnTlvKnownMulticastRateVer13> {
        @Override
        public void write(ByteBuf bb, OFBsnTlvKnownMulticastRateVer13 message) {
            // fixed value property type = 0x5b
            bb.writeShort((short) 0x5b);
            // fixed value property length = 8
            bb.writeShort((short) 0x8);
            bb.writeInt(U32.t(message.value));
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFBsnTlvKnownMulticastRateVer13(");
        b.append("value=").append(value);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFBsnTlvKnownMulticastRateVer13 other = (OFBsnTlvKnownMulticastRateVer13) obj;
        if( value != other.value)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        // NOTE(review): the canonical pattern is `prime * result + (int)(...)`; this generated
        // form drops the `result` accumulation. Still a valid hash for a single field — do not
        // hand-edit generated code.
        result = prime * (int) (value ^ (value >>> 32));
        return result;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.indexing.seekablestream.supervisor;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.apache.druid.data.input.impl.DimensionSchema;
import org.apache.druid.data.input.impl.DimensionsSpec;
import org.apache.druid.data.input.impl.JsonInputFormat;
import org.apache.druid.data.input.impl.StringDimensionSchema;
import org.apache.druid.data.input.impl.TimestampSpec;
import org.apache.druid.indexing.common.TestUtils;
import org.apache.druid.indexing.common.stats.RowIngestionMetersFactory;
import org.apache.druid.indexing.common.task.Task;
import org.apache.druid.indexing.common.task.TaskResource;
import org.apache.druid.indexing.overlord.DataSourceMetadata;
import org.apache.druid.indexing.overlord.IndexerMetadataStorageCoordinator;
import org.apache.druid.indexing.overlord.TaskMaster;
import org.apache.druid.indexing.overlord.TaskQueue;
import org.apache.druid.indexing.overlord.TaskRunner;
import org.apache.druid.indexing.overlord.TaskRunnerListener;
import org.apache.druid.indexing.overlord.TaskStorage;
import org.apache.druid.indexing.overlord.supervisor.SupervisorStateManager;
import org.apache.druid.indexing.overlord.supervisor.SupervisorStateManager.BasicState;
import org.apache.druid.indexing.overlord.supervisor.SupervisorStateManagerConfig;
import org.apache.druid.indexing.seekablestream.SeekableStreamDataSourceMetadata;
import org.apache.druid.indexing.seekablestream.SeekableStreamEndSequenceNumbers;
import org.apache.druid.indexing.seekablestream.SeekableStreamIndexTask;
import org.apache.druid.indexing.seekablestream.SeekableStreamIndexTaskClient;
import org.apache.druid.indexing.seekablestream.SeekableStreamIndexTaskClientFactory;
import org.apache.druid.indexing.seekablestream.SeekableStreamIndexTaskIOConfig;
import org.apache.druid.indexing.seekablestream.SeekableStreamIndexTaskRunner;
import org.apache.druid.indexing.seekablestream.SeekableStreamIndexTaskTuningConfig;
import org.apache.druid.indexing.seekablestream.SeekableStreamStartSequenceNumbers;
import org.apache.druid.indexing.seekablestream.common.OrderedSequenceNumber;
import org.apache.druid.indexing.seekablestream.common.RecordSupplier;
import org.apache.druid.indexing.seekablestream.common.StreamException;
import org.apache.druid.indexing.seekablestream.common.StreamPartition;
import org.apache.druid.indexing.seekablestream.supervisor.SeekableStreamSupervisorStateManager.SeekableStreamExceptionEvent;
import org.apache.druid.indexing.seekablestream.supervisor.SeekableStreamSupervisorStateManager.SeekableStreamState;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.java.util.common.parsers.JSONPathSpec;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.segment.TestHelper;
import org.apache.druid.segment.indexing.DataSchema;
import org.apache.druid.segment.indexing.granularity.UniformGranularitySpec;
import org.apache.druid.segment.realtime.firehose.ChatHandlerProvider;
import org.apache.druid.server.security.AuthorizerMapper;
import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.joda.time.DateTime;
import org.joda.time.Duration;
import org.joda.time.Period;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import javax.annotation.Nullable;
import java.io.File;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.Executor;
import java.util.concurrent.ScheduledExecutorService;
public class SeekableStreamSupervisorStateTest extends EasyMockSupport
{
  // Shared constants for all tests in this class.
  private static final ObjectMapper OBJECT_MAPPER = TestHelper.makeJsonMapper();
  private static final String DATASOURCE = "testDS";
  private static final String STREAM = "stream";
  private static final String SHARD_ID = "0";
  private static final StreamPartition<String> SHARD0_PARTITION = StreamPartition.of(STREAM, SHARD_ID);
  private static final String EXCEPTION_MSG = "I had an exception";

  // EasyMock collaborators, recreated fresh for each test in setupTest().
  private TaskStorage taskStorage;
  private TaskMaster taskMaster;
  private TaskRunner taskRunner;
  private TaskQueue taskQueue;
  private IndexerMetadataStorageCoordinator indexerMetadataStorageCoordinator;
  private SeekableStreamIndexTaskClientFactory taskClientFactory;
  private SeekableStreamSupervisorSpec spec;
  private SeekableStreamIndexTaskClient indexTaskClient;
  private RecordSupplier<String, String> recordSupplier;
  private RowIngestionMetersFactory rowIngestionMetersFactory;
  private SupervisorStateManagerConfig supervisorConfig;
  /**
   * Creates fresh mocks and wires the expectations that every test shares:
   * spec/config accessors, task-client construction, task master lookups, and a
   * record supplier assigned to shard 0. Tests add their own scenario-specific
   * expectations before calling replayAll().
   */
  @Before
  public void setupTest()
  {
    taskStorage = createMock(TaskStorage.class);
    taskMaster = createMock(TaskMaster.class);
    taskRunner = createMock(TaskRunner.class);
    taskQueue = createMock(TaskQueue.class);
    indexerMetadataStorageCoordinator = createMock(IndexerMetadataStorageCoordinator.class);
    taskClientFactory = createMock(SeekableStreamIndexTaskClientFactory.class);
    spec = createMock(SeekableStreamSupervisorSpec.class);
    indexTaskClient = createMock(SeekableStreamIndexTaskClient.class);
    // Unchecked cast is acceptable in test setup; EasyMock cannot mock the generic type directly.
    recordSupplier = (RecordSupplier<String, String>) createMock(RecordSupplier.class);

    rowIngestionMetersFactory = new TestUtils().getRowIngestionMetersFactory();

    // Default state-manager config (healthiness/unhealthiness thresholds etc.).
    supervisorConfig = new SupervisorStateManagerConfig();

    // Spec accessors may be called any number of times by the supervisor.
    EasyMock.expect(spec.getSupervisorStateManagerConfig()).andReturn(supervisorConfig).anyTimes();
    EasyMock.expect(spec.getDataSchema()).andReturn(getDataSchema()).anyTimes();
    EasyMock.expect(spec.getIoConfig()).andReturn(getIOConfig()).anyTimes();
    EasyMock.expect(spec.getTuningConfig()).andReturn(getTuningConfig()).anyTimes();

    // Any task-client build returns the mocked client.
    EasyMock.expect(taskClientFactory.build(
        EasyMock.anyObject(),
        EasyMock.anyString(),
        EasyMock.anyInt(),
        EasyMock.anyObject(),
        EasyMock.anyLong()
    )).andReturn(
        indexTaskClient).anyTimes();
    EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes();
    EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes();

    taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class));

    // No stored datasource metadata by default.
    EasyMock
        .expect(indexerMetadataStorageCoordinator.retrieveDataSourceMetadata(DATASOURCE)).andReturn(null).anyTimes();

    EasyMock.expect(recordSupplier.getAssignment()).andReturn(ImmutableSet.of(SHARD0_PARTITION)).anyTimes();
    EasyMock.expect(recordSupplier.getLatestSequenceNumber(EasyMock.anyObject())).andReturn("10").anyTimes();
  }
  /**
   * Happy path: with no failures, the supervisor moves PENDING -> RUNNING after its
   * first run, stays RUNNING, records no exception events, and reports at least one
   * successful run.
   */
  @Test
  public void testRunning() throws Exception
  {
    EasyMock.expect(spec.isSuspended()).andReturn(false).anyTimes();
    EasyMock.expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).anyTimes();
    EasyMock.expect(taskStorage.getActiveTasksByDatasource(DATASOURCE)).andReturn(ImmutableList.of()).anyTimes();
    EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andReturn(true).anyTimes();

    replayAll();

    SeekableStreamSupervisor supervisor = new TestSeekableStreamSupervisor();

    // Before the first run: PENDING, healthy, no successes recorded.
    supervisor.start();

    Assert.assertTrue(supervisor.stateManager.isHealthy());
    Assert.assertEquals(BasicState.PENDING, supervisor.stateManager.getSupervisorState());
    Assert.assertEquals(BasicState.PENDING, supervisor.stateManager.getSupervisorState().getBasicState());
    Assert.assertTrue(supervisor.stateManager.getExceptionEvents().isEmpty());
    Assert.assertFalse(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    // First successful run flips state to RUNNING.
    supervisor.runInternal();

    Assert.assertTrue(supervisor.stateManager.isHealthy());
    Assert.assertEquals(BasicState.RUNNING, supervisor.stateManager.getSupervisorState());
    Assert.assertEquals(BasicState.RUNNING, supervisor.stateManager.getSupervisorState().getBasicState());
    Assert.assertTrue(supervisor.stateManager.getExceptionEvents().isEmpty());
    Assert.assertTrue(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    // Subsequent runs remain RUNNING.
    supervisor.runInternal();

    Assert.assertTrue(supervisor.stateManager.isHealthy());
    Assert.assertEquals(BasicState.RUNNING, supervisor.stateManager.getSupervisorState());
    Assert.assertEquals(BasicState.RUNNING, supervisor.stateManager.getSupervisorState().getBasicState());
    Assert.assertTrue(supervisor.stateManager.getExceptionEvents().isEmpty());
    Assert.assertTrue(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    verifyAll();
  }
  /**
   * Stream connection fails on every run: the supervisor stays in
   * CONNECTING_TO_STREAM (still RUNNING/healthy) while accumulating exception
   * events, then becomes UNABLE_TO_CONNECT_TO_STREAM / UNHEALTHY_SUPERVISOR after
   * the third consecutive failure. Stream-level exceptions are flagged as such and
   * carry the wrapped cause's class and message.
   */
  @Test
  public void testConnectingToStreamFail() throws Exception
  {
    EasyMock.expect(spec.isSuspended()).andReturn(false).anyTimes();
    // Every partition lookup throws a StreamException wrapping an IllegalStateException.
    EasyMock.expect(recordSupplier.getPartitionIds(STREAM))
            .andThrow(new StreamException(new IllegalStateException(EXCEPTION_MSG)))
            .anyTimes();
    EasyMock.expect(taskStorage.getActiveTasksByDatasource(DATASOURCE)).andReturn(ImmutableList.of()).anyTimes();
    EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andReturn(true).anyTimes();

    replayAll();

    SeekableStreamSupervisor supervisor = new TestSeekableStreamSupervisor();

    supervisor.start();

    Assert.assertTrue(supervisor.stateManager.isHealthy());
    Assert.assertEquals(BasicState.PENDING, supervisor.stateManager.getSupervisorState());
    Assert.assertTrue(supervisor.stateManager.getExceptionEvents().isEmpty());
    Assert.assertFalse(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    // First failure: still healthy, one stream-exception event recorded.
    supervisor.runInternal();

    Assert.assertTrue(supervisor.stateManager.isHealthy());
    Assert.assertEquals(SeekableStreamState.CONNECTING_TO_STREAM, supervisor.stateManager.getSupervisorState());
    Assert.assertEquals(BasicState.RUNNING, supervisor.stateManager.getSupervisorState().getBasicState());
    List<SupervisorStateManager.ExceptionEvent> exceptionEvents = supervisor.stateManager.getExceptionEvents();
    Assert.assertEquals(1, exceptionEvents.size());
    Assert.assertTrue(((SeekableStreamExceptionEvent) exceptionEvents.get(0)).isStreamException());
    Assert.assertEquals(IllegalStateException.class.getName(), exceptionEvents.get(0).getExceptionClass());
    // Stream exceptions render as "<cause class>: <cause message>".
    Assert.assertEquals(
        StringUtils.format("%s: %s", IllegalStateException.class.getName(), EXCEPTION_MSG),
        exceptionEvents.get(0).getMessage()
    );
    Assert.assertFalse(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    // Second failure: still healthy, now two events.
    supervisor.runInternal();

    Assert.assertTrue(supervisor.stateManager.isHealthy());
    Assert.assertEquals(SeekableStreamState.CONNECTING_TO_STREAM, supervisor.stateManager.getSupervisorState());
    Assert.assertEquals(BasicState.RUNNING, supervisor.stateManager.getSupervisorState().getBasicState());
    Assert.assertEquals(2, supervisor.stateManager.getExceptionEvents().size());
    Assert.assertFalse(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    // Third consecutive failure crosses the unhealthiness threshold.
    supervisor.runInternal();

    Assert.assertFalse(supervisor.stateManager.isHealthy());
    Assert.assertEquals(SeekableStreamState.UNABLE_TO_CONNECT_TO_STREAM, supervisor.stateManager.getSupervisorState());
    Assert.assertEquals(BasicState.UNHEALTHY_SUPERVISOR, supervisor.stateManager.getSupervisorState().getBasicState());
    Assert.assertEquals(3, supervisor.stateManager.getExceptionEvents().size());
    Assert.assertFalse(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    verifyAll();
  }
  /**
   * Fail/recover/fail/recover cycle for stream connectivity. The mock is scripted
   * as 3 failures, 3 successes, 3 failures, 3 successes (order matters — EasyMock
   * consumes the times(3) expectations in sequence). Verifies: 3 failures mark the
   * supervisor UNABLE_TO_CONNECT/UNHEALTHY; it needs 3 consecutive successes to
   * return to RUNNING; failures after a successful run surface as
   * LOST_CONTACT_WITH_STREAM instead of UNABLE_TO_CONNECT; and it again recovers
   * after successes.
   */
  @Test
  public void testConnectingToStreamFailRecoveryFailRecovery() throws Exception
  {
    EasyMock.expect(spec.isSuspended()).andReturn(false).anyTimes();
    EasyMock.expect(recordSupplier.getPartitionIds(STREAM))
            .andThrow(new StreamException(new IllegalStateException()))
            .times(3);
    EasyMock.expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).times(3);
    EasyMock.expect(recordSupplier.getPartitionIds(STREAM))
            .andThrow(new StreamException(new IllegalStateException()))
            .times(3);
    EasyMock.expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).times(3);
    EasyMock.expect(taskStorage.getActiveTasksByDatasource(DATASOURCE)).andReturn(ImmutableList.of()).anyTimes();
    EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andReturn(true).anyTimes();

    replayAll();

    SeekableStreamSupervisor supervisor = new TestSeekableStreamSupervisor();

    supervisor.start();

    Assert.assertTrue(supervisor.stateManager.isHealthy());
    Assert.assertEquals(BasicState.PENDING, supervisor.stateManager.getSupervisorState());

    // Failures 1 and 2: still healthy, stuck in CONNECTING_TO_STREAM.
    supervisor.runInternal();

    Assert.assertTrue(supervisor.stateManager.isHealthy());
    Assert.assertEquals(SeekableStreamState.CONNECTING_TO_STREAM, supervisor.stateManager.getSupervisorState());
    Assert.assertEquals(BasicState.RUNNING, supervisor.stateManager.getSupervisorState().getBasicState());

    supervisor.runInternal();

    Assert.assertTrue(supervisor.stateManager.isHealthy());
    Assert.assertEquals(SeekableStreamState.CONNECTING_TO_STREAM, supervisor.stateManager.getSupervisorState());
    Assert.assertEquals(BasicState.RUNNING, supervisor.stateManager.getSupervisorState().getBasicState());

    // Failure 3: unhealthy, UNABLE_TO_CONNECT_TO_STREAM.
    supervisor.runInternal();

    Assert.assertFalse(supervisor.stateManager.isHealthy());
    Assert.assertEquals(SeekableStreamState.UNABLE_TO_CONNECT_TO_STREAM, supervisor.stateManager.getSupervisorState());
    Assert.assertEquals(BasicState.UNHEALTHY_SUPERVISOR, supervisor.stateManager.getSupervisorState().getBasicState());
    Assert.assertEquals(3, supervisor.stateManager.getExceptionEvents().size());
    Assert.assertFalse(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    // Successes 1 and 2: state stays unhealthy until the healthiness threshold is met.
    supervisor.runInternal();

    Assert.assertEquals(SeekableStreamState.UNABLE_TO_CONNECT_TO_STREAM, supervisor.stateManager.getSupervisorState());
    Assert.assertEquals(BasicState.UNHEALTHY_SUPERVISOR, supervisor.stateManager.getSupervisorState().getBasicState());

    supervisor.runInternal();

    Assert.assertEquals(SeekableStreamState.UNABLE_TO_CONNECT_TO_STREAM, supervisor.stateManager.getSupervisorState());
    Assert.assertEquals(BasicState.UNHEALTHY_SUPERVISOR, supervisor.stateManager.getSupervisorState().getBasicState());

    // Success 3: recovered to RUNNING.
    supervisor.runInternal();

    Assert.assertEquals(BasicState.RUNNING, supervisor.stateManager.getSupervisorState());
    Assert.assertEquals(BasicState.RUNNING, supervisor.stateManager.getSupervisorState().getBasicState());
    Assert.assertTrue(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    // Mock script note: the next two runs consume the second failure batch, but the
    // asserted state below still reads RUNNING until the failure threshold is reached.
    supervisor.runInternal();

    Assert.assertEquals(BasicState.RUNNING, supervisor.stateManager.getSupervisorState());

    supervisor.runInternal();

    Assert.assertEquals(BasicState.RUNNING, supervisor.stateManager.getSupervisorState());

    // After a successful run, connection failures report LOST_CONTACT_WITH_STREAM.
    supervisor.runInternal();

    Assert.assertEquals(SeekableStreamState.LOST_CONTACT_WITH_STREAM, supervisor.stateManager.getSupervisorState());
    Assert.assertEquals(BasicState.UNHEALTHY_SUPERVISOR, supervisor.stateManager.getSupervisorState().getBasicState());
    Assert.assertTrue(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    supervisor.runInternal();

    Assert.assertFalse(supervisor.stateManager.isHealthy());
    Assert.assertEquals(SeekableStreamState.LOST_CONTACT_WITH_STREAM, supervisor.stateManager.getSupervisorState());
    Assert.assertEquals(BasicState.UNHEALTHY_SUPERVISOR, supervisor.stateManager.getSupervisorState().getBasicState());

    supervisor.runInternal();

    Assert.assertFalse(supervisor.stateManager.isHealthy());
    Assert.assertEquals(SeekableStreamState.LOST_CONTACT_WITH_STREAM, supervisor.stateManager.getSupervisorState());

    // Final recovery back to RUNNING.
    supervisor.runInternal();

    Assert.assertTrue(supervisor.stateManager.isHealthy());
    Assert.assertEquals(BasicState.RUNNING, supervisor.stateManager.getSupervisorState());
    Assert.assertTrue(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    verifyAll();
  }
  /**
   * Task-discovery failures (taskStorage throws) are NOT stream exceptions: the
   * supervisor cycles through DISCOVERING_INITIAL_TASKS, goes UNHEALTHY_SUPERVISOR
   * after 3 consecutive failures, needs 3 consecutive successes to return to
   * RUNNING, and turns unhealthy again when failures resume. The mock scripts
   * 3 failures, 3 successes, 3 failures in order.
   */
  @Test
  public void testDiscoveringInitialTasksFailRecoveryFail() throws Exception
  {
    EasyMock.expect(spec.isSuspended()).andReturn(false).anyTimes();
    EasyMock.expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).anyTimes();
    EasyMock.expect(taskStorage.getActiveTasksByDatasource(DATASOURCE)).andThrow(new IllegalStateException(EXCEPTION_MSG)).times(3);
    EasyMock.expect(taskStorage.getActiveTasksByDatasource(DATASOURCE)).andReturn(ImmutableList.of()).times(3);
    EasyMock.expect(taskStorage.getActiveTasksByDatasource(DATASOURCE)).andThrow(new IllegalStateException(EXCEPTION_MSG)).times(3);
    EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andReturn(true).anyTimes();

    replayAll();

    SeekableStreamSupervisor supervisor = new TestSeekableStreamSupervisor();

    supervisor.start();

    // Failure 1: non-stream exception event with the raw message (no class prefix).
    supervisor.runInternal();

    Assert.assertTrue(supervisor.stateManager.isHealthy());
    Assert.assertEquals(SeekableStreamState.DISCOVERING_INITIAL_TASKS, supervisor.stateManager.getSupervisorState());
    Assert.assertEquals(BasicState.RUNNING, supervisor.stateManager.getSupervisorState().getBasicState());
    List<SupervisorStateManager.ExceptionEvent> exceptionEvents = supervisor.stateManager.getExceptionEvents();
    Assert.assertEquals(1, exceptionEvents.size());
    Assert.assertFalse(((SeekableStreamExceptionEvent) exceptionEvents.get(0)).isStreamException());
    Assert.assertEquals(IllegalStateException.class.getName(), exceptionEvents.get(0).getExceptionClass());
    Assert.assertEquals(EXCEPTION_MSG, exceptionEvents.get(0).getMessage());
    Assert.assertFalse(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    // Failure 2.
    supervisor.runInternal();

    Assert.assertTrue(supervisor.stateManager.isHealthy());
    Assert.assertEquals(SeekableStreamState.DISCOVERING_INITIAL_TASKS, supervisor.stateManager.getSupervisorState());
    Assert.assertEquals(2, supervisor.stateManager.getExceptionEvents().size());
    Assert.assertFalse(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    // Failure 3: now unhealthy; event count caps at 3 from here on.
    supervisor.runInternal();

    Assert.assertFalse(supervisor.stateManager.isHealthy());
    Assert.assertEquals(BasicState.UNHEALTHY_SUPERVISOR, supervisor.stateManager.getSupervisorState());
    Assert.assertEquals(BasicState.UNHEALTHY_SUPERVISOR, supervisor.stateManager.getSupervisorState().getBasicState());
    Assert.assertEquals(3, supervisor.stateManager.getExceptionEvents().size());
    Assert.assertFalse(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    // Successes 1 and 2: still reported unhealthy until the healthiness threshold.
    supervisor.runInternal();

    Assert.assertFalse(supervisor.stateManager.isHealthy());
    Assert.assertEquals(BasicState.UNHEALTHY_SUPERVISOR, supervisor.stateManager.getSupervisorState());
    Assert.assertEquals(3, supervisor.stateManager.getExceptionEvents().size());
    Assert.assertTrue(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    supervisor.runInternal();

    Assert.assertFalse(supervisor.stateManager.isHealthy());
    Assert.assertEquals(BasicState.UNHEALTHY_SUPERVISOR, supervisor.stateManager.getSupervisorState());
    Assert.assertEquals(3, supervisor.stateManager.getExceptionEvents().size());
    Assert.assertTrue(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    // Success 3: recovered.
    supervisor.runInternal();

    Assert.assertTrue(supervisor.stateManager.isHealthy());
    Assert.assertEquals(BasicState.RUNNING, supervisor.stateManager.getSupervisorState());
    Assert.assertEquals(BasicState.RUNNING, supervisor.stateManager.getSupervisorState().getBasicState());
    Assert.assertEquals(3, supervisor.stateManager.getExceptionEvents().size());
    Assert.assertTrue(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    // New failure batch begins; state flips back to unhealthy after the threshold.
    supervisor.runInternal();

    Assert.assertTrue(supervisor.stateManager.isHealthy());
    Assert.assertEquals(BasicState.RUNNING, supervisor.stateManager.getSupervisorState());
    Assert.assertTrue(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    supervisor.runInternal();

    Assert.assertTrue(supervisor.stateManager.isHealthy());
    Assert.assertEquals(BasicState.RUNNING, supervisor.stateManager.getSupervisorState());
    Assert.assertTrue(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    supervisor.runInternal();

    Assert.assertFalse(supervisor.stateManager.isHealthy());
    Assert.assertEquals(BasicState.UNHEALTHY_SUPERVISOR, supervisor.stateManager.getSupervisorState());
    Assert.assertTrue(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    verifyAll();
  }
  /**
   * Same fail/recover/fail pattern as the discovery test, but the failures come
   * from taskQueue.add() during the CREATING_TASKS phase. The mock scripts
   * 3 throws, 3 successes, 3 throws in order; the supervisor goes unhealthy after
   * 3 consecutive failures, recovers after 3 consecutive successes, then goes
   * unhealthy again.
   */
  @Test
  public void testCreatingTasksFailRecoveryFail() throws Exception
  {
    EasyMock.expect(spec.isSuspended()).andReturn(false).anyTimes();
    EasyMock.expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).anyTimes();
    EasyMock.expect(taskStorage.getActiveTasksByDatasource(DATASOURCE)).andReturn(ImmutableList.of()).anyTimes();
    EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andThrow(new IllegalStateException(EXCEPTION_MSG)).times(3);
    EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andReturn(true).times(3);
    EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andThrow(new IllegalStateException(EXCEPTION_MSG)).times(3);

    replayAll();

    SeekableStreamSupervisor supervisor = new TestSeekableStreamSupervisor();

    supervisor.start();

    // Failure 1: non-stream exception recorded with the raw message.
    supervisor.runInternal();

    Assert.assertTrue(supervisor.stateManager.isHealthy());
    Assert.assertEquals(SeekableStreamState.CREATING_TASKS, supervisor.stateManager.getSupervisorState());
    Assert.assertEquals(BasicState.RUNNING, supervisor.stateManager.getSupervisorState().getBasicState());
    List<SupervisorStateManager.ExceptionEvent> exceptionEvents = supervisor.stateManager.getExceptionEvents();
    Assert.assertEquals(1, exceptionEvents.size());
    Assert.assertFalse(((SeekableStreamExceptionEvent) exceptionEvents.get(0)).isStreamException());
    Assert.assertEquals(IllegalStateException.class.getName(), exceptionEvents.get(0).getExceptionClass());
    Assert.assertEquals(EXCEPTION_MSG, exceptionEvents.get(0).getMessage());
    Assert.assertFalse(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    // Failure 2.
    supervisor.runInternal();

    Assert.assertTrue(supervisor.stateManager.isHealthy());
    Assert.assertEquals(SeekableStreamState.CREATING_TASKS, supervisor.stateManager.getSupervisorState());
    Assert.assertEquals(BasicState.RUNNING, supervisor.stateManager.getSupervisorState().getBasicState());
    Assert.assertEquals(2, supervisor.stateManager.getExceptionEvents().size());
    Assert.assertFalse(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    // Failure 3: unhealthy.
    supervisor.runInternal();

    Assert.assertFalse(supervisor.stateManager.isHealthy());
    Assert.assertEquals(BasicState.UNHEALTHY_SUPERVISOR, supervisor.stateManager.getSupervisorState());
    Assert.assertEquals(BasicState.UNHEALTHY_SUPERVISOR, supervisor.stateManager.getSupervisorState().getBasicState());
    Assert.assertEquals(3, supervisor.stateManager.getExceptionEvents().size());
    Assert.assertFalse(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    // Successes 1 and 2: unhealthy until the healthiness threshold is met.
    supervisor.runInternal();

    Assert.assertFalse(supervisor.stateManager.isHealthy());
    Assert.assertEquals(BasicState.UNHEALTHY_SUPERVISOR, supervisor.stateManager.getSupervisorState());
    Assert.assertEquals(3, supervisor.stateManager.getExceptionEvents().size());
    Assert.assertTrue(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    supervisor.runInternal();

    Assert.assertFalse(supervisor.stateManager.isHealthy());
    Assert.assertEquals(BasicState.UNHEALTHY_SUPERVISOR, supervisor.stateManager.getSupervisorState());
    Assert.assertEquals(3, supervisor.stateManager.getExceptionEvents().size());
    Assert.assertTrue(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    // Success 3: recovered to RUNNING.
    supervisor.runInternal();

    Assert.assertTrue(supervisor.stateManager.isHealthy());
    Assert.assertEquals(BasicState.RUNNING, supervisor.stateManager.getSupervisorState());
    Assert.assertEquals(BasicState.RUNNING, supervisor.stateManager.getSupervisorState().getBasicState());
    Assert.assertEquals(3, supervisor.stateManager.getExceptionEvents().size());
    Assert.assertTrue(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    // Second failure batch begins; unhealthy again after the threshold.
    supervisor.runInternal();

    Assert.assertTrue(supervisor.stateManager.isHealthy());
    Assert.assertEquals(BasicState.RUNNING, supervisor.stateManager.getSupervisorState());
    Assert.assertTrue(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    supervisor.runInternal();

    Assert.assertTrue(supervisor.stateManager.isHealthy());
    Assert.assertEquals(BasicState.RUNNING, supervisor.stateManager.getSupervisorState());
    Assert.assertTrue(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    supervisor.runInternal();

    Assert.assertFalse(supervisor.stateManager.isHealthy());
    Assert.assertEquals(BasicState.UNHEALTHY_SUPERVISOR, supervisor.stateManager.getSupervisorState());
    Assert.assertTrue(supervisor.stateManager.isAtLeastOneSuccessfulRun());

    verifyAll();
  }
@Test
public void testSuspended() throws Exception
{
  // A suspended spec should drive the supervisor into SUSPENDED state while it
  // stays healthy, records no exception events, and counts runs as successful.
  EasyMock.expect(spec.isSuspended()).andReturn(true).anyTimes();
  EasyMock.expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).anyTimes();
  EasyMock.expect(taskStorage.getActiveTasksByDatasource(DATASOURCE)).andReturn(ImmutableList.of()).anyTimes();
  EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andReturn(true).anyTimes();
  replayAll();

  // Declare as the concrete test type rather than the raw SeekableStreamSupervisor
  // type: removes the raw-type/unchecked warning without changing behavior.
  final TestSeekableStreamSupervisor supervisor = new TestSeekableStreamSupervisor();

  supervisor.start();

  // Before the first run: healthy, PENDING, no exception events, no successful runs.
  Assert.assertTrue(supervisor.stateManager.isHealthy());
  Assert.assertEquals(BasicState.PENDING, supervisor.stateManager.getSupervisorState());
  Assert.assertEquals(BasicState.PENDING, supervisor.stateManager.getSupervisorState().getBasicState());
  Assert.assertTrue(supervisor.stateManager.getExceptionEvents().isEmpty());
  Assert.assertFalse(supervisor.stateManager.isAtLeastOneSuccessfulRun());

  supervisor.runInternal();

  // First run: transitions to SUSPENDED and counts as a successful run.
  Assert.assertTrue(supervisor.stateManager.isHealthy());
  Assert.assertEquals(BasicState.SUSPENDED, supervisor.stateManager.getSupervisorState());
  Assert.assertEquals(BasicState.SUSPENDED, supervisor.stateManager.getSupervisorState().getBasicState());
  Assert.assertTrue(supervisor.stateManager.getExceptionEvents().isEmpty());
  Assert.assertTrue(supervisor.stateManager.isAtLeastOneSuccessfulRun());

  supervisor.runInternal();

  // Second run: remains SUSPENDED and healthy.
  Assert.assertTrue(supervisor.stateManager.isHealthy());
  Assert.assertEquals(BasicState.SUSPENDED, supervisor.stateManager.getSupervisorState());
  Assert.assertEquals(BasicState.SUSPENDED, supervisor.stateManager.getSupervisorState().getBasicState());
  Assert.assertTrue(supervisor.stateManager.getExceptionEvents().isEmpty());
  Assert.assertTrue(supervisor.stateManager.isAtLeastOneSuccessfulRun());

  verifyAll();
}
@Test
public void testStopping() throws Exception
{
  // A stop after a healthy run should move the supervisor to STOPPING and tear
  // down the task-runner listener, the index task client and the record supplier.
  EasyMock.expect(spec.isSuspended()).andReturn(false).anyTimes();
  EasyMock.expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).anyTimes();
  EasyMock.expect(taskStorage.getActiveTasksByDatasource(DATASOURCE)).andReturn(ImmutableList.of()).anyTimes();
  EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andReturn(true).anyTimes();
  // Void-method expectations recorded without anyTimes(): EasyMock will verify
  // each is invoked (exactly once) by the stop sequence.
  taskRunner.unregisterListener("testSupervisorId");
  indexTaskClient.close();
  recordSupplier.close();
  replayAll();

  // Declare as the concrete test type rather than the raw SeekableStreamSupervisor
  // type: removes the raw-type/unchecked warning without changing behavior.
  final TestSeekableStreamSupervisor supervisor = new TestSeekableStreamSupervisor();

  supervisor.start();

  // Before the first run: healthy, PENDING, no exception events, no successful runs.
  Assert.assertTrue(supervisor.stateManager.isHealthy());
  Assert.assertEquals(BasicState.PENDING, supervisor.stateManager.getSupervisorState());
  Assert.assertEquals(BasicState.PENDING, supervisor.stateManager.getSupervisorState().getBasicState());
  Assert.assertTrue(supervisor.stateManager.getExceptionEvents().isEmpty());
  Assert.assertFalse(supervisor.stateManager.isAtLeastOneSuccessfulRun());

  supervisor.runInternal();

  // After a clean run: RUNNING and marked as having at least one successful run.
  Assert.assertTrue(supervisor.stateManager.isHealthy());
  Assert.assertEquals(BasicState.RUNNING, supervisor.stateManager.getSupervisorState());
  Assert.assertEquals(BasicState.RUNNING, supervisor.stateManager.getSupervisorState().getBasicState());
  Assert.assertTrue(supervisor.stateManager.getExceptionEvents().isEmpty());
  Assert.assertTrue(supervisor.stateManager.isAtLeastOneSuccessfulRun());

  // NOTE(review): the boolean flag's meaning is defined by stop()'s signature
  // (presumably "stopGracefully") — confirm against SeekableStreamSupervisor.
  supervisor.stop(false);

  Assert.assertTrue(supervisor.stateManager.isHealthy());
  Assert.assertEquals(BasicState.STOPPING, supervisor.stateManager.getSupervisorState());
  Assert.assertEquals(BasicState.STOPPING, supervisor.stateManager.getSupervisorState().getBasicState());

  verifyAll();
}
/**
 * Schema used by the test tasks: two string dimensions ("dim1", "dim2"), a
 * single row-count aggregator, hourly segment granularity with no rollup
 * granularity ({@code Granularities.NONE}) and no fixed intervals.
 */
private static DataSchema getDataSchema()
{
  return new DataSchema(
      DATASOURCE,
      new TimestampSpec("timestamp", "iso", null),
      new DimensionsSpec(
          ImmutableList.<DimensionSchema>of(
              StringDimensionSchema.create("dim1"),
              StringDimensionSchema.create("dim2")
          ),
          null,
          null
      ),
      new AggregatorFactory[]{new CountAggregatorFactory("rows")},
      new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, ImmutableList.of()),
      null
  );
}
/**
 * Builds a minimal supervisor IO config for the test stream.
 * NOTE(review): the positional-argument labels below are inferred from typical
 * SeekableStreamSupervisorIOConfig signatures — confirm against the constructor.
 */
private static SeekableStreamSupervisorIOConfig getIOConfig()
{
  // Anonymous subclass of the abstract IO config.
  return new SeekableStreamSupervisorIOConfig(
      "stream", // stream name
      new JsonInputFormat(new JSONPathSpec(true, ImmutableList.of()), ImmutableMap.of()), // JSON input, field discovery enabled
      1, // presumably replicas — TODO confirm
      1, // presumably taskCount — TODO confirm
      new Period("PT1H"), // presumably taskDuration — TODO confirm
      new Period("P1D"), // presumably startDelay — TODO confirm
      new Period("PT30S"), // presumably run period — TODO confirm
      false, // presumably useEarliestSequenceNumber — TODO confirm
      new Period("PT30M"), // presumably completionTimeout — TODO confirm
      null,
      null, null // remaining optional settings left unset
  )
  {
  };
}
/**
 * Supervisor tuning config with small fixed thread/retry/timeout values. The
 * task-level tuning config it converts to passes null for every positional
 * setting, so the task config falls back to its defaults.
 */
private static SeekableStreamSupervisorTuningConfig getTuningConfig()
{
  return new SeekableStreamSupervisorTuningConfig()
  {
    // One worker thread is enough for these single-partition tests.
    @Override
    public Integer getWorkerThreads()
    {
      return 1;
    }
    // One chat (task-communication) thread.
    @Override
    public Integer getChatThreads()
    {
      return 1;
    }
    // A single retry keeps failing paths fast.
    @Override
    public Long getChatRetries()
    {
      return 1L;
    }
    // One-minute HTTP timeout.
    @Override
    public Duration getHttpTimeout()
    {
      return new Period("PT1M").toStandardDuration();
    }
    // One-second shutdown timeout keeps test teardown fast.
    @Override
    public Duration getShutdownTimeout()
    {
      return new Period("PT1S").toStandardDuration();
    }
    // Two-minute repartition transition window.
    @Override
    public Duration getRepartitionTransitionDuration()
    {
      return new Period("PT2M").toStandardDuration();
    }
    // All positional task-tuning settings are null: rely on defaults.
    @Override
    public SeekableStreamIndexTaskTuningConfig convertToTaskTuningConfig()
    {
      return new SeekableStreamIndexTaskTuningConfig(
          null,
          null,
          null,
          null,
          null,
          null,
          null,
          null,
          null,
          null,
          null,
          null,
          null,
          null,
          null,
          null,
          null,
          null,
          null
      )
      {
        // Returns null; presumably never invoked by these tests — TODO confirm.
        @Override
        public SeekableStreamIndexTaskTuningConfig withBasePersistDirectory(File dir)
        {
          return null;
        }
        // Returns null; only used if the config is printed.
        @Override
        public String toString()
        {
          return null;
        }
      };
    }
  };
}
/**
 * Minimal concrete {@link SeekableStreamIndexTask}: it never creates a task
 * runner and hands back the test's shared mock {@code recordSupplier}.
 */
private class TestSeekableStreamIndexTask extends SeekableStreamIndexTask<String, String>
{
  public TestSeekableStreamIndexTask(
      String id,
      @Nullable TaskResource taskResource,
      DataSchema dataSchema,
      SeekableStreamIndexTaskTuningConfig tuningConfig,
      SeekableStreamIndexTaskIOConfig<String, String> ioConfig,
      @Nullable Map<String, Object> context,
      @Nullable ChatHandlerProvider chatHandlerProvider,
      AuthorizerMapper authorizerMapper,
      RowIngestionMetersFactory rowIngestionMetersFactory,
      @Nullable String groupId
  )
  {
    // Pure passthrough to the superclass; the trailing argument is left unset.
    // NOTE(review): confirm the meaning of the final null against the super constructor.
    super(
        id,
        taskResource,
        dataSchema,
        tuningConfig,
        ioConfig,
        context,
        chatHandlerProvider,
        authorizerMapper,
        rowIngestionMetersFactory,
        groupId,
        null
    );
  }
  // Returns null; presumably the runner is never exercised by these tests — TODO confirm.
  @Override
  protected SeekableStreamIndexTaskRunner<String, String> createTaskRunner()
  {
    return null;
  }
  // Always the shared mock record supplier from the enclosing test.
  @Override
  protected RecordSupplier<String, String> newTaskRecordSupplier()
  {
    return recordSupplier;
  }
  // Task type identifier used by the task framework.
  @Override
  public String getType()
  {
    return "test";
  }
}
/**
 * Concrete {@link SeekableStreamSupervisor} for exercising the supervisor
 * state lifecycle. All stream interaction is stubbed: it uses the test's
 * shared mock {@code recordSupplier} and always creates a single
 * {@link TestSeekableStreamIndexTask}.
 */
private class TestSeekableStreamSupervisor extends SeekableStreamSupervisor<String, String>
{
  private TestSeekableStreamSupervisor()
  {
    // NOTE(review): the trailing boolean's meaning comes from the super
    // constructor signature — confirm before relying on it.
    super(
        "testSupervisorId",
        taskStorage,
        taskMaster,
        indexerMetadataStorageCoordinator,
        taskClientFactory,
        OBJECT_MAPPER,
        spec,
        rowIngestionMetersFactory,
        false
    );
  }
  // Base name used for the tasks this supervisor creates.
  @Override
  protected String baseTaskName()
  {
    return "test";
  }
  // No-op: latest-sequence polling is irrelevant to these state tests.
  @Override
  protected void updateLatestSequenceFromStream(
      RecordSupplier<String, String> recordSupplier, Set<StreamPartition<String>> streamPartitions
  )
  {
    // do nothing
  }
  // Builds a task IO config spanning the given start/end partition offsets on STREAM.
  @Override
  protected SeekableStreamIndexTaskIOConfig createTaskIoConfig(
      int groupId,
      Map<String, String> startPartitions,
      Map<String, String> endPartitions,
      String baseSequenceName,
      DateTime minimumMessageTime,
      DateTime maximumMessageTime,
      Set<String> exclusiveStartSequenceNumberPartitions,
      SeekableStreamSupervisorIOConfig ioConfig
  )
  {
    return new SeekableStreamIndexTaskIOConfig<String, String>(
        groupId,
        baseSequenceName,
        new SeekableStreamStartSequenceNumbers<>(STREAM, startPartitions, exclusiveStartSequenceNumberPartitions),
        new SeekableStreamEndSequenceNumbers<>(STREAM, endPartitions),
        true,
        minimumMessageTime,
        maximumMessageTime,
        ioConfig.getInputFormat(
            getDataSchema().getParser() == null ? null : getDataSchema().getParser().getParseSpec()
        )
    )
    {
    };
  }
  // Always produces exactly one test task, regardless of the replica count.
  @Override
  protected List<SeekableStreamIndexTask<String, String>> createIndexTasks(
      int replicas,
      String baseSequenceName,
      ObjectMapper sortingMapper,
      TreeMap<Integer, Map<String, String>> sequenceOffsets,
      SeekableStreamIndexTaskIOConfig taskIoConfig,
      SeekableStreamIndexTaskTuningConfig taskTuningConfig,
      RowIngestionMetersFactory rowIngestionMetersFactory
  )
  {
    return ImmutableList.of(new TestSeekableStreamIndexTask(
        "id",
        null,
        getDataSchema(),
        taskTuningConfig,
        taskIoConfig,
        null,
        null,
        null,
        rowIngestionMetersFactory,
        null
    ));
  }
  // Every partition maps to task group 0.
  @Override
  protected int getTaskGroupIdForPartition(String partition)
  {
    return 0;
  }
  // Metadata always considered a match in these tests.
  @Override
  protected boolean checkSourceMetadataMatch(DataSourceMetadata metadata)
  {
    return true;
  }
  // Every task type is accepted in these tests.
  @Override
  protected boolean doesTaskTypeMatchSupervisor(Task task)
  {
    return true;
  }
  // Returns null; presumably reset metadata is never used here — TODO confirm.
  @Override
  protected SeekableStreamDataSourceMetadata<String, String> createDataSourceMetaDataForReset(
      String stream,
      Map<String, String> map
  )
  {
    return null;
  }
  // Sequence numbers are compared numerically by parsing them as BigIntegers.
  @Override
  protected OrderedSequenceNumber<String> makeSequenceNumber(String seq, boolean isExclusive)
  {
    return new OrderedSequenceNumber<String>(seq, isExclusive)
    {
      @Override
      public int compareTo(OrderedSequenceNumber<String> o)
      {
        return new BigInteger(this.get()).compareTo(new BigInteger(o.get()));
      }
    };
  }
  // No-op: periodic reporting is not exercised by these tests.
  @Override
  protected void scheduleReporting(ScheduledExecutorService reportingExec)
  {
    // do nothing
  }
  // Returns null; lag reporting is not exercised by these tests.
  @Override
  protected Map<String, String> getLagPerPartition(Map<String, String> currentOffsets)
  {
    return null;
  }
  // Always the shared mock record supplier from the enclosing test.
  @Override
  protected RecordSupplier<String, String> setupRecordSupplier()
  {
    return recordSupplier;
  }
  // Report payload with fixed values; most optional fields are left null.
  @Override
  protected SeekableStreamSupervisorReportPayload<String, String> createReportPayload(
      int numPartitions,
      boolean includeOffsets
  )
  {
    return new SeekableStreamSupervisorReportPayload<String, String>(
        DATASOURCE,
        STREAM,
        1,
        1,
        1L,
        null,
        null,
        null,
        null,
        false,
        true,
        null,
        null,
        null
    )
    {
    };
  }
  // Marker string for an offset that has not been set.
  @Override
  protected String getNotSetMarker()
  {
    return "NOT_SET";
  }
  // Marker string for end-of-partition.
  @Override
  protected String getEndOfPartitionMarker()
  {
    return "EOF";
  }
  // Shards never end in these tests.
  @Override
  protected boolean isEndOfShard(String seqNum)
  {
    return false;
  }
  // Shards never expire in these tests.
  @Override
  protected boolean isShardExpirationMarker(String seqNum)
  {
    return false;
  }
  // Exclusive start sequences are not used for non-first sequences.
  @Override
  protected boolean useExclusiveStartSequenceNumberForNonFirstSequence()
  {
    return false;
  }
}
}
| |
/*
* ******************************************************************************
* Copyright 2014-2019 Spectra Logic Corporation. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use
* this file except in compliance with the License. A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file.
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
* ****************************************************************************
*/
// This code is auto-generated, do not modify
package com.spectralogic.ds3client;
import com.spectralogic.ds3client.annotations.Action;
import com.spectralogic.ds3client.annotations.Resource;
import com.spectralogic.ds3client.annotations.ResponsePayloadModel;
import com.spectralogic.ds3client.commands.*;
import com.spectralogic.ds3client.commands.spectrads3.*;
import com.spectralogic.ds3client.commands.spectrads3.notifications.*;
import com.spectralogic.ds3client.models.JobNode;
import com.spectralogic.ds3client.networking.ConnectionDetails;
import com.spectralogic.ds3client.commands.parsers.interfaces.GetObjectCustomParserParameters;
import com.spectralogic.ds3client.commands.parsers.utils.Function;
import java.io.Closeable;
import java.io.IOException;
public interface Ds3Client extends Closeable {
ConnectionDetails getConnectionDetails();
AbortMultiPartUploadResponse abortMultiPartUpload(final AbortMultiPartUploadRequest request)
throws IOException;
CompleteBlobResponse completeBlob(final CompleteBlobRequest request)
throws IOException;
CompleteMultiPartUploadResponse completeMultiPartUpload(final CompleteMultiPartUploadRequest request)
throws IOException;
PutBucketResponse putBucket(final PutBucketRequest request)
throws IOException;
PutMultiPartUploadPartResponse putMultiPartUploadPart(final PutMultiPartUploadPartRequest request)
throws IOException;
PutObjectResponse putObject(final PutObjectRequest request)
throws IOException;
DeleteBucketResponse deleteBucket(final DeleteBucketRequest request)
throws IOException;
DeleteObjectResponse deleteObject(final DeleteObjectRequest request)
throws IOException;
DeleteObjectsResponse deleteObjects(final DeleteObjectsRequest request)
throws IOException;
GetBucketResponse getBucket(final GetBucketRequest request)
throws IOException;
GetServiceResponse getService(final GetServiceRequest request)
throws IOException;
HeadBucketResponse headBucket(final HeadBucketRequest request)
throws IOException;
HeadObjectResponse headObject(final HeadObjectRequest request)
throws IOException;
InitiateMultiPartUploadResponse initiateMultiPartUpload(final InitiateMultiPartUploadRequest request)
throws IOException;
ListMultiPartUploadPartsResponse listMultiPartUploadParts(final ListMultiPartUploadPartsRequest request)
throws IOException;
ListMultiPartUploadsResponse listMultiPartUploads(final ListMultiPartUploadsRequest request)
throws IOException;
@ResponsePayloadModel("BucketAcl")
@Action("CREATE")
@Resource("BUCKET_ACL")
PutBucketAclForGroupSpectraS3Response putBucketAclForGroupSpectraS3(final PutBucketAclForGroupSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("BucketAcl")
@Action("CREATE")
@Resource("BUCKET_ACL")
PutBucketAclForUserSpectraS3Response putBucketAclForUserSpectraS3(final PutBucketAclForUserSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("DataPolicyAcl")
@Action("CREATE")
@Resource("DATA_POLICY_ACL")
PutDataPolicyAclForGroupSpectraS3Response putDataPolicyAclForGroupSpectraS3(final PutDataPolicyAclForGroupSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("DataPolicyAcl")
@Action("CREATE")
@Resource("DATA_POLICY_ACL")
PutDataPolicyAclForUserSpectraS3Response putDataPolicyAclForUserSpectraS3(final PutDataPolicyAclForUserSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("BucketAcl")
@Action("CREATE")
@Resource("BUCKET_ACL")
PutGlobalBucketAclForGroupSpectraS3Response putGlobalBucketAclForGroupSpectraS3(final PutGlobalBucketAclForGroupSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("BucketAcl")
@Action("CREATE")
@Resource("BUCKET_ACL")
PutGlobalBucketAclForUserSpectraS3Response putGlobalBucketAclForUserSpectraS3(final PutGlobalBucketAclForUserSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("DataPolicyAcl")
@Action("CREATE")
@Resource("DATA_POLICY_ACL")
PutGlobalDataPolicyAclForGroupSpectraS3Response putGlobalDataPolicyAclForGroupSpectraS3(final PutGlobalDataPolicyAclForGroupSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("DataPolicyAcl")
@Action("CREATE")
@Resource("DATA_POLICY_ACL")
PutGlobalDataPolicyAclForUserSpectraS3Response putGlobalDataPolicyAclForUserSpectraS3(final PutGlobalDataPolicyAclForUserSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("BUCKET_ACL")
DeleteBucketAclSpectraS3Response deleteBucketAclSpectraS3(final DeleteBucketAclSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("DATA_POLICY_ACL")
DeleteDataPolicyAclSpectraS3Response deleteDataPolicyAclSpectraS3(final DeleteDataPolicyAclSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("BucketAcl")
@Action("SHOW")
@Resource("BUCKET_ACL")
GetBucketAclSpectraS3Response getBucketAclSpectraS3(final GetBucketAclSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("BucketAclList")
@Action("LIST")
@Resource("BUCKET_ACL")
GetBucketAclsSpectraS3Response getBucketAclsSpectraS3(final GetBucketAclsSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("DataPolicyAcl")
@Action("SHOW")
@Resource("DATA_POLICY_ACL")
GetDataPolicyAclSpectraS3Response getDataPolicyAclSpectraS3(final GetDataPolicyAclSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("DataPolicyAclList")
@Action("LIST")
@Resource("DATA_POLICY_ACL")
GetDataPolicyAclsSpectraS3Response getDataPolicyAclsSpectraS3(final GetDataPolicyAclsSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Bucket")
@Action("CREATE")
@Resource("BUCKET")
PutBucketSpectraS3Response putBucketSpectraS3(final PutBucketSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("BUCKET")
DeleteBucketSpectraS3Response deleteBucketSpectraS3(final DeleteBucketSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Bucket")
@Action("SHOW")
@Resource("BUCKET")
GetBucketSpectraS3Response getBucketSpectraS3(final GetBucketSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("BucketList")
@Action("LIST")
@Resource("BUCKET")
GetBucketsSpectraS3Response getBucketsSpectraS3(final GetBucketsSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Bucket")
@Action("MODIFY")
@Resource("BUCKET")
ModifyBucketSpectraS3Response modifyBucketSpectraS3(final ModifyBucketSpectraS3Request request)
throws IOException;
@Action("BULK_MODIFY")
@Resource("CACHE_FILESYSTEM")
ForceFullCacheReclaimSpectraS3Response forceFullCacheReclaimSpectraS3(final ForceFullCacheReclaimSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("CacheFilesystem")
@Action("SHOW")
@Resource("CACHE_FILESYSTEM")
GetCacheFilesystemSpectraS3Response getCacheFilesystemSpectraS3(final GetCacheFilesystemSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("CacheFilesystemList")
@Action("LIST")
@Resource("CACHE_FILESYSTEM")
GetCacheFilesystemsSpectraS3Response getCacheFilesystemsSpectraS3(final GetCacheFilesystemsSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("CacheInformation")
@Action("LIST")
@Resource("CACHE_STATE")
GetCacheStateSpectraS3Response getCacheStateSpectraS3(final GetCacheStateSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("CacheFilesystem")
@Action("MODIFY")
@Resource("CACHE_FILESYSTEM")
ModifyCacheFilesystemSpectraS3Response modifyCacheFilesystemSpectraS3(final ModifyCacheFilesystemSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("CapacitySummaryContainer")
@Action("LIST")
@Resource("CAPACITY_SUMMARY")
GetBucketCapacitySummarySpectraS3Response getBucketCapacitySummarySpectraS3(final GetBucketCapacitySummarySpectraS3Request request)
throws IOException;
@ResponsePayloadModel("CapacitySummaryContainer")
@Action("LIST")
@Resource("CAPACITY_SUMMARY")
GetStorageDomainCapacitySummarySpectraS3Response getStorageDomainCapacitySummarySpectraS3(final GetStorageDomainCapacitySummarySpectraS3Request request)
throws IOException;
@ResponsePayloadModel("CapacitySummaryContainer")
@Action("LIST")
@Resource("CAPACITY_SUMMARY")
GetSystemCapacitySummarySpectraS3Response getSystemCapacitySummarySpectraS3(final GetSystemCapacitySummarySpectraS3Request request)
throws IOException;
@ResponsePayloadModel("DataPathBackend")
@Action("LIST")
@Resource("DATA_PATH_BACKEND")
GetDataPathBackendSpectraS3Response getDataPathBackendSpectraS3(final GetDataPathBackendSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("BlobStoreTasksInformation")
@Action("LIST")
@Resource("BLOB_STORE_TASK")
GetDataPlannerBlobStoreTasksSpectraS3Response getDataPlannerBlobStoreTasksSpectraS3(final GetDataPlannerBlobStoreTasksSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("DataPathBackend")
@Action("BULK_MODIFY")
@Resource("DATA_PATH_BACKEND")
ModifyDataPathBackendSpectraS3Response modifyDataPathBackendSpectraS3(final ModifyDataPathBackendSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("AzureDataReplicationRule")
@Action("CREATE")
@Resource("AZURE_DATA_REPLICATION_RULE")
PutAzureDataReplicationRuleSpectraS3Response putAzureDataReplicationRuleSpectraS3(final PutAzureDataReplicationRuleSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("DataPersistenceRule")
@Action("CREATE")
@Resource("DATA_PERSISTENCE_RULE")
PutDataPersistenceRuleSpectraS3Response putDataPersistenceRuleSpectraS3(final PutDataPersistenceRuleSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("DataPolicy")
@Action("CREATE")
@Resource("DATA_POLICY")
PutDataPolicySpectraS3Response putDataPolicySpectraS3(final PutDataPolicySpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Ds3DataReplicationRule")
@Action("CREATE")
@Resource("DS3_DATA_REPLICATION_RULE")
PutDs3DataReplicationRuleSpectraS3Response putDs3DataReplicationRuleSpectraS3(final PutDs3DataReplicationRuleSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("S3DataReplicationRule")
@Action("CREATE")
@Resource("S3_DATA_REPLICATION_RULE")
PutS3DataReplicationRuleSpectraS3Response putS3DataReplicationRuleSpectraS3(final PutS3DataReplicationRuleSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("AZURE_DATA_REPLICATION_RULE")
DeleteAzureDataReplicationRuleSpectraS3Response deleteAzureDataReplicationRuleSpectraS3(final DeleteAzureDataReplicationRuleSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("DATA_PERSISTENCE_RULE")
DeleteDataPersistenceRuleSpectraS3Response deleteDataPersistenceRuleSpectraS3(final DeleteDataPersistenceRuleSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("DATA_POLICY")
DeleteDataPolicySpectraS3Response deleteDataPolicySpectraS3(final DeleteDataPolicySpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("DS3_DATA_REPLICATION_RULE")
DeleteDs3DataReplicationRuleSpectraS3Response deleteDs3DataReplicationRuleSpectraS3(final DeleteDs3DataReplicationRuleSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("S3_DATA_REPLICATION_RULE")
DeleteS3DataReplicationRuleSpectraS3Response deleteS3DataReplicationRuleSpectraS3(final DeleteS3DataReplicationRuleSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("AzureDataReplicationRule")
@Action("SHOW")
@Resource("AZURE_DATA_REPLICATION_RULE")
GetAzureDataReplicationRuleSpectraS3Response getAzureDataReplicationRuleSpectraS3(final GetAzureDataReplicationRuleSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("AzureDataReplicationRuleList")
@Action("LIST")
@Resource("AZURE_DATA_REPLICATION_RULE")
GetAzureDataReplicationRulesSpectraS3Response getAzureDataReplicationRulesSpectraS3(final GetAzureDataReplicationRulesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("DataPersistenceRule")
@Action("SHOW")
@Resource("DATA_PERSISTENCE_RULE")
GetDataPersistenceRuleSpectraS3Response getDataPersistenceRuleSpectraS3(final GetDataPersistenceRuleSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("DataPersistenceRuleList")
@Action("LIST")
@Resource("DATA_PERSISTENCE_RULE")
GetDataPersistenceRulesSpectraS3Response getDataPersistenceRulesSpectraS3(final GetDataPersistenceRulesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("DataPolicyList")
@Action("LIST")
@Resource("DATA_POLICY")
GetDataPoliciesSpectraS3Response getDataPoliciesSpectraS3(final GetDataPoliciesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("DataPolicy")
@Action("SHOW")
@Resource("DATA_POLICY")
GetDataPolicySpectraS3Response getDataPolicySpectraS3(final GetDataPolicySpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Ds3DataReplicationRule")
@Action("SHOW")
@Resource("DS3_DATA_REPLICATION_RULE")
GetDs3DataReplicationRuleSpectraS3Response getDs3DataReplicationRuleSpectraS3(final GetDs3DataReplicationRuleSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Ds3DataReplicationRuleList")
@Action("LIST")
@Resource("DS3_DATA_REPLICATION_RULE")
GetDs3DataReplicationRulesSpectraS3Response getDs3DataReplicationRulesSpectraS3(final GetDs3DataReplicationRulesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("S3DataReplicationRule")
@Action("SHOW")
@Resource("S3_DATA_REPLICATION_RULE")
GetS3DataReplicationRuleSpectraS3Response getS3DataReplicationRuleSpectraS3(final GetS3DataReplicationRuleSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("S3DataReplicationRuleList")
@Action("LIST")
@Resource("S3_DATA_REPLICATION_RULE")
GetS3DataReplicationRulesSpectraS3Response getS3DataReplicationRulesSpectraS3(final GetS3DataReplicationRulesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("AzureDataReplicationRule")
@Action("MODIFY")
@Resource("AZURE_DATA_REPLICATION_RULE")
ModifyAzureDataReplicationRuleSpectraS3Response modifyAzureDataReplicationRuleSpectraS3(final ModifyAzureDataReplicationRuleSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("DataPersistenceRule")
@Action("MODIFY")
@Resource("DATA_PERSISTENCE_RULE")
ModifyDataPersistenceRuleSpectraS3Response modifyDataPersistenceRuleSpectraS3(final ModifyDataPersistenceRuleSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("DataPolicy")
@Action("MODIFY")
@Resource("DATA_POLICY")
ModifyDataPolicySpectraS3Response modifyDataPolicySpectraS3(final ModifyDataPolicySpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Ds3DataReplicationRule")
@Action("MODIFY")
@Resource("DS3_DATA_REPLICATION_RULE")
ModifyDs3DataReplicationRuleSpectraS3Response modifyDs3DataReplicationRuleSpectraS3(final ModifyDs3DataReplicationRuleSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("S3DataReplicationRule")
@Action("MODIFY")
@Resource("S3_DATA_REPLICATION_RULE")
ModifyS3DataReplicationRuleSpectraS3Response modifyS3DataReplicationRuleSpectraS3(final ModifyS3DataReplicationRuleSpectraS3Request request)
throws IOException;
@Action("BULK_DELETE")
@Resource("SUSPECT_BLOB_AZURE_TARGET")
ClearSuspectBlobAzureTargetsSpectraS3Response clearSuspectBlobAzureTargetsSpectraS3(final ClearSuspectBlobAzureTargetsSpectraS3Request request)
throws IOException;
@Action("BULK_DELETE")
@Resource("SUSPECT_BLOB_DS3_TARGET")
ClearSuspectBlobDs3TargetsSpectraS3Response clearSuspectBlobDs3TargetsSpectraS3(final ClearSuspectBlobDs3TargetsSpectraS3Request request)
throws IOException;
@Action("BULK_DELETE")
@Resource("SUSPECT_BLOB_POOL")
ClearSuspectBlobPoolsSpectraS3Response clearSuspectBlobPoolsSpectraS3(final ClearSuspectBlobPoolsSpectraS3Request request)
throws IOException;
@Action("BULK_DELETE")
@Resource("SUSPECT_BLOB_S3_TARGET")
ClearSuspectBlobS3TargetsSpectraS3Response clearSuspectBlobS3TargetsSpectraS3(final ClearSuspectBlobS3TargetsSpectraS3Request request)
throws IOException;
@Action("BULK_DELETE")
@Resource("SUSPECT_BLOB_TAPE")
ClearSuspectBlobTapesSpectraS3Response clearSuspectBlobTapesSpectraS3(final ClearSuspectBlobTapesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("AzureDataReplicationRuleList")
@Action("LIST")
@Resource("DEGRADED_AZURE_DATA_REPLICATION_RULE")
GetDegradedAzureDataReplicationRulesSpectraS3Response getDegradedAzureDataReplicationRulesSpectraS3(final GetDegradedAzureDataReplicationRulesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("DegradedBlobList")
@Action("LIST")
@Resource("DEGRADED_BLOB")
GetDegradedBlobsSpectraS3Response getDegradedBlobsSpectraS3(final GetDegradedBlobsSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("BucketList")
@Action("LIST")
@Resource("DEGRADED_BUCKET")
GetDegradedBucketsSpectraS3Response getDegradedBucketsSpectraS3(final GetDegradedBucketsSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("DataPersistenceRuleList")
@Action("LIST")
@Resource("DEGRADED_DATA_PERSISTENCE_RULE")
GetDegradedDataPersistenceRulesSpectraS3Response getDegradedDataPersistenceRulesSpectraS3(final GetDegradedDataPersistenceRulesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Ds3DataReplicationRuleList")
@Action("LIST")
@Resource("DEGRADED_DS3_DATA_REPLICATION_RULE")
GetDegradedDs3DataReplicationRulesSpectraS3Response getDegradedDs3DataReplicationRulesSpectraS3(final GetDegradedDs3DataReplicationRulesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("S3DataReplicationRuleList")
@Action("LIST")
@Resource("DEGRADED_S3_DATA_REPLICATION_RULE")
GetDegradedS3DataReplicationRulesSpectraS3Response getDegradedS3DataReplicationRulesSpectraS3(final GetDegradedS3DataReplicationRulesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("SuspectBlobAzureTargetList")
@Action("LIST")
@Resource("SUSPECT_BLOB_AZURE_TARGET")
GetSuspectBlobAzureTargetsSpectraS3Response getSuspectBlobAzureTargetsSpectraS3(final GetSuspectBlobAzureTargetsSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("SuspectBlobDs3TargetList")
@Action("LIST")
@Resource("SUSPECT_BLOB_DS3_TARGET")
GetSuspectBlobDs3TargetsSpectraS3Response getSuspectBlobDs3TargetsSpectraS3(final GetSuspectBlobDs3TargetsSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("SuspectBlobPoolList")
@Action("LIST")
@Resource("SUSPECT_BLOB_POOL")
GetSuspectBlobPoolsSpectraS3Response getSuspectBlobPoolsSpectraS3(final GetSuspectBlobPoolsSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("SuspectBlobS3TargetList")
@Action("LIST")
@Resource("SUSPECT_BLOB_S3_TARGET")
GetSuspectBlobS3TargetsSpectraS3Response getSuspectBlobS3TargetsSpectraS3(final GetSuspectBlobS3TargetsSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("SuspectBlobTapeList")
@Action("LIST")
@Resource("SUSPECT_BLOB_TAPE")
GetSuspectBlobTapesSpectraS3Response getSuspectBlobTapesSpectraS3(final GetSuspectBlobTapesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("BucketList")
@Action("LIST")
@Resource("SUSPECT_BUCKET")
GetSuspectBucketsSpectraS3Response getSuspectBucketsSpectraS3(final GetSuspectBucketsSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("S3ObjectList")
@Action("LIST")
@Resource("SUSPECT_OBJECT")
GetSuspectObjectsSpectraS3Response getSuspectObjectsSpectraS3(final GetSuspectObjectsSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("BulkObjectList")
@Action("LIST")
@Resource("SUSPECT_OBJECT")
GetSuspectObjectsWithFullDetailsSpectraS3Response getSuspectObjectsWithFullDetailsSpectraS3(final GetSuspectObjectsWithFullDetailsSpectraS3Request request)
throws IOException;
@Action("BULK_MODIFY")
@Resource("SUSPECT_BLOB_AZURE_TARGET")
MarkSuspectBlobAzureTargetsAsDegradedSpectraS3Response markSuspectBlobAzureTargetsAsDegradedSpectraS3(final MarkSuspectBlobAzureTargetsAsDegradedSpectraS3Request request)
throws IOException;
@Action("BULK_MODIFY")
@Resource("SUSPECT_BLOB_DS3_TARGET")
MarkSuspectBlobDs3TargetsAsDegradedSpectraS3Response markSuspectBlobDs3TargetsAsDegradedSpectraS3(final MarkSuspectBlobDs3TargetsAsDegradedSpectraS3Request request)
throws IOException;
@Action("BULK_MODIFY")
@Resource("SUSPECT_BLOB_POOL")
MarkSuspectBlobPoolsAsDegradedSpectraS3Response markSuspectBlobPoolsAsDegradedSpectraS3(final MarkSuspectBlobPoolsAsDegradedSpectraS3Request request)
throws IOException;
@Action("BULK_MODIFY")
@Resource("SUSPECT_BLOB_S3_TARGET")
MarkSuspectBlobS3TargetsAsDegradedSpectraS3Response markSuspectBlobS3TargetsAsDegradedSpectraS3(final MarkSuspectBlobS3TargetsAsDegradedSpectraS3Request request)
throws IOException;
@Action("BULK_MODIFY")
@Resource("SUSPECT_BLOB_TAPE")
MarkSuspectBlobTapesAsDegradedSpectraS3Response markSuspectBlobTapesAsDegradedSpectraS3(final MarkSuspectBlobTapesAsDegradedSpectraS3Request request)
throws IOException;
// --- Group and group-member operations ----------------------------------

/** CREATE on GROUP_MEMBER: adds a group as a member of a group; response payload model "GroupMember". */
@ResponsePayloadModel("GroupMember")
@Action("CREATE")
@Resource("GROUP_MEMBER")
PutGroupGroupMemberSpectraS3Response putGroupGroupMemberSpectraS3(final PutGroupGroupMemberSpectraS3Request request)
throws IOException;
/** CREATE on GROUP: creates a group; response payload model "Group". */
@ResponsePayloadModel("Group")
@Action("CREATE")
@Resource("GROUP")
PutGroupSpectraS3Response putGroupSpectraS3(final PutGroupSpectraS3Request request)
throws IOException;
/** CREATE on GROUP_MEMBER: adds a user as a member of a group; response payload model "GroupMember". */
@ResponsePayloadModel("GroupMember")
@Action("CREATE")
@Resource("GROUP_MEMBER")
PutUserGroupMemberSpectraS3Response putUserGroupMemberSpectraS3(final PutUserGroupMemberSpectraS3Request request)
throws IOException;
/** DELETE on GROUP_MEMBER: removes a group membership. */
@Action("DELETE")
@Resource("GROUP_MEMBER")
DeleteGroupMemberSpectraS3Response deleteGroupMemberSpectraS3(final DeleteGroupMemberSpectraS3Request request)
throws IOException;
/** DELETE on GROUP: deletes a group. */
@Action("DELETE")
@Resource("GROUP")
DeleteGroupSpectraS3Response deleteGroupSpectraS3(final DeleteGroupSpectraS3Request request)
throws IOException;
/** SHOW on GROUP_MEMBER: fetches a single group membership; response payload model "GroupMember". */
@ResponsePayloadModel("GroupMember")
@Action("SHOW")
@Resource("GROUP_MEMBER")
GetGroupMemberSpectraS3Response getGroupMemberSpectraS3(final GetGroupMemberSpectraS3Request request)
throws IOException;
/** LIST on GROUP_MEMBER: lists group memberships; response payload model "GroupMemberList". */
@ResponsePayloadModel("GroupMemberList")
@Action("LIST")
@Resource("GROUP_MEMBER")
GetGroupMembersSpectraS3Response getGroupMembersSpectraS3(final GetGroupMembersSpectraS3Request request)
throws IOException;
/** SHOW on GROUP: fetches a single group; response payload model "Group". */
@ResponsePayloadModel("Group")
@Action("SHOW")
@Resource("GROUP")
GetGroupSpectraS3Response getGroupSpectraS3(final GetGroupSpectraS3Request request)
throws IOException;
/** LIST on GROUP: lists groups; response payload model "GroupList". */
@ResponsePayloadModel("GroupList")
@Action("LIST")
@Resource("GROUP")
GetGroupsSpectraS3Response getGroupsSpectraS3(final GetGroupsSpectraS3Request request)
throws IOException;
/** MODIFY on GROUP: updates a group; response payload model "Group". */
@ResponsePayloadModel("Group")
@Action("MODIFY")
@Resource("GROUP")
ModifyGroupSpectraS3Response modifyGroupSpectraS3(final ModifyGroupSpectraS3Request request)
throws IOException;
/** MODIFY on GROUP: verifies whether a user is a member of a group; response payload model "Group". */
@ResponsePayloadModel("Group")
@Action("MODIFY")
@Resource("GROUP")
VerifyUserIsMemberOfGroupSpectraS3Response verifyUserIsMemberOfGroupSpectraS3(final VerifyUserIsMemberOfGroupSpectraS3Request request)
throws IOException;
// --- Job-chunk allocation and job cancel/clear operations ---------------

/** MODIFY on JOB_CHUNK: allocates a job chunk; response payload model "Objects". */
@ResponsePayloadModel("Objects")
@Action("MODIFY")
@Resource("JOB_CHUNK")
AllocateJobChunkSpectraS3Response allocateJobChunkSpectraS3(final AllocateJobChunkSpectraS3Request request)
throws IOException;
/** DELETE on ACTIVE_JOB: cancels a single active job. */
@Action("DELETE")
@Resource("ACTIVE_JOB")
CancelActiveJobSpectraS3Response cancelActiveJobSpectraS3(final CancelActiveJobSpectraS3Request request)
throws IOException;
/** BULK_DELETE on ACTIVE_JOB: cancels all active jobs. */
@Action("BULK_DELETE")
@Resource("ACTIVE_JOB")
CancelAllActiveJobsSpectraS3Response cancelAllActiveJobsSpectraS3(final CancelAllActiveJobsSpectraS3Request request)
throws IOException;
/** BULK_DELETE on JOB: cancels all jobs. */
@Action("BULK_DELETE")
@Resource("JOB")
CancelAllJobsSpectraS3Response cancelAllJobsSpectraS3(final CancelAllJobsSpectraS3Request request)
throws IOException;
/** DELETE on JOB: cancels a single job. */
@Action("DELETE")
@Resource("JOB")
CancelJobSpectraS3Response cancelJobSpectraS3(final CancelJobSpectraS3Request request)
throws IOException;
/** BULK_DELETE on CANCELED_JOB: clears the canceled-job history. */
@Action("BULK_DELETE")
@Resource("CANCELED_JOB")
ClearAllCanceledJobsSpectraS3Response clearAllCanceledJobsSpectraS3(final ClearAllCanceledJobsSpectraS3Request request)
throws IOException;
/** BULK_DELETE on COMPLETED_JOB: clears the completed-job history. */
@Action("BULK_DELETE")
@Resource("COMPLETED_JOB")
ClearAllCompletedJobsSpectraS3Response clearAllCompletedJobsSpectraS3(final ClearAllCompletedJobsSpectraS3Request request)
throws IOException;
// --- Bulk-job operations (all return a "MasterObjectList" payload) ------

/** MODIFY on JOB: closes an aggregating job. */
@ResponsePayloadModel("MasterObjectList")
@Action("MODIFY")
@Resource("JOB")
CloseAggregatingJobSpectraS3Response closeAggregatingJobSpectraS3(final CloseAggregatingJobSpectraS3Request request)
throws IOException;
/** MODIFY on BUCKET: starts a bulk GET job. */
@ResponsePayloadModel("MasterObjectList")
@Action("MODIFY")
@Resource("BUCKET")
GetBulkJobSpectraS3Response getBulkJobSpectraS3(final GetBulkJobSpectraS3Request request)
throws IOException;
/** MODIFY on BUCKET: starts a bulk PUT job. */
@ResponsePayloadModel("MasterObjectList")
@Action("MODIFY")
@Resource("BUCKET")
PutBulkJobSpectraS3Response putBulkJobSpectraS3(final PutBulkJobSpectraS3Request request)
throws IOException;
/** MODIFY on BUCKET: starts a bulk VERIFY job. */
@ResponsePayloadModel("MasterObjectList")
@Action("MODIFY")
@Resource("BUCKET")
VerifyBulkJobSpectraS3Response verifyBulkJobSpectraS3(final VerifyBulkJobSpectraS3Request request)
throws IOException;
// --- Job and job-chunk read operations ----------------------------------

/** SHOW on ACTIVE_JOB: fetches a single active job; response payload model "ActiveJob". */
@ResponsePayloadModel("ActiveJob")
@Action("SHOW")
@Resource("ACTIVE_JOB")
GetActiveJobSpectraS3Response getActiveJobSpectraS3(final GetActiveJobSpectraS3Request request)
throws IOException;
/** LIST on ACTIVE_JOB: lists active jobs; response payload model "ActiveJobList". */
@ResponsePayloadModel("ActiveJobList")
@Action("LIST")
@Resource("ACTIVE_JOB")
GetActiveJobsSpectraS3Response getActiveJobsSpectraS3(final GetActiveJobsSpectraS3Request request)
throws IOException;
/** SHOW on CANCELED_JOB: fetches a single canceled job; response payload model "CanceledJob". */
@ResponsePayloadModel("CanceledJob")
@Action("SHOW")
@Resource("CANCELED_JOB")
GetCanceledJobSpectraS3Response getCanceledJobSpectraS3(final GetCanceledJobSpectraS3Request request)
throws IOException;
/** LIST on CANCELED_JOB: lists canceled jobs; response payload model "CanceledJobList". */
@ResponsePayloadModel("CanceledJobList")
@Action("LIST")
@Resource("CANCELED_JOB")
GetCanceledJobsSpectraS3Response getCanceledJobsSpectraS3(final GetCanceledJobsSpectraS3Request request)
throws IOException;
/** SHOW on COMPLETED_JOB: fetches a single completed job; response payload model "CompletedJob". */
@ResponsePayloadModel("CompletedJob")
@Action("SHOW")
@Resource("COMPLETED_JOB")
GetCompletedJobSpectraS3Response getCompletedJobSpectraS3(final GetCompletedJobSpectraS3Request request)
throws IOException;
/** LIST on COMPLETED_JOB: lists completed jobs; response payload model "CompletedJobList". */
@ResponsePayloadModel("CompletedJobList")
@Action("LIST")
@Resource("COMPLETED_JOB")
GetCompletedJobsSpectraS3Response getCompletedJobsSpectraS3(final GetCompletedJobsSpectraS3Request request)
throws IOException;
/** SHOW on JOB_CHUNK_DAO: fetches a job chunk DAO; response payload model "JobChunk". */
@ResponsePayloadModel("JobChunk")
@Action("SHOW")
@Resource("JOB_CHUNK_DAO")
GetJobChunkDaoSpectraS3Response getJobChunkDaoSpectraS3(final GetJobChunkDaoSpectraS3Request request)
throws IOException;
/** SHOW on JOB_CHUNK: fetches a single job chunk; response payload model "Objects". */
@ResponsePayloadModel("Objects")
@Action("SHOW")
@Resource("JOB_CHUNK")
GetJobChunkSpectraS3Response getJobChunkSpectraS3(final GetJobChunkSpectraS3Request request)
throws IOException;
/** LIST on JOB_CHUNK: lists job chunks ready for client processing; response payload model "MasterObjectList". */
@ResponsePayloadModel("MasterObjectList")
@Action("LIST")
@Resource("JOB_CHUNK")
GetJobChunksReadyForClientProcessingSpectraS3Response getJobChunksReadyForClientProcessingSpectraS3(final GetJobChunksReadyForClientProcessingSpectraS3Request request)
throws IOException;
/** SHOW on JOB: fetches a single job; response payload model "MasterObjectList". */
@ResponsePayloadModel("MasterObjectList")
@Action("SHOW")
@Resource("JOB")
GetJobSpectraS3Response getJobSpectraS3(final GetJobSpectraS3Request request)
throws IOException;
/** SHOW on JOB: fetches the job to replicate; response payload model "String". */
@ResponsePayloadModel("String")
@Action("SHOW")
@Resource("JOB")
GetJobToReplicateSpectraS3Response getJobToReplicateSpectraS3(final GetJobToReplicateSpectraS3Request request)
throws IOException;
/** LIST on JOB: lists jobs; response payload model "JobList". */
@ResponsePayloadModel("JobList")
@Action("LIST")
@Resource("JOB")
GetJobsSpectraS3Response getJobsSpectraS3(final GetJobsSpectraS3Request request)
throws IOException;
// --- Job modify / replicate / stage / truncate / verify operations ------

/** MODIFY on ACTIVE_JOB: updates an active job; response payload model "MasterObjectList". */
@ResponsePayloadModel("MasterObjectList")
@Action("MODIFY")
@Resource("ACTIVE_JOB")
ModifyActiveJobSpectraS3Response modifyActiveJobSpectraS3(final ModifyActiveJobSpectraS3Request request)
throws IOException;
/** MODIFY on JOB: updates a job; response payload model "MasterObjectList". */
@ResponsePayloadModel("MasterObjectList")
@Action("MODIFY")
@Resource("JOB")
ModifyJobSpectraS3Response modifyJobSpectraS3(final ModifyJobSpectraS3Request request)
throws IOException;
/** MODIFY on BUCKET: starts a replicate-PUT job; response payload model "MasterObjectList". */
@ResponsePayloadModel("MasterObjectList")
@Action("MODIFY")
@Resource("BUCKET")
ReplicatePutJobSpectraS3Response replicatePutJobSpectraS3(final ReplicatePutJobSpectraS3Request request)
throws IOException;
/** MODIFY on BUCKET: starts a stage-objects job; response payload model "MasterObjectList". */
@ResponsePayloadModel("MasterObjectList")
@Action("MODIFY")
@Resource("BUCKET")
StageObjectsJobSpectraS3Response stageObjectsJobSpectraS3(final StageObjectsJobSpectraS3Request request)
throws IOException;
/** DELETE on ACTIVE_JOB: truncates a single active job. */
@Action("DELETE")
@Resource("ACTIVE_JOB")
TruncateActiveJobSpectraS3Response truncateActiveJobSpectraS3(final TruncateActiveJobSpectraS3Request request)
throws IOException;
/** BULK_DELETE on ACTIVE_JOB: truncates all active jobs. */
@Action("BULK_DELETE")
@Resource("ACTIVE_JOB")
TruncateAllActiveJobsSpectraS3Response truncateAllActiveJobsSpectraS3(final TruncateAllActiveJobsSpectraS3Request request)
throws IOException;
/** BULK_DELETE on JOB: truncates all jobs. */
@Action("BULK_DELETE")
@Resource("JOB")
TruncateAllJobsSpectraS3Response truncateAllJobsSpectraS3(final TruncateAllJobsSpectraS3Request request)
throws IOException;
/** DELETE on JOB: truncates a single job. */
@Action("DELETE")
@Resource("JOB")
TruncateJobSpectraS3Response truncateJobSpectraS3(final TruncateJobSpectraS3Request request)
throws IOException;
/** MODIFY on BUCKET: verifies it is safe to create a PUT job. */
@Action("MODIFY")
@Resource("BUCKET")
VerifySafeToCreatePutJobSpectraS3Response verifySafeToCreatePutJobSpectraS3(final VerifySafeToCreatePutJobSpectraS3Request request)
throws IOException;
// --- Node operations -----------------------------------------------------

/** SHOW on NODE: fetches a single node; response payload model "Node". */
@ResponsePayloadModel("Node")
@Action("SHOW")
@Resource("NODE")
GetNodeSpectraS3Response getNodeSpectraS3(final GetNodeSpectraS3Request request)
throws IOException;
/** LIST on NODE: lists nodes; response payload model "NodeList". */
@ResponsePayloadModel("NodeList")
@Action("LIST")
@Resource("NODE")
GetNodesSpectraS3Response getNodesSpectraS3(final GetNodesSpectraS3Request request)
throws IOException;
/** MODIFY on NODE: updates a node; response payload model "Node". */
@ResponsePayloadModel("Node")
@Action("MODIFY")
@Resource("NODE")
ModifyNodeSpectraS3Response modifyNodeSpectraS3(final ModifyNodeSpectraS3Request request)
throws IOException;
// --- Notification registration creation (CREATE on *_NOTIFICATION_REGISTRATION) ---
// Each method registers a notification for the event class named in the
// resource; the response payload model mirrors the registration type.

/** Registers an Azure-target-failure notification. */
@ResponsePayloadModel("AzureTargetFailureNotificationRegistration")
@Action("CREATE")
@Resource("AZURE_TARGET_FAILURE_NOTIFICATION_REGISTRATION")
PutAzureTargetFailureNotificationRegistrationSpectraS3Response putAzureTargetFailureNotificationRegistrationSpectraS3(final PutAzureTargetFailureNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Registers a bucket-changes notification. */
@ResponsePayloadModel("BucketChangesNotificationRegistration")
@Action("CREATE")
@Resource("BUCKET_CHANGES_NOTIFICATION_REGISTRATION")
PutBucketChangesNotificationRegistrationSpectraS3Response putBucketChangesNotificationRegistrationSpectraS3(final PutBucketChangesNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Registers a DS3-target-failure notification. */
@ResponsePayloadModel("Ds3TargetFailureNotificationRegistration")
@Action("CREATE")
@Resource("DS3_TARGET_FAILURE_NOTIFICATION_REGISTRATION")
PutDs3TargetFailureNotificationRegistrationSpectraS3Response putDs3TargetFailureNotificationRegistrationSpectraS3(final PutDs3TargetFailureNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Registers a job-completed notification. */
@ResponsePayloadModel("JobCompletedNotificationRegistration")
@Action("CREATE")
@Resource("JOB_COMPLETED_NOTIFICATION_REGISTRATION")
PutJobCompletedNotificationRegistrationSpectraS3Response putJobCompletedNotificationRegistrationSpectraS3(final PutJobCompletedNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Registers a job-created notification. */
@ResponsePayloadModel("JobCreatedNotificationRegistration")
@Action("CREATE")
@Resource("JOB_CREATED_NOTIFICATION_REGISTRATION")
PutJobCreatedNotificationRegistrationSpectraS3Response putJobCreatedNotificationRegistrationSpectraS3(final PutJobCreatedNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Registers a job-creation-failed notification. */
@ResponsePayloadModel("JobCreationFailedNotificationRegistration")
@Action("CREATE")
@Resource("JOB_CREATION_FAILED_NOTIFICATION_REGISTRATION")
PutJobCreationFailedNotificationRegistrationSpectraS3Response putJobCreationFailedNotificationRegistrationSpectraS3(final PutJobCreationFailedNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Registers an object-cached notification. */
@ResponsePayloadModel("S3ObjectCachedNotificationRegistration")
@Action("CREATE")
@Resource("OBJECT_CACHED_NOTIFICATION_REGISTRATION")
PutObjectCachedNotificationRegistrationSpectraS3Response putObjectCachedNotificationRegistrationSpectraS3(final PutObjectCachedNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Registers an object-lost notification. */
@ResponsePayloadModel("S3ObjectLostNotificationRegistration")
@Action("CREATE")
@Resource("OBJECT_LOST_NOTIFICATION_REGISTRATION")
PutObjectLostNotificationRegistrationSpectraS3Response putObjectLostNotificationRegistrationSpectraS3(final PutObjectLostNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Registers an object-persisted notification. */
@ResponsePayloadModel("S3ObjectPersistedNotificationRegistration")
@Action("CREATE")
@Resource("OBJECT_PERSISTED_NOTIFICATION_REGISTRATION")
PutObjectPersistedNotificationRegistrationSpectraS3Response putObjectPersistedNotificationRegistrationSpectraS3(final PutObjectPersistedNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Registers a pool-failure notification. */
@ResponsePayloadModel("PoolFailureNotificationRegistration")
@Action("CREATE")
@Resource("POOL_FAILURE_NOTIFICATION_REGISTRATION")
PutPoolFailureNotificationRegistrationSpectraS3Response putPoolFailureNotificationRegistrationSpectraS3(final PutPoolFailureNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Registers an S3-target-failure notification. */
@ResponsePayloadModel("S3TargetFailureNotificationRegistration")
@Action("CREATE")
@Resource("S3_TARGET_FAILURE_NOTIFICATION_REGISTRATION")
PutS3TargetFailureNotificationRegistrationSpectraS3Response putS3TargetFailureNotificationRegistrationSpectraS3(final PutS3TargetFailureNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Registers a storage-domain-failure notification. */
@ResponsePayloadModel("StorageDomainFailureNotificationRegistration")
@Action("CREATE")
@Resource("STORAGE_DOMAIN_FAILURE_NOTIFICATION_REGISTRATION")
PutStorageDomainFailureNotificationRegistrationSpectraS3Response putStorageDomainFailureNotificationRegistrationSpectraS3(final PutStorageDomainFailureNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Registers a system-failure notification. */
@ResponsePayloadModel("SystemFailureNotificationRegistration")
@Action("CREATE")
@Resource("SYSTEM_FAILURE_NOTIFICATION_REGISTRATION")
PutSystemFailureNotificationRegistrationSpectraS3Response putSystemFailureNotificationRegistrationSpectraS3(final PutSystemFailureNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Registers a tape-failure notification. */
@ResponsePayloadModel("TapeFailureNotificationRegistration")
@Action("CREATE")
@Resource("TAPE_FAILURE_NOTIFICATION_REGISTRATION")
PutTapeFailureNotificationRegistrationSpectraS3Response putTapeFailureNotificationRegistrationSpectraS3(final PutTapeFailureNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Registers a tape-partition-failure notification. */
@ResponsePayloadModel("TapePartitionFailureNotificationRegistration")
@Action("CREATE")
@Resource("TAPE_PARTITION_FAILURE_NOTIFICATION_REGISTRATION")
PutTapePartitionFailureNotificationRegistrationSpectraS3Response putTapePartitionFailureNotificationRegistrationSpectraS3(final PutTapePartitionFailureNotificationRegistrationSpectraS3Request request)
throws IOException;
// --- Notification registration deletion (DELETE on *_NOTIFICATION_REGISTRATION) ---

/** Deletes an Azure-target-failure notification registration. */
@Action("DELETE")
@Resource("AZURE_TARGET_FAILURE_NOTIFICATION_REGISTRATION")
DeleteAzureTargetFailureNotificationRegistrationSpectraS3Response deleteAzureTargetFailureNotificationRegistrationSpectraS3(final DeleteAzureTargetFailureNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Deletes a bucket-changes notification registration. */
@Action("DELETE")
@Resource("BUCKET_CHANGES_NOTIFICATION_REGISTRATION")
DeleteBucketChangesNotificationRegistrationSpectraS3Response deleteBucketChangesNotificationRegistrationSpectraS3(final DeleteBucketChangesNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Deletes a DS3-target-failure notification registration. */
@Action("DELETE")
@Resource("DS3_TARGET_FAILURE_NOTIFICATION_REGISTRATION")
DeleteDs3TargetFailureNotificationRegistrationSpectraS3Response deleteDs3TargetFailureNotificationRegistrationSpectraS3(final DeleteDs3TargetFailureNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Deletes a job-completed notification registration. */
@Action("DELETE")
@Resource("JOB_COMPLETED_NOTIFICATION_REGISTRATION")
DeleteJobCompletedNotificationRegistrationSpectraS3Response deleteJobCompletedNotificationRegistrationSpectraS3(final DeleteJobCompletedNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Deletes a job-created notification registration. */
@Action("DELETE")
@Resource("JOB_CREATED_NOTIFICATION_REGISTRATION")
DeleteJobCreatedNotificationRegistrationSpectraS3Response deleteJobCreatedNotificationRegistrationSpectraS3(final DeleteJobCreatedNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Deletes a job-creation-failed notification registration. */
@Action("DELETE")
@Resource("JOB_CREATION_FAILED_NOTIFICATION_REGISTRATION")
DeleteJobCreationFailedNotificationRegistrationSpectraS3Response deleteJobCreationFailedNotificationRegistrationSpectraS3(final DeleteJobCreationFailedNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Deletes an object-cached notification registration. */
@Action("DELETE")
@Resource("OBJECT_CACHED_NOTIFICATION_REGISTRATION")
DeleteObjectCachedNotificationRegistrationSpectraS3Response deleteObjectCachedNotificationRegistrationSpectraS3(final DeleteObjectCachedNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Deletes an object-lost notification registration. */
@Action("DELETE")
@Resource("OBJECT_LOST_NOTIFICATION_REGISTRATION")
DeleteObjectLostNotificationRegistrationSpectraS3Response deleteObjectLostNotificationRegistrationSpectraS3(final DeleteObjectLostNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Deletes an object-persisted notification registration. */
@Action("DELETE")
@Resource("OBJECT_PERSISTED_NOTIFICATION_REGISTRATION")
DeleteObjectPersistedNotificationRegistrationSpectraS3Response deleteObjectPersistedNotificationRegistrationSpectraS3(final DeleteObjectPersistedNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Deletes a pool-failure notification registration. */
@Action("DELETE")
@Resource("POOL_FAILURE_NOTIFICATION_REGISTRATION")
DeletePoolFailureNotificationRegistrationSpectraS3Response deletePoolFailureNotificationRegistrationSpectraS3(final DeletePoolFailureNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Deletes an S3-target-failure notification registration. */
@Action("DELETE")
@Resource("S3_TARGET_FAILURE_NOTIFICATION_REGISTRATION")
DeleteS3TargetFailureNotificationRegistrationSpectraS3Response deleteS3TargetFailureNotificationRegistrationSpectraS3(final DeleteS3TargetFailureNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Deletes a storage-domain-failure notification registration. */
@Action("DELETE")
@Resource("STORAGE_DOMAIN_FAILURE_NOTIFICATION_REGISTRATION")
DeleteStorageDomainFailureNotificationRegistrationSpectraS3Response deleteStorageDomainFailureNotificationRegistrationSpectraS3(final DeleteStorageDomainFailureNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Deletes a system-failure notification registration. */
@Action("DELETE")
@Resource("SYSTEM_FAILURE_NOTIFICATION_REGISTRATION")
DeleteSystemFailureNotificationRegistrationSpectraS3Response deleteSystemFailureNotificationRegistrationSpectraS3(final DeleteSystemFailureNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Deletes a tape-failure notification registration. */
@Action("DELETE")
@Resource("TAPE_FAILURE_NOTIFICATION_REGISTRATION")
DeleteTapeFailureNotificationRegistrationSpectraS3Response deleteTapeFailureNotificationRegistrationSpectraS3(final DeleteTapeFailureNotificationRegistrationSpectraS3Request request)
throws IOException;
/** Deletes a tape-partition-failure notification registration. */
@Action("DELETE")
@Resource("TAPE_PARTITION_FAILURE_NOTIFICATION_REGISTRATION")
DeleteTapePartitionFailureNotificationRegistrationSpectraS3Response deleteTapePartitionFailureNotificationRegistrationSpectraS3(final DeleteTapePartitionFailureNotificationRegistrationSpectraS3Request request)
throws IOException;
// --- Notification registration reads (SHOW fetches one, LIST fetches all) ---

/** SHOW: fetches an Azure-target-failure notification registration. */
@ResponsePayloadModel("AzureTargetFailureNotificationRegistration")
@Action("SHOW")
@Resource("AZURE_TARGET_FAILURE_NOTIFICATION_REGISTRATION")
GetAzureTargetFailureNotificationRegistrationSpectraS3Response getAzureTargetFailureNotificationRegistrationSpectraS3(final GetAzureTargetFailureNotificationRegistrationSpectraS3Request request)
throws IOException;
/** LIST: lists Azure-target-failure notification registrations. */
@ResponsePayloadModel("AzureTargetFailureNotificationRegistrationList")
@Action("LIST")
@Resource("AZURE_TARGET_FAILURE_NOTIFICATION_REGISTRATION")
GetAzureTargetFailureNotificationRegistrationsSpectraS3Response getAzureTargetFailureNotificationRegistrationsSpectraS3(final GetAzureTargetFailureNotificationRegistrationsSpectraS3Request request)
throws IOException;
/** SHOW: fetches a bucket-changes notification registration. */
@ResponsePayloadModel("BucketChangesNotificationRegistration")
@Action("SHOW")
@Resource("BUCKET_CHANGES_NOTIFICATION_REGISTRATION")
GetBucketChangesNotificationRegistrationSpectraS3Response getBucketChangesNotificationRegistrationSpectraS3(final GetBucketChangesNotificationRegistrationSpectraS3Request request)
throws IOException;
/** LIST: lists bucket-changes notification registrations. */
@ResponsePayloadModel("BucketChangesNotificationRegistrationList")
@Action("LIST")
@Resource("BUCKET_CHANGES_NOTIFICATION_REGISTRATION")
GetBucketChangesNotificationRegistrationsSpectraS3Response getBucketChangesNotificationRegistrationsSpectraS3(final GetBucketChangesNotificationRegistrationsSpectraS3Request request)
throws IOException;
/** LIST on BUCKET_HISTORY: lists bucket history events; response payload model "BucketHistoryEventList". */
@ResponsePayloadModel("BucketHistoryEventList")
@Action("LIST")
@Resource("BUCKET_HISTORY")
GetBucketHistorySpectraS3Response getBucketHistorySpectraS3(final GetBucketHistorySpectraS3Request request)
throws IOException;
/** SHOW: fetches a DS3-target-failure notification registration. */
@ResponsePayloadModel("Ds3TargetFailureNotificationRegistration")
@Action("SHOW")
@Resource("DS3_TARGET_FAILURE_NOTIFICATION_REGISTRATION")
GetDs3TargetFailureNotificationRegistrationSpectraS3Response getDs3TargetFailureNotificationRegistrationSpectraS3(final GetDs3TargetFailureNotificationRegistrationSpectraS3Request request)
throws IOException;
/** LIST: lists DS3-target-failure notification registrations. */
@ResponsePayloadModel("Ds3TargetFailureNotificationRegistrationList")
@Action("LIST")
@Resource("DS3_TARGET_FAILURE_NOTIFICATION_REGISTRATION")
GetDs3TargetFailureNotificationRegistrationsSpectraS3Response getDs3TargetFailureNotificationRegistrationsSpectraS3(final GetDs3TargetFailureNotificationRegistrationsSpectraS3Request request)
throws IOException;
/** SHOW: fetches a job-completed notification registration. */
@ResponsePayloadModel("JobCompletedNotificationRegistration")
@Action("SHOW")
@Resource("JOB_COMPLETED_NOTIFICATION_REGISTRATION")
GetJobCompletedNotificationRegistrationSpectraS3Response getJobCompletedNotificationRegistrationSpectraS3(final GetJobCompletedNotificationRegistrationSpectraS3Request request)
throws IOException;
/** LIST: lists job-completed notification registrations. */
@ResponsePayloadModel("JobCompletedNotificationRegistrationList")
@Action("LIST")
@Resource("JOB_COMPLETED_NOTIFICATION_REGISTRATION")
GetJobCompletedNotificationRegistrationsSpectraS3Response getJobCompletedNotificationRegistrationsSpectraS3(final GetJobCompletedNotificationRegistrationsSpectraS3Request request)
throws IOException;
/** SHOW: fetches a job-created notification registration. */
@ResponsePayloadModel("JobCreatedNotificationRegistration")
@Action("SHOW")
@Resource("JOB_CREATED_NOTIFICATION_REGISTRATION")
GetJobCreatedNotificationRegistrationSpectraS3Response getJobCreatedNotificationRegistrationSpectraS3(final GetJobCreatedNotificationRegistrationSpectraS3Request request)
throws IOException;
/** LIST: lists job-created notification registrations. */
@ResponsePayloadModel("JobCreatedNotificationRegistrationList")
@Action("LIST")
@Resource("JOB_CREATED_NOTIFICATION_REGISTRATION")
GetJobCreatedNotificationRegistrationsSpectraS3Response getJobCreatedNotificationRegistrationsSpectraS3(final GetJobCreatedNotificationRegistrationsSpectraS3Request request)
throws IOException;
/** SHOW: fetches a job-creation-failed notification registration. */
@ResponsePayloadModel("JobCreationFailedNotificationRegistration")
@Action("SHOW")
@Resource("JOB_CREATION_FAILED_NOTIFICATION_REGISTRATION")
GetJobCreationFailedNotificationRegistrationSpectraS3Response getJobCreationFailedNotificationRegistrationSpectraS3(final GetJobCreationFailedNotificationRegistrationSpectraS3Request request)
throws IOException;
/** LIST: lists job-creation-failed notification registrations. */
@ResponsePayloadModel("JobCreationFailedNotificationRegistrationList")
@Action("LIST")
@Resource("JOB_CREATION_FAILED_NOTIFICATION_REGISTRATION")
GetJobCreationFailedNotificationRegistrationsSpectraS3Response getJobCreationFailedNotificationRegistrationsSpectraS3(final GetJobCreationFailedNotificationRegistrationsSpectraS3Request request)
throws IOException;
// --- Notification registration reads, continued (SHOW = one, LIST = all) ---

/** SHOW: fetches an object-cached notification registration. */
@ResponsePayloadModel("S3ObjectCachedNotificationRegistration")
@Action("SHOW")
@Resource("OBJECT_CACHED_NOTIFICATION_REGISTRATION")
GetObjectCachedNotificationRegistrationSpectraS3Response getObjectCachedNotificationRegistrationSpectraS3(final GetObjectCachedNotificationRegistrationSpectraS3Request request)
throws IOException;
/** LIST: lists object-cached notification registrations. */
@ResponsePayloadModel("S3ObjectCachedNotificationRegistrationList")
@Action("LIST")
@Resource("OBJECT_CACHED_NOTIFICATION_REGISTRATION")
GetObjectCachedNotificationRegistrationsSpectraS3Response getObjectCachedNotificationRegistrationsSpectraS3(final GetObjectCachedNotificationRegistrationsSpectraS3Request request)
throws IOException;
/** SHOW: fetches an object-lost notification registration. */
@ResponsePayloadModel("S3ObjectLostNotificationRegistration")
@Action("SHOW")
@Resource("OBJECT_LOST_NOTIFICATION_REGISTRATION")
GetObjectLostNotificationRegistrationSpectraS3Response getObjectLostNotificationRegistrationSpectraS3(final GetObjectLostNotificationRegistrationSpectraS3Request request)
throws IOException;
/** LIST: lists object-lost notification registrations. */
@ResponsePayloadModel("S3ObjectLostNotificationRegistrationList")
@Action("LIST")
@Resource("OBJECT_LOST_NOTIFICATION_REGISTRATION")
GetObjectLostNotificationRegistrationsSpectraS3Response getObjectLostNotificationRegistrationsSpectraS3(final GetObjectLostNotificationRegistrationsSpectraS3Request request)
throws IOException;
/** SHOW: fetches an object-persisted notification registration. */
@ResponsePayloadModel("S3ObjectPersistedNotificationRegistration")
@Action("SHOW")
@Resource("OBJECT_PERSISTED_NOTIFICATION_REGISTRATION")
GetObjectPersistedNotificationRegistrationSpectraS3Response getObjectPersistedNotificationRegistrationSpectraS3(final GetObjectPersistedNotificationRegistrationSpectraS3Request request)
throws IOException;
/** LIST: lists object-persisted notification registrations. */
@ResponsePayloadModel("S3ObjectPersistedNotificationRegistrationList")
@Action("LIST")
@Resource("OBJECT_PERSISTED_NOTIFICATION_REGISTRATION")
GetObjectPersistedNotificationRegistrationsSpectraS3Response getObjectPersistedNotificationRegistrationsSpectraS3(final GetObjectPersistedNotificationRegistrationsSpectraS3Request request)
throws IOException;
/** SHOW: fetches a pool-failure notification registration. */
@ResponsePayloadModel("PoolFailureNotificationRegistration")
@Action("SHOW")
@Resource("POOL_FAILURE_NOTIFICATION_REGISTRATION")
GetPoolFailureNotificationRegistrationSpectraS3Response getPoolFailureNotificationRegistrationSpectraS3(final GetPoolFailureNotificationRegistrationSpectraS3Request request)
throws IOException;
/** LIST: lists pool-failure notification registrations. */
@ResponsePayloadModel("PoolFailureNotificationRegistrationList")
@Action("LIST")
@Resource("POOL_FAILURE_NOTIFICATION_REGISTRATION")
GetPoolFailureNotificationRegistrationsSpectraS3Response getPoolFailureNotificationRegistrationsSpectraS3(final GetPoolFailureNotificationRegistrationsSpectraS3Request request)
throws IOException;
/** SHOW: fetches an S3-target-failure notification registration. */
@ResponsePayloadModel("S3TargetFailureNotificationRegistration")
@Action("SHOW")
@Resource("S3_TARGET_FAILURE_NOTIFICATION_REGISTRATION")
GetS3TargetFailureNotificationRegistrationSpectraS3Response getS3TargetFailureNotificationRegistrationSpectraS3(final GetS3TargetFailureNotificationRegistrationSpectraS3Request request)
throws IOException;
/** LIST: lists S3-target-failure notification registrations. */
@ResponsePayloadModel("S3TargetFailureNotificationRegistrationList")
@Action("LIST")
@Resource("S3_TARGET_FAILURE_NOTIFICATION_REGISTRATION")
GetS3TargetFailureNotificationRegistrationsSpectraS3Response getS3TargetFailureNotificationRegistrationsSpectraS3(final GetS3TargetFailureNotificationRegistrationsSpectraS3Request request)
throws IOException;
/** SHOW: fetches a storage-domain-failure notification registration. */
@ResponsePayloadModel("StorageDomainFailureNotificationRegistration")
@Action("SHOW")
@Resource("STORAGE_DOMAIN_FAILURE_NOTIFICATION_REGISTRATION")
GetStorageDomainFailureNotificationRegistrationSpectraS3Response getStorageDomainFailureNotificationRegistrationSpectraS3(final GetStorageDomainFailureNotificationRegistrationSpectraS3Request request)
throws IOException;
/** LIST: lists storage-domain-failure notification registrations. */
@ResponsePayloadModel("StorageDomainFailureNotificationRegistrationList")
@Action("LIST")
@Resource("STORAGE_DOMAIN_FAILURE_NOTIFICATION_REGISTRATION")
GetStorageDomainFailureNotificationRegistrationsSpectraS3Response getStorageDomainFailureNotificationRegistrationsSpectraS3(final GetStorageDomainFailureNotificationRegistrationsSpectraS3Request request)
throws IOException;
/** SHOW: fetches a system-failure notification registration. */
@ResponsePayloadModel("SystemFailureNotificationRegistration")
@Action("SHOW")
@Resource("SYSTEM_FAILURE_NOTIFICATION_REGISTRATION")
GetSystemFailureNotificationRegistrationSpectraS3Response getSystemFailureNotificationRegistrationSpectraS3(final GetSystemFailureNotificationRegistrationSpectraS3Request request)
throws IOException;
/** LIST: lists system-failure notification registrations. */
@ResponsePayloadModel("SystemFailureNotificationRegistrationList")
@Action("LIST")
@Resource("SYSTEM_FAILURE_NOTIFICATION_REGISTRATION")
GetSystemFailureNotificationRegistrationsSpectraS3Response getSystemFailureNotificationRegistrationsSpectraS3(final GetSystemFailureNotificationRegistrationsSpectraS3Request request)
throws IOException;
/** SHOW: fetches a tape-failure notification registration. */
@ResponsePayloadModel("TapeFailureNotificationRegistration")
@Action("SHOW")
@Resource("TAPE_FAILURE_NOTIFICATION_REGISTRATION")
GetTapeFailureNotificationRegistrationSpectraS3Response getTapeFailureNotificationRegistrationSpectraS3(final GetTapeFailureNotificationRegistrationSpectraS3Request request)
throws IOException;
/** LIST: lists tape-failure notification registrations. */
@ResponsePayloadModel("TapeFailureNotificationRegistrationList")
@Action("LIST")
@Resource("TAPE_FAILURE_NOTIFICATION_REGISTRATION")
GetTapeFailureNotificationRegistrationsSpectraS3Response getTapeFailureNotificationRegistrationsSpectraS3(final GetTapeFailureNotificationRegistrationsSpectraS3Request request)
throws IOException;
/** SHOW: fetches a tape-partition-failure notification registration. */
@ResponsePayloadModel("TapePartitionFailureNotificationRegistration")
@Action("SHOW")
@Resource("TAPE_PARTITION_FAILURE_NOTIFICATION_REGISTRATION")
GetTapePartitionFailureNotificationRegistrationSpectraS3Response getTapePartitionFailureNotificationRegistrationSpectraS3(final GetTapePartitionFailureNotificationRegistrationSpectraS3Request request)
throws IOException;
/** LIST: lists tape-partition-failure notification registrations. */
@ResponsePayloadModel("TapePartitionFailureNotificationRegistrationList")
@Action("LIST")
@Resource("TAPE_PARTITION_FAILURE_NOTIFICATION_REGISTRATION")
GetTapePartitionFailureNotificationRegistrationsSpectraS3Response getTapePartitionFailureNotificationRegistrationsSpectraS3(final GetTapePartitionFailureNotificationRegistrationsSpectraS3Request request)
throws IOException;
// --- Folder, object, and physical-placement operations ------------------

/** DELETE on FOLDER: deletes a folder recursively. */
@Action("DELETE")
@Resource("FOLDER")
DeleteFolderRecursivelySpectraS3Response deleteFolderRecursivelySpectraS3(final DeleteFolderRecursivelySpectraS3Request request)
throws IOException;
/** LIST on BLOB_PERSISTENCE: fetches blob persistence information; response payload model "String". */
@ResponsePayloadModel("String")
@Action("LIST")
@Resource("BLOB_PERSISTENCE")
GetBlobPersistenceSpectraS3Response getBlobPersistenceSpectraS3(final GetBlobPersistenceSpectraS3Request request)
throws IOException;
/** SHOW on OBJECT: fetches details for a single object; response payload model "S3Object". */
@ResponsePayloadModel("S3Object")
@Action("SHOW")
@Resource("OBJECT")
GetObjectDetailsSpectraS3Response getObjectDetailsSpectraS3(final GetObjectDetailsSpectraS3Request request)
throws IOException;
/** LIST on OBJECT: lists object details; response payload model "S3ObjectList". */
@ResponsePayloadModel("S3ObjectList")
@Action("LIST")
@Resource("OBJECT")
GetObjectsDetailsSpectraS3Response getObjectsDetailsSpectraS3(final GetObjectsDetailsSpectraS3Request request)
throws IOException;
/** LIST on OBJECT: lists objects with full details; response payload model "DetailedS3ObjectList". */
@ResponsePayloadModel("DetailedS3ObjectList")
@Action("LIST")
@Resource("OBJECT")
GetObjectsWithFullDetailsSpectraS3Response getObjectsWithFullDetailsSpectraS3(final GetObjectsWithFullDetailsSpectraS3Request request)
throws IOException;
/** MODIFY on BUCKET: computes physical placement for objects; response payload model "PhysicalPlacement". */
@ResponsePayloadModel("PhysicalPlacement")
@Action("MODIFY")
@Resource("BUCKET")
GetPhysicalPlacementForObjectsSpectraS3Response getPhysicalPlacementForObjectsSpectraS3(final GetPhysicalPlacementForObjectsSpectraS3Request request)
throws IOException;
/** MODIFY on BUCKET: computes physical placement with full details; response payload model "BulkObjectList". */
@ResponsePayloadModel("BulkObjectList")
@Action("MODIFY")
@Resource("BUCKET")
GetPhysicalPlacementForObjectsWithFullDetailsSpectraS3Response getPhysicalPlacementForObjectsWithFullDetailsSpectraS3(final GetPhysicalPlacementForObjectsWithFullDetailsSpectraS3Request request)
throws IOException;
/** BULK_MODIFY on OBJECT: undeletes an object; response payload model "S3Object". */
@ResponsePayloadModel("S3Object")
@Action("BULK_MODIFY")
@Resource("OBJECT")
UndeleteObjectSpectraS3Response undeleteObjectSpectraS3(final UndeleteObjectSpectraS3Request request)
throws IOException;
/** SHOW on BUCKET: verifies physical placement for objects; response payload model "PhysicalPlacement". */
@ResponsePayloadModel("PhysicalPlacement")
@Action("SHOW")
@Resource("BUCKET")
VerifyPhysicalPlacementForObjectsSpectraS3Response verifyPhysicalPlacementForObjectsSpectraS3(final VerifyPhysicalPlacementForObjectsSpectraS3Request request)
throws IOException;
/** SHOW on BUCKET: verifies physical placement with full details; response payload model "BulkObjectList". */
@ResponsePayloadModel("BulkObjectList")
@Action("SHOW")
@Resource("BUCKET")
VerifyPhysicalPlacementForObjectsWithFullDetailsSpectraS3Response verifyPhysicalPlacementForObjectsWithFullDetailsSpectraS3(final VerifyPhysicalPlacementForObjectsWithFullDetailsSpectraS3Request request)
throws IOException;
// --- Pool and pool-partition operations ----------------------------------

/** BULK_MODIFY on POOL: cancels import on all pools. */
@Action("BULK_MODIFY")
@Resource("POOL")
CancelImportOnAllPoolsSpectraS3Response cancelImportOnAllPoolsSpectraS3(final CancelImportOnAllPoolsSpectraS3Request request)
throws IOException;
/** MODIFY on POOL: cancels import on a single pool; response payload model "Pool". */
@ResponsePayloadModel("Pool")
@Action("MODIFY")
@Resource("POOL")
CancelImportPoolSpectraS3Response cancelImportPoolSpectraS3(final CancelImportPoolSpectraS3Request request)
throws IOException;
/** BULK_MODIFY on POOL: cancels verify on all pools. */
@Action("BULK_MODIFY")
@Resource("POOL")
CancelVerifyOnAllPoolsSpectraS3Response cancelVerifyOnAllPoolsSpectraS3(final CancelVerifyOnAllPoolsSpectraS3Request request)
throws IOException;
/** MODIFY on POOL: cancels verify on a single pool; response payload model "Pool". */
@ResponsePayloadModel("Pool")
@Action("MODIFY")
@Resource("POOL")
CancelVerifyPoolSpectraS3Response cancelVerifyPoolSpectraS3(final CancelVerifyPoolSpectraS3Request request)
throws IOException;
/** BULK_MODIFY on POOL: compacts all pools. */
@Action("BULK_MODIFY")
@Resource("POOL")
CompactAllPoolsSpectraS3Response compactAllPoolsSpectraS3(final CompactAllPoolsSpectraS3Request request)
throws IOException;
/** MODIFY on POOL: compacts a single pool; response payload model "Pool". */
@ResponsePayloadModel("Pool")
@Action("MODIFY")
@Resource("POOL")
CompactPoolSpectraS3Response compactPoolSpectraS3(final CompactPoolSpectraS3Request request)
throws IOException;
/** CREATE on POOL_PARTITION: creates a pool partition; response payload model "PoolPartition". */
@ResponsePayloadModel("PoolPartition")
@Action("CREATE")
@Resource("POOL_PARTITION")
PutPoolPartitionSpectraS3Response putPoolPartitionSpectraS3(final PutPoolPartitionSpectraS3Request request)
throws IOException;
/** MODIFY on POOL: deallocates a pool. */
@Action("MODIFY")
@Resource("POOL")
DeallocatePoolSpectraS3Response deallocatePoolSpectraS3(final DeallocatePoolSpectraS3Request request)
throws IOException;
/** DELETE on POOL: deletes a permanently lost pool. */
@Action("DELETE")
@Resource("POOL")
DeletePermanentlyLostPoolSpectraS3Response deletePermanentlyLostPoolSpectraS3(final DeletePermanentlyLostPoolSpectraS3Request request)
throws IOException;
/** DELETE on POOL_FAILURE: deletes a pool-failure record. */
@Action("DELETE")
@Resource("POOL_FAILURE")
DeletePoolFailureSpectraS3Response deletePoolFailureSpectraS3(final DeletePoolFailureSpectraS3Request request)
throws IOException;
/** DELETE on POOL_PARTITION: deletes a pool partition. */
@Action("DELETE")
@Resource("POOL_PARTITION")
DeletePoolPartitionSpectraS3Response deletePoolPartitionSpectraS3(final DeletePoolPartitionSpectraS3Request request)
throws IOException;
/** BULK_MODIFY on POOL_ENVIRONMENT: forces a pool-environment refresh. */
@Action("BULK_MODIFY")
@Resource("POOL_ENVIRONMENT")
ForcePoolEnvironmentRefreshSpectraS3Response forcePoolEnvironmentRefreshSpectraS3(final ForcePoolEnvironmentRefreshSpectraS3Request request)
throws IOException;
/** BULK_MODIFY on POOL: formats all foreign pools. */
@Action("BULK_MODIFY")
@Resource("POOL")
FormatAllForeignPoolsSpectraS3Response formatAllForeignPoolsSpectraS3(final FormatAllForeignPoolsSpectraS3Request request)
throws IOException;
/** MODIFY on POOL: formats a single foreign pool; response payload model "Pool". */
@ResponsePayloadModel("Pool")
@Action("MODIFY")
@Resource("POOL")
FormatForeignPoolSpectraS3Response formatForeignPoolSpectraS3(final FormatForeignPoolSpectraS3Request request)
throws IOException;
/** SHOW on POOL: fetches the blobs on a pool; response payload model "BulkObjectList". */
@ResponsePayloadModel("BulkObjectList")
@Action("SHOW")
@Resource("POOL")
GetBlobsOnPoolSpectraS3Response getBlobsOnPoolSpectraS3(final GetBlobsOnPoolSpectraS3Request request)
throws IOException;
/** LIST on POOL_FAILURE: lists pool failures; response payload model "PoolFailureList". */
@ResponsePayloadModel("PoolFailureList")
@Action("LIST")
@Resource("POOL_FAILURE")
GetPoolFailuresSpectraS3Response getPoolFailuresSpectraS3(final GetPoolFailuresSpectraS3Request request)
throws IOException;
/** SHOW on POOL_PARTITION: fetches a single pool partition; response payload model "PoolPartition". */
@ResponsePayloadModel("PoolPartition")
@Action("SHOW")
@Resource("POOL_PARTITION")
GetPoolPartitionSpectraS3Response getPoolPartitionSpectraS3(final GetPoolPartitionSpectraS3Request request)
throws IOException;
/** LIST on POOL_PARTITION: lists pool partitions; response payload model "PoolPartitionList". */
@ResponsePayloadModel("PoolPartitionList")
@Action("LIST")
@Resource("POOL_PARTITION")
GetPoolPartitionsSpectraS3Response getPoolPartitionsSpectraS3(final GetPoolPartitionsSpectraS3Request request)
throws IOException;
/** SHOW on POOL: fetches a single pool; response payload model "Pool". */
@ResponsePayloadModel("Pool")
@Action("SHOW")
@Resource("POOL")
GetPoolSpectraS3Response getPoolSpectraS3(final GetPoolSpectraS3Request request)
throws IOException;
/** LIST on POOL: lists pools; response payload model "PoolList". */
@ResponsePayloadModel("PoolList")
@Action("LIST")
@Resource("POOL")
GetPoolsSpectraS3Response getPoolsSpectraS3(final GetPoolsSpectraS3Request request)
throws IOException;
@Action("BULK_MODIFY")
@Resource("POOL")
ImportAllPoolsSpectraS3Response importAllPoolsSpectraS3(final ImportAllPoolsSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Pool")
@Action("MODIFY")
@Resource("POOL")
ImportPoolSpectraS3Response importPoolSpectraS3(final ImportPoolSpectraS3Request request)
throws IOException;
@Action("BULK_MODIFY")
@Resource("POOL")
ModifyAllPoolsSpectraS3Response modifyAllPoolsSpectraS3(final ModifyAllPoolsSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("PoolPartition")
@Action("MODIFY")
@Resource("POOL_PARTITION")
ModifyPoolPartitionSpectraS3Response modifyPoolPartitionSpectraS3(final ModifyPoolPartitionSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Pool")
@Action("MODIFY")
@Resource("POOL")
ModifyPoolSpectraS3Response modifyPoolSpectraS3(final ModifyPoolSpectraS3Request request)
throws IOException;
@Action("BULK_MODIFY")
@Resource("POOL")
VerifyAllPoolsSpectraS3Response verifyAllPoolsSpectraS3(final VerifyAllPoolsSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Pool")
@Action("MODIFY")
@Resource("POOL")
VerifyPoolSpectraS3Response verifyPoolSpectraS3(final VerifyPoolSpectraS3Request request)
throws IOException;
// ---------------------------------------------------------------------------
// Storage-domain operations: CRUD for storage domains and their members
// (pool- and tape-backed), plus failure listing.
// ---------------------------------------------------------------------------
@Action("MODIFY")
@Resource("STORAGE_DOMAIN")
ConvertStorageDomainToDs3TargetSpectraS3Response convertStorageDomainToDs3TargetSpectraS3(final ConvertStorageDomainToDs3TargetSpectraS3Request request)
throws IOException;
/** Adds a pool as a member of a storage domain. */
@ResponsePayloadModel("StorageDomainMember")
@Action("CREATE")
@Resource("STORAGE_DOMAIN_MEMBER")
PutPoolStorageDomainMemberSpectraS3Response putPoolStorageDomainMemberSpectraS3(final PutPoolStorageDomainMemberSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("StorageDomain")
@Action("CREATE")
@Resource("STORAGE_DOMAIN")
PutStorageDomainSpectraS3Response putStorageDomainSpectraS3(final PutStorageDomainSpectraS3Request request)
throws IOException;
/** Adds a tape partition as a member of a storage domain. */
@ResponsePayloadModel("StorageDomainMember")
@Action("CREATE")
@Resource("STORAGE_DOMAIN_MEMBER")
PutTapeStorageDomainMemberSpectraS3Response putTapeStorageDomainMemberSpectraS3(final PutTapeStorageDomainMemberSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("STORAGE_DOMAIN_FAILURE")
DeleteStorageDomainFailureSpectraS3Response deleteStorageDomainFailureSpectraS3(final DeleteStorageDomainFailureSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("STORAGE_DOMAIN_MEMBER")
DeleteStorageDomainMemberSpectraS3Response deleteStorageDomainMemberSpectraS3(final DeleteStorageDomainMemberSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("STORAGE_DOMAIN")
DeleteStorageDomainSpectraS3Response deleteStorageDomainSpectraS3(final DeleteStorageDomainSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("StorageDomainFailureList")
@Action("LIST")
@Resource("STORAGE_DOMAIN_FAILURE")
GetStorageDomainFailuresSpectraS3Response getStorageDomainFailuresSpectraS3(final GetStorageDomainFailuresSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("StorageDomainMember")
@Action("SHOW")
@Resource("STORAGE_DOMAIN_MEMBER")
GetStorageDomainMemberSpectraS3Response getStorageDomainMemberSpectraS3(final GetStorageDomainMemberSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("StorageDomainMemberList")
@Action("LIST")
@Resource("STORAGE_DOMAIN_MEMBER")
GetStorageDomainMembersSpectraS3Response getStorageDomainMembersSpectraS3(final GetStorageDomainMembersSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("StorageDomain")
@Action("SHOW")
@Resource("STORAGE_DOMAIN")
GetStorageDomainSpectraS3Response getStorageDomainSpectraS3(final GetStorageDomainSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("StorageDomainList")
@Action("LIST")
@Resource("STORAGE_DOMAIN")
GetStorageDomainsSpectraS3Response getStorageDomainsSpectraS3(final GetStorageDomainsSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("StorageDomainMember")
@Action("MODIFY")
@Resource("STORAGE_DOMAIN_MEMBER")
ModifyStorageDomainMemberSpectraS3Response modifyStorageDomainMemberSpectraS3(final ModifyStorageDomainMemberSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("StorageDomain")
@Action("MODIFY")
@Resource("STORAGE_DOMAIN")
ModifyStorageDomainSpectraS3Response modifyStorageDomainSpectraS3(final ModifyStorageDomainSpectraS3Request request)
throws IOException;
// ---------------------------------------------------------------------------
// Feature-key and system-level operations.
// ---------------------------------------------------------------------------
@Action("BULK_MODIFY")
@Resource("FEATURE_KEY")
ForceFeatureKeyValidationSpectraS3Response forceFeatureKeyValidationSpectraS3(final ForceFeatureKeyValidationSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("FeatureKeyList")
@Action("LIST")
@Resource("FEATURE_KEY")
GetFeatureKeysSpectraS3Response getFeatureKeysSpectraS3(final GetFeatureKeysSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("SystemFailureList")
@Action("LIST")
@Resource("SYSTEM_FAILURE")
GetSystemFailuresSpectraS3Response getSystemFailuresSpectraS3(final GetSystemFailuresSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("SystemInformation")
@Action("LIST")
@Resource("SYSTEM_INFORMATION")
GetSystemInformationSpectraS3Response getSystemInformationSpectraS3(final GetSystemInformationSpectraS3Request request)
throws IOException;
// Resets the instance identifier; response carries the DataPathBackend model.
@ResponsePayloadModel("DataPathBackend")
@Action("BULK_MODIFY")
@Resource("INSTANCE_IDENTIFIER")
ResetInstanceIdentifierSpectraS3Response resetInstanceIdentifierSpectraS3(final ResetInstanceIdentifierSpectraS3Request request)
throws IOException;
/** Runs a system health verification; payload is a HealthVerificationResult. */
@ResponsePayloadModel("HealthVerificationResult")
@Action("LIST")
@Resource("SYSTEM_HEALTH")
VerifySystemHealthSpectraS3Response verifySystemHealthSpectraS3(final VerifySystemHealthSpectraS3Request request)
throws IOException;
// ---------------------------------------------------------------------------
// Tape operations: lifecycle actions on tapes, tape drives, tape partitions,
// tape libraries, and tape density directives. Bulk ("...AllTapes") variants
// are BULK_MODIFY actions returning a TapeFailureList; single-tape variants
// are MODIFY actions returning the affected Tape.
// ---------------------------------------------------------------------------
@ResponsePayloadModel("TapeFailureList")
@Action("BULK_MODIFY")
@Resource("TAPE")
CancelEjectOnAllTapesSpectraS3Response cancelEjectOnAllTapesSpectraS3(final CancelEjectOnAllTapesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Tape")
@Action("MODIFY")
@Resource("TAPE")
CancelEjectTapeSpectraS3Response cancelEjectTapeSpectraS3(final CancelEjectTapeSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("TapeFailureList")
@Action("BULK_MODIFY")
@Resource("TAPE")
CancelFormatOnAllTapesSpectraS3Response cancelFormatOnAllTapesSpectraS3(final CancelFormatOnAllTapesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Tape")
@Action("MODIFY")
@Resource("TAPE")
CancelFormatTapeSpectraS3Response cancelFormatTapeSpectraS3(final CancelFormatTapeSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("TapeFailureList")
@Action("BULK_MODIFY")
@Resource("TAPE")
CancelImportOnAllTapesSpectraS3Response cancelImportOnAllTapesSpectraS3(final CancelImportOnAllTapesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Tape")
@Action("MODIFY")
@Resource("TAPE")
CancelImportTapeSpectraS3Response cancelImportTapeSpectraS3(final CancelImportTapeSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("TapeFailureList")
@Action("BULK_MODIFY")
@Resource("TAPE")
CancelOnlineOnAllTapesSpectraS3Response cancelOnlineOnAllTapesSpectraS3(final CancelOnlineOnAllTapesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Tape")
@Action("MODIFY")
@Resource("TAPE")
CancelOnlineTapeSpectraS3Response cancelOnlineTapeSpectraS3(final CancelOnlineTapeSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("TapeFailureList")
@Action("BULK_MODIFY")
@Resource("TAPE")
CancelVerifyOnAllTapesSpectraS3Response cancelVerifyOnAllTapesSpectraS3(final CancelVerifyOnAllTapesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Tape")
@Action("MODIFY")
@Resource("TAPE")
CancelVerifyTapeSpectraS3Response cancelVerifyTapeSpectraS3(final CancelVerifyTapeSpectraS3Request request)
throws IOException;
/** Triggers a cleaning cycle on a tape drive. */
@ResponsePayloadModel("TapeDrive")
@Action("MODIFY")
@Resource("TAPE_DRIVE")
CleanTapeDriveSpectraS3Response cleanTapeDriveSpectraS3(final CleanTapeDriveSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("TapeDensityDirective")
@Action("CREATE")
@Resource("TAPE_DENSITY_DIRECTIVE")
PutTapeDensityDirectiveSpectraS3Response putTapeDensityDirectiveSpectraS3(final PutTapeDensityDirectiveSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("TAPE")
DeletePermanentlyLostTapeSpectraS3Response deletePermanentlyLostTapeSpectraS3(final DeletePermanentlyLostTapeSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("TAPE_DENSITY_DIRECTIVE")
DeleteTapeDensityDirectiveSpectraS3Response deleteTapeDensityDirectiveSpectraS3(final DeleteTapeDensityDirectiveSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("TAPE_DRIVE")
DeleteTapeDriveSpectraS3Response deleteTapeDriveSpectraS3(final DeleteTapeDriveSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("TAPE_FAILURE")
DeleteTapeFailureSpectraS3Response deleteTapeFailureSpectraS3(final DeleteTapeFailureSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("TAPE_PARTITION_FAILURE")
DeleteTapePartitionFailureSpectraS3Response deleteTapePartitionFailureSpectraS3(final DeleteTapePartitionFailureSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("TAPE_PARTITION")
DeleteTapePartitionSpectraS3Response deleteTapePartitionSpectraS3(final DeleteTapePartitionSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("TapeFailureList")
@Action("BULK_MODIFY")
@Resource("TAPE")
EjectAllTapesSpectraS3Response ejectAllTapesSpectraS3(final EjectAllTapesSpectraS3Request request)
throws IOException;
// NOTE(review): ejects the tapes holding a storage domain's blobs —
// presumably scoped to a blob set given in the request; confirm against the
// DS3 API documentation.
@Action("BULK_MODIFY")
@Resource("TAPE")
EjectStorageDomainBlobsSpectraS3Response ejectStorageDomainBlobsSpectraS3(final EjectStorageDomainBlobsSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("TapeFailureList")
@Action("BULK_MODIFY")
@Resource("TAPE")
EjectStorageDomainSpectraS3Response ejectStorageDomainSpectraS3(final EjectStorageDomainSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Tape")
@Action("MODIFY")
@Resource("TAPE")
EjectTapeSpectraS3Response ejectTapeSpectraS3(final EjectTapeSpectraS3Request request)
throws IOException;
@Action("BULK_MODIFY")
@Resource("TAPE_ENVIRONMENT")
ForceTapeEnvironmentRefreshSpectraS3Response forceTapeEnvironmentRefreshSpectraS3(final ForceTapeEnvironmentRefreshSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("TapeFailureList")
@Action("BULK_MODIFY")
@Resource("TAPE")
FormatAllTapesSpectraS3Response formatAllTapesSpectraS3(final FormatAllTapesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Tape")
@Action("MODIFY")
@Resource("TAPE")
FormatTapeSpectraS3Response formatTapeSpectraS3(final FormatTapeSpectraS3Request request)
throws IOException;
/** Lists the blobs stored on a tape; response payload is a BulkObjectList. */
@ResponsePayloadModel("BulkObjectList")
@Action("SHOW")
@Resource("TAPE")
GetBlobsOnTapeSpectraS3Response getBlobsOnTapeSpectraS3(final GetBlobsOnTapeSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("TapeDensityDirective")
@Action("SHOW")
@Resource("TAPE_DENSITY_DIRECTIVE")
GetTapeDensityDirectiveSpectraS3Response getTapeDensityDirectiveSpectraS3(final GetTapeDensityDirectiveSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("TapeDensityDirectiveList")
@Action("LIST")
@Resource("TAPE_DENSITY_DIRECTIVE")
GetTapeDensityDirectivesSpectraS3Response getTapeDensityDirectivesSpectraS3(final GetTapeDensityDirectivesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("TapeDrive")
@Action("SHOW")
@Resource("TAPE_DRIVE")
GetTapeDriveSpectraS3Response getTapeDriveSpectraS3(final GetTapeDriveSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("TapeDriveList")
@Action("LIST")
@Resource("TAPE_DRIVE")
GetTapeDrivesSpectraS3Response getTapeDrivesSpectraS3(final GetTapeDrivesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("DetailedTapeFailureList")
@Action("LIST")
@Resource("TAPE_FAILURE")
GetTapeFailuresSpectraS3Response getTapeFailuresSpectraS3(final GetTapeFailuresSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("TapeLibraryList")
@Action("LIST")
@Resource("TAPE_LIBRARY")
GetTapeLibrariesSpectraS3Response getTapeLibrariesSpectraS3(final GetTapeLibrariesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("TapeLibrary")
@Action("SHOW")
@Resource("TAPE_LIBRARY")
GetTapeLibrarySpectraS3Response getTapeLibrarySpectraS3(final GetTapeLibrarySpectraS3Request request)
throws IOException;
@ResponsePayloadModel("TapePartitionFailureList")
@Action("LIST")
@Resource("TAPE_PARTITION_FAILURE")
GetTapePartitionFailuresSpectraS3Response getTapePartitionFailuresSpectraS3(final GetTapePartitionFailuresSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("TapePartition")
@Action("SHOW")
@Resource("TAPE_PARTITION")
GetTapePartitionSpectraS3Response getTapePartitionSpectraS3(final GetTapePartitionSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("DetailedTapePartition")
@Action("SHOW")
@Resource("TAPE_PARTITION")
GetTapePartitionWithFullDetailsSpectraS3Response getTapePartitionWithFullDetailsSpectraS3(final GetTapePartitionWithFullDetailsSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("TapePartitionList")
@Action("LIST")
@Resource("TAPE_PARTITION")
GetTapePartitionsSpectraS3Response getTapePartitionsSpectraS3(final GetTapePartitionsSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("NamedDetailedTapePartitionList")
@Action("LIST")
@Resource("TAPE_PARTITION")
GetTapePartitionsWithFullDetailsSpectraS3Response getTapePartitionsWithFullDetailsSpectraS3(final GetTapePartitionsWithFullDetailsSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Tape")
@Action("SHOW")
@Resource("TAPE")
GetTapeSpectraS3Response getTapeSpectraS3(final GetTapeSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("TapeList")
@Action("LIST")
@Resource("TAPE")
GetTapesSpectraS3Response getTapesSpectraS3(final GetTapesSpectraS3Request request)
throws IOException;
@Action("BULK_MODIFY")
@Resource("TAPE")
ImportAllTapesSpectraS3Response importAllTapesSpectraS3(final ImportAllTapesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Tape")
@Action("MODIFY")
@Resource("TAPE")
ImportTapeSpectraS3Response importTapeSpectraS3(final ImportTapeSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("TapeFailureList")
@Action("BULK_MODIFY")
@Resource("TAPE")
InspectAllTapesSpectraS3Response inspectAllTapesSpectraS3(final InspectAllTapesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Tape")
@Action("MODIFY")
@Resource("TAPE")
InspectTapeSpectraS3Response inspectTapeSpectraS3(final InspectTapeSpectraS3Request request)
throws IOException;
/** Flags a tape so its contents can be compacted/migrated off it. */
@ResponsePayloadModel("Tape")
@Action("MODIFY")
@Resource("TAPE")
MarkTapeForCompactionSpectraS3Response markTapeForCompactionSpectraS3(final MarkTapeForCompactionSpectraS3Request request)
throws IOException;
@Action("BULK_MODIFY")
@Resource("TAPE_PARTITION")
ModifyAllTapePartitionsSpectraS3Response modifyAllTapePartitionsSpectraS3(final ModifyAllTapePartitionsSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("TapeDrive")
@Action("MODIFY")
@Resource("TAPE_DRIVE")
ModifyTapeDriveSpectraS3Response modifyTapeDriveSpectraS3(final ModifyTapeDriveSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("TapePartition")
@Action("MODIFY")
@Resource("TAPE_PARTITION")
ModifyTapePartitionSpectraS3Response modifyTapePartitionSpectraS3(final ModifyTapePartitionSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Tape")
@Action("MODIFY")
@Resource("TAPE")
ModifyTapeSpectraS3Response modifyTapeSpectraS3(final ModifyTapeSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("TapeFailureList")
@Action("BULK_MODIFY")
@Resource("TAPE")
OnlineAllTapesSpectraS3Response onlineAllTapesSpectraS3(final OnlineAllTapesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Tape")
@Action("MODIFY")
@Resource("TAPE")
OnlineTapeSpectraS3Response onlineTapeSpectraS3(final OnlineTapeSpectraS3Request request)
throws IOException;
// NOTE(review): "raw import" presumably bypasses normal import processing;
// confirm semantics against the DS3 API documentation before relying on it.
@Action("BULK_MODIFY")
@Resource("TAPE")
RawImportAllTapesSpectraS3Response rawImportAllTapesSpectraS3(final RawImportAllTapesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Tape")
@Action("MODIFY")
@Resource("TAPE")
RawImportTapeSpectraS3Response rawImportTapeSpectraS3(final RawImportTapeSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("TapeFailureList")
@Action("BULK_MODIFY")
@Resource("TAPE")
VerifyAllTapesSpectraS3Response verifyAllTapesSpectraS3(final VerifyAllTapesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Tape")
@Action("MODIFY")
@Resource("TAPE")
VerifyTapeSpectraS3Response verifyTapeSpectraS3(final VerifyTapeSpectraS3Request request)
throws IOException;
// ---------------------------------------------------------------------------
// Target-environment refresh and Azure replication-target operations:
// registration, bucket-name mappings, read preferences, and verification.
// ---------------------------------------------------------------------------
@Action("BULK_MODIFY")
@Resource("TARGET_ENVIRONMENT")
ForceTargetEnvironmentRefreshSpectraS3Response forceTargetEnvironmentRefreshSpectraS3(final ForceTargetEnvironmentRefreshSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("AzureTargetBucketName")
@Action("CREATE")
@Resource("AZURE_TARGET_BUCKET_NAME")
PutAzureTargetBucketNameSpectraS3Response putAzureTargetBucketNameSpectraS3(final PutAzureTargetBucketNameSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("AzureTargetReadPreference")
@Action("CREATE")
@Resource("AZURE_TARGET_READ_PREFERENCE")
PutAzureTargetReadPreferenceSpectraS3Response putAzureTargetReadPreferenceSpectraS3(final PutAzureTargetReadPreferenceSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("AZURE_TARGET_BUCKET_NAME")
DeleteAzureTargetBucketNameSpectraS3Response deleteAzureTargetBucketNameSpectraS3(final DeleteAzureTargetBucketNameSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("AZURE_TARGET_FAILURE")
DeleteAzureTargetFailureSpectraS3Response deleteAzureTargetFailureSpectraS3(final DeleteAzureTargetFailureSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("AZURE_TARGET_READ_PREFERENCE")
DeleteAzureTargetReadPreferenceSpectraS3Response deleteAzureTargetReadPreferenceSpectraS3(final DeleteAzureTargetReadPreferenceSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("AZURE_TARGET")
DeleteAzureTargetSpectraS3Response deleteAzureTargetSpectraS3(final DeleteAzureTargetSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("AzureTargetBucketNameList")
@Action("LIST")
@Resource("AZURE_TARGET_BUCKET_NAME")
GetAzureTargetBucketNamesSpectraS3Response getAzureTargetBucketNamesSpectraS3(final GetAzureTargetBucketNamesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("AzureTargetFailureList")
@Action("LIST")
@Resource("AZURE_TARGET_FAILURE")
GetAzureTargetFailuresSpectraS3Response getAzureTargetFailuresSpectraS3(final GetAzureTargetFailuresSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("AzureTargetReadPreference")
@Action("SHOW")
@Resource("AZURE_TARGET_READ_PREFERENCE")
GetAzureTargetReadPreferenceSpectraS3Response getAzureTargetReadPreferenceSpectraS3(final GetAzureTargetReadPreferenceSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("AzureTargetReadPreferenceList")
@Action("LIST")
@Resource("AZURE_TARGET_READ_PREFERENCE")
GetAzureTargetReadPreferencesSpectraS3Response getAzureTargetReadPreferencesSpectraS3(final GetAzureTargetReadPreferencesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("AzureTarget")
@Action("SHOW")
@Resource("AZURE_TARGET")
GetAzureTargetSpectraS3Response getAzureTargetSpectraS3(final GetAzureTargetSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("AzureTargetList")
@Action("LIST")
@Resource("AZURE_TARGET")
GetAzureTargetsSpectraS3Response getAzureTargetsSpectraS3(final GetAzureTargetsSpectraS3Request request)
throws IOException;
/** Lists the blobs stored on an Azure target; payload is a BulkObjectList. */
@ResponsePayloadModel("BulkObjectList")
@Action("SHOW")
@Resource("AZURE_TARGET")
GetBlobsOnAzureTargetSpectraS3Response getBlobsOnAzureTargetSpectraS3(final GetBlobsOnAzureTargetSpectraS3Request request)
throws IOException;
@Action("MODIFY")
@Resource("AZURE_TARGET")
ImportAzureTargetSpectraS3Response importAzureTargetSpectraS3(final ImportAzureTargetSpectraS3Request request)
throws IOException;
@Action("BULK_MODIFY")
@Resource("AZURE_TARGET")
ModifyAllAzureTargetsSpectraS3Response modifyAllAzureTargetsSpectraS3(final ModifyAllAzureTargetsSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("AzureTarget")
@Action("MODIFY")
@Resource("AZURE_TARGET")
ModifyAzureTargetSpectraS3Response modifyAzureTargetSpectraS3(final ModifyAzureTargetSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("AzureTarget")
@Action("CREATE")
@Resource("AZURE_TARGET")
RegisterAzureTargetSpectraS3Response registerAzureTargetSpectraS3(final RegisterAzureTargetSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("AzureTarget")
@Action("MODIFY")
@Resource("AZURE_TARGET")
VerifyAzureTargetSpectraS3Response verifyAzureTargetSpectraS3(final VerifyAzureTargetSpectraS3Request request)
throws IOException;
// ---------------------------------------------------------------------------
// DS3 replication-target operations (another BlackPearl/DS3 system as the
// replication target): registration, pairing, read preferences, failures.
// ---------------------------------------------------------------------------
@ResponsePayloadModel("Ds3TargetReadPreference")
@Action("CREATE")
@Resource("DS3_TARGET_READ_PREFERENCE")
PutDs3TargetReadPreferenceSpectraS3Response putDs3TargetReadPreferenceSpectraS3(final PutDs3TargetReadPreferenceSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("DS3_TARGET_FAILURE")
DeleteDs3TargetFailureSpectraS3Response deleteDs3TargetFailureSpectraS3(final DeleteDs3TargetFailureSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("DS3_TARGET_READ_PREFERENCE")
DeleteDs3TargetReadPreferenceSpectraS3Response deleteDs3TargetReadPreferenceSpectraS3(final DeleteDs3TargetReadPreferenceSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("DS3_TARGET")
DeleteDs3TargetSpectraS3Response deleteDs3TargetSpectraS3(final DeleteDs3TargetSpectraS3Request request)
throws IOException;
/** Lists the blobs stored on a DS3 target; payload is a BulkObjectList. */
@ResponsePayloadModel("BulkObjectList")
@Action("SHOW")
@Resource("DS3_TARGET")
GetBlobsOnDs3TargetSpectraS3Response getBlobsOnDs3TargetSpectraS3(final GetBlobsOnDs3TargetSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("DataPolicyList")
@Action("SHOW")
@Resource("DS3_TARGET_DATA_POLICIES")
GetDs3TargetDataPoliciesSpectraS3Response getDs3TargetDataPoliciesSpectraS3(final GetDs3TargetDataPoliciesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Ds3TargetFailureList")
@Action("LIST")
@Resource("DS3_TARGET_FAILURE")
GetDs3TargetFailuresSpectraS3Response getDs3TargetFailuresSpectraS3(final GetDs3TargetFailuresSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Ds3TargetReadPreference")
@Action("SHOW")
@Resource("DS3_TARGET_READ_PREFERENCE")
GetDs3TargetReadPreferenceSpectraS3Response getDs3TargetReadPreferenceSpectraS3(final GetDs3TargetReadPreferenceSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Ds3TargetReadPreferenceList")
@Action("LIST")
@Resource("DS3_TARGET_READ_PREFERENCE")
GetDs3TargetReadPreferencesSpectraS3Response getDs3TargetReadPreferencesSpectraS3(final GetDs3TargetReadPreferencesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Ds3Target")
@Action("SHOW")
@Resource("DS3_TARGET")
GetDs3TargetSpectraS3Response getDs3TargetSpectraS3(final GetDs3TargetSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Ds3TargetList")
@Action("LIST")
@Resource("DS3_TARGET")
GetDs3TargetsSpectraS3Response getDs3TargetsSpectraS3(final GetDs3TargetsSpectraS3Request request)
throws IOException;
@Action("BULK_MODIFY")
@Resource("DS3_TARGET")
ModifyAllDs3TargetsSpectraS3Response modifyAllDs3TargetsSpectraS3(final ModifyAllDs3TargetsSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Ds3Target")
@Action("MODIFY")
@Resource("DS3_TARGET")
ModifyDs3TargetSpectraS3Response modifyDs3TargetSpectraS3(final ModifyDs3TargetSpectraS3Request request)
throws IOException;
// NOTE(review): presumably completes a two-way pairing with an already
// registered remote DS3 target — confirm against the DS3 API documentation.
@Action("MODIFY")
@Resource("DS3_TARGET")
PairBackRegisteredDs3TargetSpectraS3Response pairBackRegisteredDs3TargetSpectraS3(final PairBackRegisteredDs3TargetSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Ds3Target")
@Action("CREATE")
@Resource("DS3_TARGET")
RegisterDs3TargetSpectraS3Response registerDs3TargetSpectraS3(final RegisterDs3TargetSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("Ds3Target")
@Action("MODIFY")
@Resource("DS3_TARGET")
VerifyDs3TargetSpectraS3Response verifyDs3TargetSpectraS3(final VerifyDs3TargetSpectraS3Request request)
throws IOException;
// ---------------------------------------------------------------------------
// Amazon-S3 replication-target operations: registration, bucket-name
// mappings, read preferences, failures, and verification.
// ---------------------------------------------------------------------------
@ResponsePayloadModel("S3TargetBucketName")
@Action("CREATE")
@Resource("S3_TARGET_BUCKET_NAME")
PutS3TargetBucketNameSpectraS3Response putS3TargetBucketNameSpectraS3(final PutS3TargetBucketNameSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("S3TargetReadPreference")
@Action("CREATE")
@Resource("S3_TARGET_READ_PREFERENCE")
PutS3TargetReadPreferenceSpectraS3Response putS3TargetReadPreferenceSpectraS3(final PutS3TargetReadPreferenceSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("S3_TARGET_BUCKET_NAME")
DeleteS3TargetBucketNameSpectraS3Response deleteS3TargetBucketNameSpectraS3(final DeleteS3TargetBucketNameSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("S3_TARGET_FAILURE")
DeleteS3TargetFailureSpectraS3Response deleteS3TargetFailureSpectraS3(final DeleteS3TargetFailureSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("S3_TARGET_READ_PREFERENCE")
DeleteS3TargetReadPreferenceSpectraS3Response deleteS3TargetReadPreferenceSpectraS3(final DeleteS3TargetReadPreferenceSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("S3_TARGET")
DeleteS3TargetSpectraS3Response deleteS3TargetSpectraS3(final DeleteS3TargetSpectraS3Request request)
throws IOException;
/** Lists the blobs stored on an S3 target; payload is a BulkObjectList. */
@ResponsePayloadModel("BulkObjectList")
@Action("SHOW")
@Resource("S3_TARGET")
GetBlobsOnS3TargetSpectraS3Response getBlobsOnS3TargetSpectraS3(final GetBlobsOnS3TargetSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("S3TargetBucketNameList")
@Action("LIST")
@Resource("S3_TARGET_BUCKET_NAME")
GetS3TargetBucketNamesSpectraS3Response getS3TargetBucketNamesSpectraS3(final GetS3TargetBucketNamesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("S3TargetFailureList")
@Action("LIST")
@Resource("S3_TARGET_FAILURE")
GetS3TargetFailuresSpectraS3Response getS3TargetFailuresSpectraS3(final GetS3TargetFailuresSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("S3TargetReadPreference")
@Action("SHOW")
@Resource("S3_TARGET_READ_PREFERENCE")
GetS3TargetReadPreferenceSpectraS3Response getS3TargetReadPreferenceSpectraS3(final GetS3TargetReadPreferenceSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("S3TargetReadPreferenceList")
@Action("LIST")
@Resource("S3_TARGET_READ_PREFERENCE")
GetS3TargetReadPreferencesSpectraS3Response getS3TargetReadPreferencesSpectraS3(final GetS3TargetReadPreferencesSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("S3Target")
@Action("SHOW")
@Resource("S3_TARGET")
GetS3TargetSpectraS3Response getS3TargetSpectraS3(final GetS3TargetSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("S3TargetList")
@Action("LIST")
@Resource("S3_TARGET")
GetS3TargetsSpectraS3Response getS3TargetsSpectraS3(final GetS3TargetsSpectraS3Request request)
throws IOException;
@Action("MODIFY")
@Resource("S3_TARGET")
ImportS3TargetSpectraS3Response importS3TargetSpectraS3(final ImportS3TargetSpectraS3Request request)
throws IOException;
@Action("BULK_MODIFY")
@Resource("S3_TARGET")
ModifyAllS3TargetsSpectraS3Response modifyAllS3TargetsSpectraS3(final ModifyAllS3TargetsSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("S3Target")
@Action("MODIFY")
@Resource("S3_TARGET")
ModifyS3TargetSpectraS3Response modifyS3TargetSpectraS3(final ModifyS3TargetSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("S3Target")
@Action("CREATE")
@Resource("S3_TARGET")
RegisterS3TargetSpectraS3Response registerS3TargetSpectraS3(final RegisterS3TargetSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("S3Target")
@Action("MODIFY")
@Resource("S3_TARGET")
VerifyS3TargetSpectraS3Response verifyS3TargetSpectraS3(final VerifyS3TargetSpectraS3Request request)
throws IOException;
// ---------------------------------------------------------------------------
// User management operations.
// ---------------------------------------------------------------------------
@ResponsePayloadModel("SpectraUser")
@Action("CREATE")
@Resource("USER")
DelegateCreateUserSpectraS3Response delegateCreateUserSpectraS3(final DelegateCreateUserSpectraS3Request request)
throws IOException;
@Action("DELETE")
@Resource("USER")
DelegateDeleteUserSpectraS3Response delegateDeleteUserSpectraS3(final DelegateDeleteUserSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("SpectraUser")
@Action("SHOW")
@Resource("USER")
GetUserSpectraS3Response getUserSpectraS3(final GetUserSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("SpectraUserList")
@Action("LIST")
@Resource("USER")
GetUsersSpectraS3Response getUsersSpectraS3(final GetUsersSpectraS3Request request)
throws IOException;
@ResponsePayloadModel("SpectraUser")
@Action("MODIFY")
@Resource("USER")
ModifyUserSpectraS3Response modifyUserSpectraS3(final ModifyUserSpectraS3Request request)
throws IOException;
/** Regenerates the S3 secret key for a user; returns the updated user. */
@ResponsePayloadModel("SpectraUser")
@Action("MODIFY")
@Resource("USER")
RegenerateUserSecretKeySpectraS3Response regenerateUserSecretKeySpectraS3(final RegenerateUserSecretKeySpectraS3Request request)
throws IOException;
/**
 * Retrieves an object using the default response parsing.
 *
 * @param request the request identifying the object to retrieve
 * @return the response for the retrieved object
 * @throws IOException if the network exchange with the server fails
 */
GetObjectResponse getObject(final GetObjectRequest request)
throws IOException;
/**
 * Retrieves an object, delegating construction of the response to a
 * caller-supplied parser. Allows custom handling of the raw response
 * (e.g. custom payload processing) instead of the default parsing.
 *
 * @param request        the request identifying the object to retrieve
 * @param responseParser function that builds the {@link GetObjectResponse}
 *                       from the parser parameters
 * @return the response produced by {@code responseParser}
 * @throws IOException if the network exchange with the server fails
 */
GetObjectResponse getObject(final GetObjectRequest request,
final Function<GetObjectCustomParserParameters, GetObjectResponse> responseParser) throws IOException;
Ds3Client newForNode(final JobNode node);
}
| |
/*******************************************************************************
* Copyright (c) 2015 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.ibm.ws.lars.rest;
import static com.ibm.ws.lars.rest.RepositoryContext.RC_REJECT;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import org.apache.http.ParseException;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.entity.ContentType;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import com.ibm.ws.lars.rest.RepositoryContext.Protocol;
import com.ibm.ws.lars.rest.exceptions.InvalidJsonAssetException;
import com.ibm.ws.lars.rest.model.Asset;
import com.ibm.ws.lars.rest.model.AssetList;
import com.ibm.ws.lars.rest.model.Attachment;
import com.ibm.ws.lars.testutils.FatUtils;
/**
 * Tests that security permissions work correctly.
 * <p>
 * Each REST operation is exercised from a {@code userContext} whose
 * credentials map to a known role, and the test asserts that the operation
 * either succeeds or is rejected according to that role.
 */
@RunWith(Parameterized.class)
public class PermissionTest {
    @Rule
    public RepositoryContext adminContext;
    @Rule
    public RepositoryContext userContext;
    /**
     * HTTP URL for the test instance where read operations are restricted to users with the User
     * role.
     */
    private static final String RESTRICTED_URL_HTTP = RepositoryContext.DEFAULT_URLS.get(Protocol.HTTP);
    /**
     * HTTPS URL for the test instance where read operations are restricted to users with the User
     * role.
     */
    private static final String RESTRICTED_URL_HTTPS = RepositoryContext.DEFAULT_URLS.get(Protocol.HTTPS);
    /**
     * HTTP URL for the test instance where read operations are unrestricted.
     * <p>
     * Write operations are still restricted to users with the Admin role.
     */
    private static final String UNRESTRICTED_URL_HTTP = "http://localhost:" + FatUtils.LIBERTY_PORT_HTTP + "/unrestricted" + FatUtils.LARS_APPLICATION_ROOT;
    /**
     * HTTPS URL for the test instance where read operations are unrestricted.
     * <p>
     * Write operations are still restricted to users with the Admin role.
     */
    private static final String UNRESTRICTED_URL_HTTPS = "https://localhost:" + FatUtils.LIBERTY_PORT_HTTPS + "/unrestricted" + FatUtils.LARS_APPLICATION_ROOT;
    /**
     * The available roles that we expect test users to be mapped to by the test server
     * configuration.
     */
    private enum Role {
        ADMIN,
        USER,
        NONE;
        /** @return true only for the ADMIN role */
        public boolean isAdmin() {
            return this == ADMIN;
        }
        /** @return true for ADMIN or USER (admins have user privileges too) */
        public boolean isUser() {
            return this == ADMIN || this == USER;
        }
    }
    /**
     * The users to test with.
     * <p>
     * Each user has a username and password and two roles. One is the role we expect the user to be
     * mapped to for the unrestricted configuration. The other is the role we expect the user to be
     * mapped to in the restricted configuration.
     * <p>
     * This enum is used to build the test parameters in {@link PermissionTest#makeParameters()}.
     */
    private enum User {
        ADMIN("admin", "passw0rd", Role.ADMIN, Role.ADMIN),
        USER("user", "passw0rd", Role.USER, Role.USER),
        NO_ROLE("noRoleUser", "passw0rd", Role.USER, Role.NONE),
        BAD_ADMIN_PW("admin", "wrongPassw0rd", Role.USER, Role.NONE),
        BAD_USER_PW("user", "wrongPassw0rd", Role.USER, Role.NONE),
        UNAUTHENTICATED(null, null, Role.USER, Role.NONE);
        String username;
        String password;
        Role restrictedConfigRole;
        Role unrestrictedConfigRole;
        // NOTE: the parameter order here is (unrestricted, restricted) — the
        // reverse of the field declaration order. The assignments below are
        // correct; take care when adding constants above.
        private User(String username, String password, Role unrestrictedConfigRole, Role restrictedConfigRole) {
            this.username = username;
            this.password = password;
            this.restrictedConfigRole = restrictedConfigRole;
            this.unrestrictedConfigRole = unrestrictedConfigRole;
        }
    }
    // Fixture state created fresh in setUp() for every test
    private Asset testAsset;
    private Asset createdAsset;
    private Asset assetWithAttachments;
    private Asset createdAssetWithAttachments;
    private String attachmentName;
    private byte[] attachmentContent;
    private Attachment attachment;
    private Attachment createdAttachment;
    /**
     * The role that we expect the user in the userContext to be mapped to
     * <p>
     * Tests should look at this field when deciding whether or not operations that they run from
     * the userContext should succeed or fail.
     */
    private final Role role;
    /**
     * Build the test parameters
     * <p>
     * We want to test the same methods with multiple users which we expect to map to different
     * roles. JUnit lets us construct a list of test parameters and each test will run which each
     * set of test parameters.
     * <p>
     * The parameters we are passing are URL, username, password, expected role and label. The label
     * is just used to name the test so we can tell them apart in the test results.
     *
     * @return the test parameters
     */
    @Parameters(name = "{4}")
    public static Collection<Object[]> makeParameters() {
        Collection<Object[]> data = new ArrayList<>();
        for (User user : User.values()) {
            data.add(new Object[] { RESTRICTED_URL_HTTP, user.username, user.password, user.restrictedConfigRole, user + " - restricted - http" });
            data.add(new Object[] { UNRESTRICTED_URL_HTTP, user.username, user.password, user.unrestrictedConfigRole, user + " - unrestricted - http" });
            data.add(new Object[] { RESTRICTED_URL_HTTPS, user.username, user.password, user.restrictedConfigRole, user + " - restricted - https" });
            data.add(new Object[] { UNRESTRICTED_URL_HTTPS, user.username, user.password, user.unrestrictedConfigRole, user + " - unrestricted - https" });
        }
        return data;
    }
    /**
     * Process the test parameters and set up the adminContext and userContext.
     */
    public PermissionTest(String url, String username, String password, Role role, String label) {
        adminContext = RepositoryContext.createAsAdmin(url, true);
        userContext = new RepositoryContext(url, username, password, false);
        this.role = role;
    }
    /**
     * Creates two assets as admin (one plain, one with an attachment) so that
     * each test has known content to operate on from the userContext.
     */
    @Before
    public void setUp() throws IOException, InvalidJsonAssetException {
        testAsset = AssetUtils.getTestAsset();
        createdAsset = adminContext.addAssetNoAttachments(testAsset);
        assetWithAttachments = AssetUtils.getTestAsset();
        createdAssetWithAttachments = adminContext.addAssetNoAttachments(assetWithAttachments);
        attachmentName = "nocontent.txt";
        attachmentContent = "I am the content.\nThere is not much content to be had.\n".getBytes(StandardCharsets.UTF_8);
        attachment = AssetUtils.getTestAttachmentWithContent();
        createdAttachment = adminContext.doPostAttachmentWithContent(createdAssetWithAttachments.get_id(),
                                                                     attachmentName,
                                                                     attachment,
                                                                     attachmentContent,
                                                                     ContentType.APPLICATION_OCTET_STREAM);
    }
    /**
     *
     * GET /assets
     *
     * Allowed for ADMIN and USER
     */
    @Test
    public void testGetAllAssets() throws InvalidJsonAssetException, ParseException, IOException {
        if (role.isUser()) {
            AssetList assets = userContext.doGetAllAssets(200);
            // setUp creates exactly two assets
            assertEquals("Wrong number of assets", 2, assets.size());
        } else {
            userContext.doGetAllAssetsBad(RC_REJECT);
        }
    }
    /**
     * POST /assets
     *
     * Allowed for ADMIN
     *
     */
    @Test
    public void testPostAsset() throws InvalidJsonAssetException, IOException {
        Asset asset = AssetUtils.getTestAsset();
        if (role.isAdmin()) {
            Asset returnedAsset = userContext.addAssetNoAttachments(asset);
            AssetUtils.assertUploadedAssetEquivalentToOriginal("Returned asset should match the asset that was uploaded", asset, returnedAsset);
        } else {
            userContext.addBadAsset(asset, RC_REJECT);
        }
    }
    /**
     * GET /assets/{assetId}
     *
     * Allowed for ADMIN and USER
     */
    @Test
    public void testGetAsset() throws InvalidJsonAssetException, IOException {
        if (role.isUser()) {
            Asset returnedAsset = userContext.getAsset(createdAsset.get_id());
            AssetUtils.assertUploadedAssetEquivalentToOriginal("Returned asset should match the asset that was uploaded", testAsset, returnedAsset);
        } else {
            userContext.getBadAsset(createdAsset.get_id(), RC_REJECT);
        }
    }
    /**
     * DELETE /assets/{assetId}
     *
     * Allowed for ADMIN
     */
    @Test
    public void testDeleteAsset() throws IOException {
        if (role.isAdmin()) {
            userContext.deleteAsset(createdAsset.get_id(), 204);
        } else {
            userContext.deleteAsset(createdAsset.get_id(), RC_REJECT);
        }
    }
    /**
     * POST /assets/{assetId}/attachments Content-type: multipart/formdata
     *
     * Allowed for ADMIN
     *
     * @throws IOException
     * @throws InvalidJsonAssetException
     * @throws ClientProtocolException
     */
    @Test
    public void testPostAttachmentNoContent() throws ClientProtocolException, InvalidJsonAssetException, IOException {
        Attachment attachment = AssetUtils.getTestAttachmentNoContent();
        if (role.isAdmin()) {
            userContext.doPostAttachmentNoContent(createdAsset.get_id(), "theAttachment", attachment);
        } else {
            userContext.doPostBadAttachmentNoContent(createdAsset.get_id(), "theAttachment", attachment, RC_REJECT, null);
        }
    }
    /**
     * POST /assets/{assetId}/attachments Content-type: application/json
     *
     * Allowed for ADMIN
     *
     */
    @Test
    public void testPostAttachmentWithContent() throws ClientProtocolException, InvalidJsonAssetException, IOException {
        byte[] content = "I am the content.\nThere is not much to me.\n".getBytes(StandardCharsets.UTF_8);
        String name = "theAttachment";
        if (role.isAdmin()) {
            userContext.doPostAttachmentWithContent(createdAsset.get_id(),
                                                    name,
                                                    attachment,
                                                    content,
                                                    ContentType.APPLICATION_OCTET_STREAM);
        } else {
            userContext.doPostBadAttachmentWithContent(createdAsset.get_id(),
                                                       name,
                                                       attachment,
                                                       content,
                                                       ContentType.APPLICATION_OCTET_STREAM,
                                                       RC_REJECT,
                                                       null);
        }
    }
    /**
     * DELETE /assets/{assetId}/attachments/{attachmentId}
     *
     * Allowed for ADMIN
     */
    @Test
    public void testDeleteAttachment() throws ClientProtocolException, IOException {
        // FIX: createdAttachment was posted to createdAssetWithAttachments in
        // setUp, so the delete must address it under that asset's id; the old
        // code used createdAsset.get_id(), a mismatched asset/attachment pair.
        if (role.isAdmin()) {
            userContext.doDeleteAttachment(createdAssetWithAttachments.get_id(), createdAttachment.get_id(), 204);
        } else {
            userContext.doDeleteAttachment(createdAssetWithAttachments.get_id(), createdAttachment.get_id(), RC_REJECT);
        }
    }
    /**
     * GET /assets/{assetId}/attachments/{attachmentId}/{name}
     *
     * Allowed for ADMIN and USER
     */
    @Test
    public void testGetAttachmentContent() throws IOException {
        if (role.isUser()) {
            byte[] returnedContent = userContext.doGetAttachmentContent(createdAssetWithAttachments.get_id(), createdAttachment.get_id(), attachmentName);
            Assert.assertArrayEquals("Returned content should be equal to that which was uploaded", attachmentContent, returnedContent);
        } else {
            userContext.doGetAttachmentContentInError(createdAssetWithAttachments.get_id(), createdAttachment.get_id(), attachmentName, RC_REJECT, null);
        }
    }
    /**
     * PUT /assets/{assetId}/state
     *
     * Allowed for ADMIN
     *
     */
    @Test
    public void testPutAssetState() throws InvalidJsonAssetException, IOException {
        if (role.isAdmin()) {
            userContext.updateAssetState(createdAsset.get_id(), Asset.StateAction.PUBLISH.getValue(), 200);
        } else {
            userContext.updateAssetState(createdAsset.get_id(), Asset.StateAction.PUBLISH.getValue(), RC_REJECT);
        }
    }
    /**
     * GET /assets/summary
     *
     * Allowed for ADMIN and USER
     */
    @Test
    public void testGetAssetSummary() throws Exception {
        if (role.isUser()) {
            userContext.getAssetSummary("fields=name");
        } else {
            userContext.getBadAssetSummary("fields=name", RC_REJECT);
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.