gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package glredbook11;
import glredbook10.GLSkeleton;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.nio.ByteBuffer;
import javax.media.opengl.GL;
import javax.media.opengl.GL2;
import javax.media.opengl.GLAutoDrawable;
import javax.media.opengl.GLCapabilities;
import javax.media.opengl.GLEventListener;
import javax.media.opengl.awt.GLJPanel;
import javax.media.opengl.glu.GLU;
import javax.swing.JFrame;
import com.jogamp.opengl.util.GLBuffers;
/**
* This program texture maps a checkerboard image onto two rectangles. This
* program demonstrates the wrapping modes, if the texture coordinates fall
* outside 0.0 and 1.0. Interaction: Pressing the 's' and 'S' keys switch the
* wrapping between clamping and repeating for the s parameter. The 't' and 'T'
* keys control the wrapping for the t parameter. If running this program on
* OpenGL 1.0, texture objects are not used.
*
* @author Kiet Le (Java port) Ported to JOGL 2.x by Claudio Eduardo Goes
*/
/**
 * Texture maps a checkerboard image onto two rectangles to demonstrate the
 * GL texture wrapping modes when texture coordinates fall outside [0, 1].
 * Interaction: 's' / 'S' switch the wrap mode of the s parameter between
 * clamping and repeating; 't' / 'T' do the same for the t parameter.
 *
 * @author Kiet Le (Java port) Ported to JOGL 2.x by Claudio Eduardo Goes
 */
public class wrap//
        extends GLSkeleton<GLJPanel>
        implements GLEventListener, KeyListener {

    private GLU glu;
    // Texture object name allocated by the GL; index 0 holds the id.
    private int texName[] = new int[1];
    private static final int checkImageWidth = 64;
    private static final int checkImageHeight = 64;
    private static final int rgba = 4; // bytes per texel: R, G, B, A
    // Direct buffer filled by makeCheckImage() and uploaded once in init().
    private ByteBuffer checkImageBuf = //
            GLBuffers.newDirectByteBuffer(checkImageHeight * checkImageWidth * rgba);
    // Most recent key event; display() inspects its character to select the
    // wrap mode applied to the s / t texture parameters.
    private KeyEvent key;

    /**
     * Creates the GLJPanel drawable and registers this object as both GL
     * event listener and key listener.
     */
    @Override
    protected GLJPanel createDrawable() {
        GLCapabilities caps = new GLCapabilities(null);
        GLJPanel panel = new GLJPanel(caps);
        panel.addGLEventListener(this);
        panel.addKeyListener(this);
        return panel;
    }

    /** Entry point: shows the demo in a 250x250 frame. */
    public static void main(String[] args) {
        wrap demo = new wrap();
        JFrame.setDefaultLookAndFeelDecorated(true);
        JFrame frame = new JFrame("wrap");
        frame.setSize(250, 250);
        frame.setLocationRelativeTo(null);
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        frame.getContentPane().add(demo.drawable);
        frame.setVisible(true);
        demo.drawable.requestFocusInWindow();
    }

    /**
     * One-time GL setup: builds the checkerboard image, creates a texture
     * object, and uploads the image with REPEAT wrapping and NEAREST
     * filtering on both axes.
     */
    public void init(GLAutoDrawable drawable) {
        GL2 gl = drawable.getGL().getGL2();
        glu = new GLU();
        // BUG FIX: the original printed the numeric constant GL.GL_VERSION;
        // query the driver for the actual version string instead.
        System.out.println(gl.glGetString(GL.GL_VERSION));
        gl.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
        gl.glShadeModel(GL2.GL_FLAT);
        gl.glEnable(GL.GL_DEPTH_TEST);
        makeCheckImage();
        // Image rows are tightly packed; disable the default 4-byte alignment.
        gl.glPixelStorei(GL.GL_UNPACK_ALIGNMENT, 1);
        gl.glGenTextures(1, texName, 0);
        gl.glBindTexture(GL2.GL_TEXTURE_2D, texName[0]);
        gl.glTexParameteri(GL2.GL_TEXTURE_2D, GL2.GL_TEXTURE_WRAP_S,
                GL2.GL_REPEAT);
        gl.glTexParameteri(GL2.GL_TEXTURE_2D, GL2.GL_TEXTURE_WRAP_T,
                GL2.GL_REPEAT);
        gl.glTexParameteri(GL2.GL_TEXTURE_2D, GL2.GL_TEXTURE_MAG_FILTER,
                GL.GL_NEAREST);
        gl.glTexParameteri(GL2.GL_TEXTURE_2D, GL2.GL_TEXTURE_MIN_FILTER,
                GL.GL_NEAREST);
        gl.glTexImage2D(GL2.GL_TEXTURE_2D, 0, GL2.GL_RGBA, checkImageWidth,
                checkImageHeight, 0, GL2.GL_RGBA, GL.GL_UNSIGNED_BYTE,
                checkImageBuf);
    }

    /**
     * Renders the two textured quads. If a wrap-mode key was pressed since
     * the last frame, the corresponding texture parameter is updated first.
     * Texture coordinates run from 0 to 3, so the wrap mode is visible on
     * two thirds of each quad.
     */
    public void display(GLAutoDrawable drawable) {
        GL2 gl = drawable.getGL().getGL2();
        gl.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT);
        gl.glEnable(GL2.GL_TEXTURE_2D);
        if (key != null) {
            switch (key.getKeyChar()) {
            case 's':
                gl.glTexParameteri(GL2.GL_TEXTURE_2D, GL2.GL_TEXTURE_WRAP_S,
                        GL2.GL_CLAMP);
                break;
            case 'S':
                gl.glTexParameteri(GL2.GL_TEXTURE_2D, GL2.GL_TEXTURE_WRAP_S,
                        GL2.GL_REPEAT);
                break;
            case 't':
                gl.glTexParameteri(GL2.GL_TEXTURE_2D, GL2.GL_TEXTURE_WRAP_T,
                        GL2.GL_CLAMP);
                break;
            case 'T':
                gl.glTexParameteri(GL2.GL_TEXTURE_2D, GL2.GL_TEXTURE_WRAP_T,
                        GL2.GL_REPEAT);
                break;
            default:
                break;
            }
        }
        gl.glTexEnvf(GL2.GL_TEXTURE_ENV, GL2.GL_TEXTURE_ENV_MODE, GL2.GL_DECAL);
        gl.glBindTexture(GL2.GL_TEXTURE_2D, texName[0]);
        gl.glBegin(GL2.GL_QUADS);
        // Flat quad facing the viewer.
        gl.glTexCoord2d(0.0, 0.0);
        gl.glVertex3d(-2.0, -1.0, 0.0);
        gl.glTexCoord2d(0.0, 3.0);
        gl.glVertex3d(-2.0, 1.0, 0.0);
        gl.glTexCoord2d(3.0, 3.0);
        gl.glVertex3d(0.0, 1.0, 0.0);
        gl.glTexCoord2d(3.0, 0.0);
        gl.glVertex3d(0.0, -1.0, 0.0);
        // Second quad angled away from the viewer.
        gl.glTexCoord2d(0.0, 0.0);
        gl.glVertex3d(1.0, -1.0, 0.0);
        gl.glTexCoord2d(0.0, 3.0);
        gl.glVertex3d(1.0, 1.0, 0.0);
        gl.glTexCoord2d(3.0, 3.0);
        gl.glVertex3d(2.41421, 1.0, -1.41421);
        gl.glTexCoord2d(3.0, 0.0);
        gl.glVertex3d(2.41421, -1.0, -1.41421);
        gl.glEnd();
        gl.glFlush();
        gl.glDisable(GL2.GL_TEXTURE_2D);
    }

    /** Re-establishes the perspective projection after a resize. */
    public void reshape(GLAutoDrawable drawable, int x, int y, int w, int h) {
        GL2 gl = drawable.getGL().getGL2();
        gl.glViewport(0, 0, w, h);
        gl.glMatrixMode(GL2.GL_PROJECTION);
        gl.glLoadIdentity();
        glu.gluPerspective(60.0, (float) w / (float) h, 1.0, 30.0);
        gl.glMatrixMode(GL2.GL_MODELVIEW);
        gl.glLoadIdentity();
        gl.glTranslated(0.0, 0.0, -3.6);
    }

    public void displayChanged(GLAutoDrawable drawable, boolean modeChanged,
            boolean deviceChanged) {
    }

    /**
     * Fills checkImageBuf with a 64x64 black/white checkerboard of 8x8-texel
     * squares (alpha fully opaque) and rewinds the buffer for upload.
     * The debug dump of every texel to stdout was removed.
     */
    private void makeCheckImage() {
        for (int i = 0; i < checkImageHeight; i++) {
            for (int j = 0; j < checkImageWidth; j++) {
                // White when exactly one of the two 8-wide stripe bits is set.
                byte c = (byte) ((byte) ((i & 0x8) == 0 ? 0x00 : 0xff)
                        ^ (byte) ((j & 0x8) == 0 ? 0x00 : 0xff));
                checkImageBuf.put(c);
                checkImageBuf.put(c);
                checkImageBuf.put(c);
                checkImageBuf.put((byte) 0xff);
            }
        }
        checkImageBuf.rewind();
    }

    public void keyTyped(KeyEvent key) {
    }

    /**
     * Records the key event for display() and exits on ESC; any other key
     * triggers a repaint so the new wrap mode takes effect.
     */
    public void keyPressed(KeyEvent key) {
        this.key = key;
        switch (key.getKeyCode()) {
        case KeyEvent.VK_ESCAPE:
            System.exit(0);
            break;
        default:
            break;
        }
        super.refresh();
    }

    public void keyReleased(KeyEvent key) {
    }

    public void dispose(GLAutoDrawable arg0) {
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.benchmark.search.child;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.StopWatch;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.SizeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.node.Node;
import java.io.IOException;
import java.util.Arrays;
import static org.elasticsearch.client.Requests.createIndexRequest;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.hasChildQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
import static org.elasticsearch.node.NodeBuilder.nodeBuilder;
/**
*
*/
/**
 * Benchmarks has_child filters and queries where the number of matching child
 * documents grows in powers of two, to observe short-circuiting behaviour.
 * A query for {@code field2 == 2^k} matches exactly {@code 2^k} children.
 */
public class ChildSearchShortCircuitBenchmark {

    public static void main(String[] args) throws Exception {
        Settings settings = settingsBuilder()
                .put("index.refresh_interval", "-1")
                .put(SETTING_NUMBER_OF_SHARDS, 1)
                .put(SETTING_NUMBER_OF_REPLICAS, 0)
                .build();
        String clusterName = ChildSearchShortCircuitBenchmark.class.getSimpleName();
        Node node1 = nodeBuilder().clusterName(clusterName)
                .settings(settingsBuilder().put(settings).put("name", "node1"))
                .node();
        Client client = node1.client();

        long PARENT_COUNT = SizeValue.parseSizeValue("10M").singles();
        int BATCH = 100;
        int QUERY_WARMUP = 5;
        int QUERY_COUNT = 25;
        String indexName = "test";

        client.admin().cluster().prepareHealth(indexName).setWaitForGreenStatus().setTimeout("10s").execute().actionGet();
        try {
            client.admin().indices().create(createIndexRequest(indexName)).actionGet();
            client.admin().indices().preparePutMapping(indexName).setType("child").setSource(XContentFactory.jsonBuilder().startObject().startObject("child")
                    .startObject("_parent").field("type", "parent").endObject()
                    .endObject().endObject()).execute().actionGet();
            Thread.sleep(5000);
            StopWatch stopWatch = new StopWatch().start();

            System.out.println("--> Indexing [" + PARENT_COUNT + "] parent document and some child documents");
            long ITERS = PARENT_COUNT / BATCH;
            int i = 1;
            int counter = 0;
            for (; i <= ITERS; i++) {
                BulkRequestBuilder request = client.prepareBulk();
                for (int j = 0; j < BATCH; j++) {
                    counter++;
                    request.add(Requests.indexRequest(indexName).type("parent").id(Integer.toString(counter))
                            .source(parentSource(counter)));
                }
                BulkResponse response = request.execute().actionGet();
                if (response.hasFailures()) {
                    System.err.println("--> failures...");
                }
                if (((i * BATCH) % 10000) == 0) {
                    System.out.println("--> Indexed " + (i * BATCH) + "parent docs; took " + stopWatch.stop().lastTaskTime());
                    stopWatch.start();
                }
            }

            // Index 2^k children (k = 0, 1, 2, ...) against distinct parents so a
            // match on field2 == 2^k hits exactly 2^k documents.
            int id = 0;
            for (i = 1; i <= PARENT_COUNT; i *= 2) {
                int parentId = 1;
                for (int j = 0; j < i; j++) {
                    client.prepareIndex(indexName, "child", Integer.toString(id++))
                            .setParent(Integer.toString(parentId++))
                            .setSource(childSource(i))
                            .execute().actionGet();
                }
            }
            System.out.println("--> Indexing took " + stopWatch.totalTime());
        } catch (Exception e) {
            System.out.println("--> Index already exists, ignoring indexing phase, waiting for green");
            ClusterHealthResponse clusterHealthResponse = client.admin().cluster().prepareHealth(indexName).setWaitForGreenStatus().setTimeout("10m").execute().actionGet();
            if (clusterHealthResponse.isTimedOut()) {
                System.err.println("--> Timed out waiting for cluster health");
            }
        }
        client.admin().indices().prepareRefresh().execute().actionGet();
        System.out.println("--> Number of docs in index: " + client.prepareCount(indexName).setQuery(matchAllQuery()).execute().actionGet().getCount());

        System.out.println("--> Running just child query");
        // Run just the child query, warm up first.
        // COMPILE FIX: reuse the method-local 'i' (declared above); the original
        // redeclared 'int i' here, which does not compile in Java.
        for (i = 1; i <= 10000; i *= 2) {
            SearchResponse searchResponse = client.prepareSearch(indexName).setQuery(matchQuery("child.field2", i)).execute().actionGet();
            System.out.println("--> Warmup took[" + i + "]: " + searchResponse.getTook());
            if (searchResponse.getHits().totalHits() != i) {
                System.err.println("--> mismatch on hits");
            }
        }

        NodesStatsResponse statsResponse = client.admin().cluster().prepareNodesStats()
                .setJvm(true).execute().actionGet();
        System.out.println("--> Committed heap size: " + statsResponse.getNodes()[0].getJvm().getMem().getHeapCommitted());
        System.out.println("--> Used heap size: " + statsResponse.getNodes()[0].getJvm().getMem().getHeapUsed());

        // Warm up the parent/child query path.
        for (int j = 1; j < QUERY_WARMUP; j *= 2) {
            SearchResponse searchResponse = client.prepareSearch(indexName)
                    .setQuery(
                            hasChildQuery("child", matchQuery("field2", j))
                    )
                    .execute().actionGet();
            if (searchResponse.getFailedShards() > 0) {
                System.err.println("Search Failures " + Arrays.toString(searchResponse.getShardFailures()));
            }
            if (searchResponse.getHits().totalHits() != j) {
                // BUG FIX: the expected hit count is j, not PARENT_COUNT.
                System.err.println("--> mismatch on hits [" + j + "], got [" + searchResponse.getHits().totalHits() + "], expected [" + j + "]");
            }
        }

        long totalQueryTime;
        for (i = 1; i < PARENT_COUNT; i *= 2) {
            // BUG FIX: reset per bucket; the original accumulated the time across
            // buckets, skewing every reported average after the first.
            totalQueryTime = 0;
            for (int j = 0; j < QUERY_COUNT; j++) {
                SearchResponse searchResponse = client.prepareSearch(indexName)
                        .setQuery(boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", matchQuery("field2", i))))
                        .execute().actionGet();
                if (searchResponse.getHits().totalHits() != i) {
                    System.err.println("--> mismatch on hits");
                }
                totalQueryTime += searchResponse.getTookInMillis();
            }
            System.out.println("--> has_child filter " + i + " Avg: " + (totalQueryTime / QUERY_COUNT) + "ms");
        }

        statsResponse = client.admin().cluster().prepareNodesStats()
                .setJvm(true).setIndices(true).execute().actionGet();
        System.out.println("--> Field data size: " + statsResponse.getNodes()[0].getIndices().getFieldData().getMemorySize());
        System.out.println("--> Used heap size: " + statsResponse.getNodes()[0].getJvm().getMem().getHeapUsed());

        for (i = 1; i < PARENT_COUNT; i *= 2) {
            // Same per-bucket reset as above for the scoring variant.
            totalQueryTime = 0;
            for (int j = 0; j < QUERY_COUNT; j++) {
                SearchResponse searchResponse = client.prepareSearch(indexName)
                        .setQuery(hasChildQuery("child", matchQuery("field2", i)).scoreMode(ScoreMode.Max))
                        .execute().actionGet();
                if (searchResponse.getHits().totalHits() != i) {
                    System.err.println("--> mismatch on hits");
                }
                totalQueryTime += searchResponse.getTookInMillis();
            }
            System.out.println("--> has_child query " + i + " Avg: " + (totalQueryTime / QUERY_COUNT) + "ms");
        }

        System.gc();
        statsResponse = client.admin().cluster().prepareNodesStats()
                .setJvm(true).setIndices(true).execute().actionGet();
        System.out.println("--> Field data size: " + statsResponse.getNodes()[0].getIndices().getFieldData().getMemorySize());
        System.out.println("--> Used heap size: " + statsResponse.getNodes()[0].getJvm().getMem().getHeapUsed());

        client.close();
        node1.close();
    }

    /** Builds {@code {"field1": "<val>"}} as the parent document source. */
    private static XContentBuilder parentSource(int val) throws IOException {
        return jsonBuilder().startObject().field("field1", Integer.toString(val)).endObject();
    }

    /** Builds {@code {"field2": "<val>"}} as the child document source. */
    private static XContentBuilder childSource(int val) throws IOException {
        return jsonBuilder().startObject().field("field2", Integer.toString(val)).endObject();
    }
}
| |
/*
* Copyright (C) 2015-2016 Emanuel Moecklin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.onegravity.rteditor;
import android.text.SpannableString;
import android.text.Spanned;
import android.text.style.CharacterStyle;
import android.text.style.ParagraphStyle;
import java.lang.reflect.Array;
/**
* Clones the Spannable part of an editor by copying the text, all
* CharacterStyle, and all ParagraphStyle spans to a new Spannable object
* (used for undo/redo).
* <p>
* The code is partly taken from the non-public class
* android.text.SpannableStringInternal.
*/
/**
 * Clones the Spannable part of an editor by copying the text plus all
 * CharacterStyle and ParagraphStyle spans into a new Spannable (used for
 * undo/redo). Span bookkeeping is kept in this class's own parallel arrays
 * rather than the superclass's, mirroring the non-public
 * android.text.SpannableStringInternal implementation.
 */
public class ClonedSpannableString extends SpannableString {

    // Parallel storage: mSpans[i] is described by the COLUMNS ints starting
    // at mSpanData[i * COLUMNS] (start, end, flags).
    private Object[] mSpans;
    private int[] mSpanData;
    private int mSpanCount;

    private static final int START = 0;
    private static final int END = 1;
    private static final int FLAGS = 2;
    private static final int COLUMNS = 3;

    public ClonedSpannableString(Spanned source) {
        this((CharSequence) source);
    }

    public ClonedSpannableString(CharSequence source) {
        super(source.toString()); // the toString is important to prevent the super class from copying the spans
        init(source, 0, source.length());
    }

    /**
     * Copies every CharacterStyle/ParagraphStyle span in [start, end) of the
     * source into this object, clamping span bounds to the copied range.
     */
    private void init(CharSequence source, int start, int end) {
        int initial = 20;
        mSpans = new Object[initial];
        // CONSISTENCY: use COLUMNS rather than the magic literal 3.
        mSpanData = new int[initial * COLUMNS];
        if (source instanceof Spanned) {
            Spanned sp = (Spanned) source;
            for (Object span : sp.getSpans(start, end, Object.class)) {
                if (span instanceof CharacterStyle || span instanceof ParagraphStyle) {
                    int st = sp.getSpanStart(span);
                    int en = sp.getSpanEnd(span);
                    int fl = sp.getSpanFlags(span);
                    if (st < start) st = start;
                    if (en > end) en = end;
                    setSpan(span, st - start, en - start, fl);
                }
            }
        }
    }

    // ****************************************** SpannableString Methods *******************************************

    /** Records the span in the local arrays, growing them by 10 when full. */
    @Override
    public void setSpan(Object what, int start, int end, int flags) {
        if (mSpanCount + 1 >= mSpans.length) {
            int newsize = mSpanCount + 10;
            Object[] newtags = new Object[newsize];
            int[] newdata = new int[newsize * COLUMNS];
            System.arraycopy(mSpans, 0, newtags, 0, mSpanCount);
            System.arraycopy(mSpanData, 0, newdata, 0, mSpanCount * COLUMNS);
            mSpans = newtags;
            mSpanData = newdata;
        }
        mSpans[mSpanCount] = what;
        mSpanData[mSpanCount * COLUMNS + START] = start;
        mSpanData[mSpanCount * COLUMNS + END] = end;
        mSpanData[mSpanCount * COLUMNS + FLAGS] = flags;
        mSpanCount++;
    }

    /** Removes the first (most recently added) occurrence of the span. */
    @Override
    public void removeSpan(Object what) {
        int count = mSpanCount;
        Object[] spans = mSpans;
        int[] data = mSpanData;
        for (int i = count - 1; i >= 0; i--) {
            if (spans[i] == what) {
                int c = count - (i + 1);
                System.arraycopy(spans, i + 1, spans, i, c);
                System.arraycopy(data, (i + 1) * COLUMNS,
                        data, i * COLUMNS, c * COLUMNS);
                mSpanCount--;
                // FIX: clear the now-duplicated last slot so the removed
                // span is not retained by this object.
                spans[mSpanCount] = null;
                return;
            }
        }
    }

    /** @return the start of the span, or -1 if it is not attached here. */
    @Override
    public int getSpanStart(Object what) {
        int count = mSpanCount;
        Object[] spans = mSpans;
        int[] data = mSpanData;
        for (int i = count - 1; i >= 0; i--) {
            if (spans[i] == what) {
                return data[i * COLUMNS + START];
            }
        }
        return -1;
    }

    /** @return the end of the span, or -1 if it is not attached here. */
    @Override
    public int getSpanEnd(Object what) {
        int count = mSpanCount;
        Object[] spans = mSpans;
        int[] data = mSpanData;
        for (int i = count - 1; i >= 0; i--) {
            if (spans[i] == what) {
                return data[i * COLUMNS + END];
            }
        }
        return -1;
    }

    /** @return the span's flags, or 0 if it is not attached here. */
    @Override
    public int getSpanFlags(Object what) {
        int count = mSpanCount;
        Object[] spans = mSpans;
        int[] data = mSpanData;
        for (int i = count - 1; i >= 0; i--) {
            if (spans[i] == what) {
                return data[i * COLUMNS + FLAGS];
            }
        }
        return 0;
    }

    /**
     * Returns all spans of the given kind overlapping [queryStart, queryEnd],
     * ordered with higher SPAN_PRIORITY spans first (taken from the Android
     * SpannableStringInternal implementation; logic preserved verbatim).
     */
    @Override
    @SuppressWarnings("unchecked")
    public <T> T[] getSpans(int queryStart, int queryEnd, Class<T> kind) {
        int count = 0;
        int spanCount = mSpanCount;
        Object[] spans = mSpans;
        int[] data = mSpanData;
        Object[] ret = null;
        Object ret1 = null;
        for (int i = 0; i < spanCount; i++) {
            if (kind != null && !kind.isInstance(spans[i])) {
                continue;
            }
            int spanStart = data[i * COLUMNS + START];
            int spanEnd = data[i * COLUMNS + END];
            if (spanStart > queryEnd) {
                continue;
            }
            if (spanEnd < queryStart) {
                continue;
            }
            // Zero-length spans touching the query edge don't count as overlap.
            if (spanStart != spanEnd && queryStart != queryEnd) {
                if (spanStart == queryEnd) {
                    continue;
                }
                if (spanEnd == queryStart) {
                    continue;
                }
            }
            if (count == 0) {
                // Single-result fast path: avoid allocating an array.
                ret1 = spans[i];
                count++;
            } else {
                if (count == 1) {
                    // Second hit: allocate large enough for all remaining spans.
                    ret = (Object[]) Array.newInstance(kind, spanCount - i + 1);
                    ret[0] = ret1;
                }
                int prio = data[i * COLUMNS + FLAGS] & Spanned.SPAN_PRIORITY;
                if (prio != 0) {
                    // Insert before the first lower-priority entry.
                    int j;
                    for (j = 0; j < count; j++) {
                        int p = getSpanFlags(ret[j]) & Spanned.SPAN_PRIORITY;
                        if (prio > p) {
                            break;
                        }
                    }
                    System.arraycopy(ret, j, ret, j + 1, count - j);
                    ret[j] = spans[i];
                    count++;
                } else {
                    ret[count++] = spans[i];
                }
            }
        }
        if (count == 0) {
            return (T[]) Array.newInstance(kind, 0);
        }
        if (count == 1) {
            ret = (Object[]) Array.newInstance(kind, 1);
            ret[0] = ret1;
            return (T[]) ret;
        }
        if (count == ret.length) {
            return (T[]) ret;
        }
        // Trim the over-allocated buffer to the exact result size.
        Object[] nret = (Object[]) Array.newInstance(kind, count);
        System.arraycopy(ret, 0, nret, 0, count);
        return (T[]) nret;
    }

    /**
     * Returns the smallest boundary (span start or end) of a span of the
     * given kind that lies strictly inside (start, limit), or limit if none.
     */
    @Override
    @SuppressWarnings("rawtypes")
    public int nextSpanTransition(int start, int limit, Class kind) {
        int count = mSpanCount;
        Object[] spans = mSpans;
        int[] data = mSpanData;
        if (kind == null) {
            kind = Object.class;
        }
        for (int i = 0; i < count; i++) {
            int st = data[i * COLUMNS + START];
            int en = data[i * COLUMNS + END];
            if (st > start && st < limit && kind.isInstance(spans[i]))
                limit = st;
            if (en > start && en < limit && kind.isInstance(spans[i]))
                limit = en;
        }
        return limit;
    }
}
| |
package net.catchpole.dom;
// Copyright 2014 catchpole.net
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import net.catchpole.model.Model;
import org.w3c.dom.CharacterData;
import org.w3c.dom.DOMException;
import org.w3c.dom.Document;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.Text;
import org.w3c.dom.UserDataHandler;
import java.util.ArrayList;
import java.util.List;
/**
 * A DOM Node backed by a catchpole {@link Model}. Most Node behaviour is
 * delegated to a proxy node created by the owning {@link ModelDocument};
 * child nodes are materialised lazily from the backing model on first access.
 * Also implements CharacterData by delegating to the proxy when it is a Text
 * node (getData/setData cast the proxy to Text and will throw
 * ClassCastException for element-backed nodes).
 */
public class ModelNode implements Node, CharacterData {
    protected ModelDocument modelDocument;
    protected final Node proxyNode;
    protected final Model backingModel;
    protected final ModelNode parentNode;
    // Lazily populated by resolveNodeList(); null until first child access.
    protected List<Node> nodeList;

    /** Creates a text node (no backing model). */
    public ModelNode(ModelDocument modelDocument, ModelNode parentNode, String text) {
        this(modelDocument, parentNode, null, modelDocument.createTextNode(text));
    }

    /** Creates an element node whose name is derived from the backing model. */
    public ModelNode(ModelDocument modelDocument, ModelNode parentNode, Model backingModel) {
        this(modelDocument, parentNode, backingModel,
                modelDocument.createElement(modelDocument.getElementBuilder().getName(backingModel)));
    }

    public ModelNode(ModelDocument modelDocument, ModelNode parentNode, Model backingModel, Node proxyNode) {
        this.modelDocument = modelDocument;
        this.parentNode = parentNode;
        this.backingModel = backingModel;
        this.proxyNode = proxyNode;
    }

    /**
     * Materialises the child list on first use. BUG FIX: the original left
     * nodeList null when there was no backing model (text nodes), causing
     * NPEs in getFirstChild/hasChildNodes/attachSingleNode; it is now always
     * initialised to an (empty) list.
     */
    private void resolveNodeList() {
        if (nodeList == null) {
            nodeList = new ArrayList<Node>();
            if (backingModel != null) {
                for (Model eachModel : backingModel) {
                    nodeList.add(new ModelElement(modelDocument, this, eachModel));
                }
            }
        }
    }

    /** Appends an arbitrary node to the (lazily resolved) child list. */
    public void attachSingleNode(Node node) {
        resolveNodeList();
        nodeList.add(node);
    }

    /** Appends a new text child without triggering model resolution. */
    public void addTextNode(String text) {
        if (nodeList == null) {
            nodeList = new ArrayList<Node>();
        }
        nodeList.add(new ModelNode(modelDocument, this, text));
    }

    public String toString() {
        return this.getClass().getSimpleName() + ' ' + backingModel;
    }

    // node methods

    public String getNodeName() {
        return this.proxyNode.getNodeName();
    }

    public String getNodeValue() throws DOMException {
        return this.proxyNode.getNodeValue();
    }

    public void setNodeValue(String nodeValue) throws DOMException {
        this.proxyNode.setNodeValue(nodeValue);
    }

    public short getNodeType() {
        return this.proxyNode.getNodeType();
    }

    public Node getParentNode() {
        return this.parentNode;
    }

    /** Returns a live NodeList view over the lazily resolved children. */
    public NodeList getChildNodes() {
        resolveNodeList();
        return new NodeList() {
            public Node item(int index) {
                return nodeList.get(index);
            }

            public int getLength() {
                return nodeList.size();
            }
        };
    }

    public Node getFirstChild() {
        resolveNodeList();
        return nodeList.size() > 0 ? nodeList.get(0) : null;
    }

    public Node getLastChild() {
        resolveNodeList();
        return nodeList.size() > 0 ? nodeList.get(nodeList.size() - 1) : null;
    }

    /** Finds this node in the parent's child list and returns its predecessor. */
    public Node getPreviousSibling() {
        // ROBUSTNESS: a root or unattached node has no siblings.
        if (this.parentNode == null || this.parentNode.nodeList == null) {
            return null;
        }
        List<Node> list = this.parentNode.nodeList;
        int l = list.size();
        for (int x = 0; x < l; x++) {
            if (list.get(x) == this) {
                if (x > 0) {
                    return list.get(x - 1);
                }
            }
        }
        return null;
    }

    /** Finds this node in the parent's child list and returns its successor. */
    public Node getNextSibling() {
        if (this.parentNode == null || this.parentNode.nodeList == null) {
            return null;
        }
        List<Node> list = this.parentNode.nodeList;
        int l = list.size();
        for (int x = 0; x < l; x++) {
            if (list.get(x) == this) {
                if (x < (l - 1)) {
                    return list.get(x + 1);
                }
            }
        }
        return null;
    }

    public NamedNodeMap getAttributes() {
        return this.proxyNode.getAttributes();
    }

    public Document getOwnerDocument() {
        return this.proxyNode.getOwnerDocument();
    }

    public Node insertBefore(Node newChild, Node refChild) throws DOMException {
        return this.proxyNode.insertBefore(newChild, refChild);
    }

    public Node replaceChild(Node newChild, Node oldChild) throws DOMException {
        return this.proxyNode.replaceChild(newChild, oldChild);
    }

    public Node removeChild(Node oldChild) throws DOMException {
        return this.proxyNode.removeChild(oldChild);
    }

    public Node appendChild(Node newChild) throws DOMException {
        return this.proxyNode.appendChild(newChild);
    }

    public boolean hasChildNodes() {
        resolveNodeList();
        return this.nodeList.size() != 0;
    }

    public Node cloneNode(boolean deep) {
        return this.proxyNode.cloneNode(deep);
    }

    public void normalize() {
        this.proxyNode.normalize();
    }

    public boolean isSupported(String feature, String version) {
        return this.proxyNode.isSupported(feature, version);
    }

    public String getNamespaceURI() {
        return this.proxyNode.getNamespaceURI();
    }

    public String getPrefix() {
        return this.proxyNode.getPrefix();
    }

    public void setPrefix(String prefix) throws DOMException {
        this.proxyNode.setPrefix(prefix);
    }

    public String getLocalName() {
        return this.proxyNode.getLocalName();
    }

    public boolean hasAttributes() {
        return this.proxyNode.hasAttributes();
    }

    public String getBaseURI() {
        return this.proxyNode.getBaseURI();
    }

    public short compareDocumentPosition(Node other) throws DOMException {
        // BUG FIX: the original passed proxyNode, always comparing the proxy
        // against itself; compare against the supplied node instead.
        return this.proxyNode.compareDocumentPosition(other);
    }

    public String getTextContent() throws DOMException {
        return this.proxyNode.getTextContent();
    }

    public void setTextContent(String textContent) throws DOMException {
        this.proxyNode.setTextContent(textContent);
    }

    public boolean isSameNode(Node other) {
        return this.proxyNode.isSameNode(other);
    }

    public String lookupPrefix(String namespaceURI) {
        return this.proxyNode.lookupPrefix(namespaceURI);
    }

    public boolean isDefaultNamespace(String namespaceURI) {
        return this.proxyNode.isDefaultNamespace(namespaceURI);
    }

    public String lookupNamespaceURI(String prefix) {
        return this.proxyNode.lookupNamespaceURI(prefix);
    }

    public boolean isEqualNode(Node arg) {
        return this.proxyNode.isEqualNode(arg);
    }

    public Object getFeature(String feature, String version) {
        return this.proxyNode.getFeature(feature, version);
    }

    public Object setUserData(String key, Object data, UserDataHandler handler) {
        return this.proxyNode.setUserData(key, data, handler);
    }

    public Object getUserData(String key) {
        return this.proxyNode.getUserData(key);
    }

    // CharacterData methods (proxy must be a Text node).

    public String getData() throws DOMException {
        return ((Text) proxyNode).getData();
    }

    public void setData(String s) throws DOMException {
        ((Text) proxyNode).setData(s);
    }

    public int getLength() {
        return getData().length();
    }

    public String substringData(int i, int i1) throws DOMException {
        return getData().substring(i, i1);
    }

    public void appendData(String s) throws DOMException {
        setData(getData() + s);
    }

    public void insertData(int i, String s) throws DOMException {
        // FIX: signal lack of support per the DOM spec instead of throwing an
        // IllegalArgumentException with an unprofessional message.
        throw new DOMException(DOMException.NOT_SUPPORTED_ERR, "insertData is not supported");
    }

    public void deleteData(int i, int i1) throws DOMException {
        throw new DOMException(DOMException.NOT_SUPPORTED_ERR, "deleteData is not supported");
    }

    public void replaceData(int i, int i1, String s) throws DOMException {
        throw new DOMException(DOMException.NOT_SUPPORTED_ERR, "replaceData is not supported");
    }
}
| |
package io.clickhandler.reactGwt.generator;
import java.util.ArrayList;
import java.util.List;
/**
*
*/
public class DOMGenerator {
public static void main(String[] args) {
final List<Tag> tags = new ArrayList<>();
tags.add(new Tag("a", "AnchorElement"));
tags.add(new Tag("abbr", "AnchorElement"));
tags.add(new Tag("address", "Element"));
tags.add(new Tag("area", "AreaElement"));
tags.add(new Tag("article", "Element"));
tags.add(new Tag("aside", "Element"));
tags.add(new Tag("audio", "AudioElement"));
tags.add(new Tag("b", "Element"));
tags.add(new Tag("base", "BaseElement"));
tags.add(new Tag("bdi", "Element"));
tags.add(new Tag("bdo", "Element"));
tags.add(new Tag("big", "Element"));
tags.add(new Tag("blockquote", "Element"));
tags.add(new Tag("body", "BodyElement"));
tags.add(new Tag("br", "BRElement"));
tags.add(new Tag("button", "ButtonElement"));
tags.add(new Tag("canvas", "CanvasElement"));
tags.add(new Tag("caption", "TableCaptionElement"));
tags.add(new Tag("cite", "Element"));
tags.add(new Tag("code", "Element"));
tags.add(new Tag("col", "TableColElement"));
tags.add(new Tag("colgroup", "Element"));
tags.add(new Tag("data", "Element"));
tags.add(new Tag("datalist", "Element"));
tags.add(new Tag("dd", "Element"));
tags.add(new Tag("del", "Element"));
tags.add(new Tag("details", "DetailsElement"));
tags.add(new Tag("dfn", "Element"));
tags.add(new Tag("dialog", "Element"));
tags.add(new Tag("div", "DivElement"));
tags.add(new Tag("dl", "DListElement"));
tags.add(new Tag("dt", "Element"));
tags.add(new Tag("em", "Element"));
tags.add(new Tag("embed", "EmbedElement"));
tags.add(new Tag("fieldset", "FieldSetElement"));
tags.add(new Tag("figcaption", "Element"));
tags.add(new Tag("figure", "Element"));
tags.add(new Tag("footer", "Element"));
tags.add(new Tag("form", "FormElement"));
tags.add(new Tag("h1", "HeadingElement"));
tags.add(new Tag("h2", "HeadingElement"));
tags.add(new Tag("h3", "HeadingElement"));
tags.add(new Tag("h4", "HeadingElement"));
tags.add(new Tag("h5", "HeadingElement"));
tags.add(new Tag("h6", "HeadingElement"));
tags.add(new Tag("head", "HeadElement"));
tags.add(new Tag("header", "Element"));
tags.add(new Tag("hr", "HRElement"));
tags.add(new Tag("html", "HtmlElement"));
tags.add(new Tag("i", "Element"));
tags.add(new Tag("iframe", "IFrameElement"));
tags.add(new Tag("img", "ImageElement"));
tags.add(new Tag("input", "InputElement"));
tags.add(new Tag("ins", "Element"));
tags.add(new Tag("kbd", "Element"));
tags.add(new Tag("keygen", "KeygenElement"));
tags.add(new Tag("label", "LabelElement"));
tags.add(new Tag("legend", "LegendElement"));
tags.add(new Tag("li", "LIElement"));
tags.add(new Tag("link", "LinkElement"));
tags.add(new Tag("main", "Element"));
tags.add(new Tag("map", "MapElement"));
tags.add(new Tag("mark", "MapElement"));
tags.add(new Tag("menu", "MenuElement"));
tags.add(new Tag("menuitem", "Element"));
tags.add(new Tag("meta", "MetaElement"));
tags.add(new Tag("meter", "MeterElement"));
tags.add(new Tag("nav", "Element"));
tags.add(new Tag("noscript", "Element"));
tags.add(new Tag("object", "ObjectElement"));
tags.add(new Tag("ol", "OListElement"));
tags.add(new Tag("optgroup", "OptGroupElement"));
tags.add(new Tag("option", "OptionElement"));
tags.add(new Tag("output", "OutputElement"));
tags.add(new Tag("p", "ParagraphElement"));
tags.add(new Tag("param", "ParamElement"));
tags.add(new Tag("picture", "Element"));
tags.add(new Tag("pre", "PreElement"));
tags.add(new Tag("progress", "ProgressElement"));
tags.add(new Tag("q", "QuoteElement"));
tags.add(new Tag("rp", "Element"));
tags.add(new Tag("rt", "Element"));
tags.add(new Tag("ruby", "Element"));
tags.add(new Tag("s", "Element"));
tags.add(new Tag("samp", "Element"));
tags.add(new Tag("script", "ScriptElement"));
tags.add(new Tag("section", "TableSectionElement"));
tags.add(new Tag("select", "SelectElement"));
tags.add(new Tag("small", "Element"));
tags.add(new Tag("source", "SourceElement"));
tags.add(new Tag("span", "SpanElement"));
tags.add(new Tag("strong", "Element"));
tags.add(new Tag("style", "StyleElement"));
tags.add(new Tag("sub", "Element"));
tags.add(new Tag("summary", "Element"));
tags.add(new Tag("sup", "Element"));
tags.add(new Tag("table", "TableElement"));
tags.add(new Tag("tbody", "TableSectionElement"));
tags.add(new Tag("td", "TableCellElement"));
tags.add(new Tag("textarea", "TextAreaElement"));
tags.add(new Tag("tfoot", "Element"));
tags.add(new Tag("th", "TableCellElement"));
tags.add(new Tag("thead", "TableSectionElement"));
tags.add(new Tag("time", "Element"));
tags.add(new Tag("title", "TitleElement"));
tags.add(new Tag("tr", "TableRowElement"));
tags.add(new Tag("track", "TrackElement"));
tags.add(new Tag("u", "Element"));
tags.add(new Tag("ul", "UListElement"));
tags.add(new Tag("var", "Element"));
tags.add(new Tag("video", "VideoElement"));
tags.add(new Tag("wbr", "Element"));
final StringBuilder sb = new StringBuilder();
for (Tag tag : tags) {
// tag.generateDefault(sb);
tag.generate(sb);
}
System.out.println(sb.toString());
}
public static class Tag {

    /** HTML tag name, e.g. {@code "div"}. */
    public String name;
    /** Element class name used as the {@code HTMLProps} type argument, e.g. {@code "DivElement"}. */
    public String elementClassName;

    /**
     * @param name             the HTML tag name
     * @param elementClassName the element class backing this tag's props
     */
    public Tag(String name, String elementClassName) {
        this.name = name;
        this.elementClassName = elementClassName;
    }

    /**
     * Appends the full set of {@code static} factory-method overloads for
     * this tag to {@code sb}.
     *
     * @param sb target buffer; generated Java source is appended
     */
    public void generate(StringBuilder sb) {
        // Shared fragments for the generated parameter lists.
        String props = "HTMLProps<" + elementClassName + ">";
        String callback = "Func.Run1<" + props + "> callback";
        emit(sb, "static", "", "");
        emit(sb, "static", "String value", ", value");
        emit(sb, "static", "Object... children", ", children");
        emit(sb, "static", callback + ", String html", ", callback, html");
        emit(sb, "static", props + " props", ", props");
        emit(sb, "static", props + " props, String html", ", props, html");
        emit(sb, "static", props + " props, Object... children", ", props, children");
        emit(sb, "static", callback + ", Func.Run1<ChildList> childrenCallback", ", callback, childrenCallback");
        emit(sb, "static", callback, ", callback");
        emit(sb, "static", callback + ", Object... children", ", callback, children");
    }

    /**
     * Appends the (smaller) set of {@code default} interface-method
     * overloads for this tag to {@code sb}.
     *
     * @param sb target buffer; generated Java source is appended
     */
    public void generateDefault(StringBuilder sb) {
        String props = "HTMLProps<" + elementClassName + ">";
        String callback = "Func.Run1<" + props + "> callback";
        emit(sb, "default", "String value", ", value");
        emit(sb, "default", "Object... children", ", children");
        emit(sb, "default", callback + ", String html", ", callback, html");
        emit(sb, "default", props + " props, String html", ", props, html");
        emit(sb, "default", props + " props, Object... children", ", props, children");
        emit(sb, "default", callback + ", Func.Run1<List<Object>> childrenCallback", ", callback, childrenCallback");
        emit(sb, "default", callback + ", Object... children", ", callback, children");
    }

    /**
     * Appends one generated method of the form
     * {@code <keyword> ReactElement <name>(<params>) { return create("<name>"<args>); }}.
     * Output is byte-for-byte identical to the previous hand-unrolled version.
     */
    private void emit(StringBuilder sb, String keyword, String params, String args) {
        sb.append(keyword).append(" ReactElement ").append(name).append("(").append(params).append(") {\n");
        sb.append("return create(\"").append(name).append("\"").append(args).append(");\n");
        sb.append("}\n");
    }
}
}
| |
/*
* Copyright 2014-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wallerlab.yoink.adaptive.smooth;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.Resource;
import javax.xml.bind.JAXBElement;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.wallerlab.yoink.api.model.bootstrap.Job;
import org.wallerlab.yoink.api.model.bootstrap.JobParameter;
import org.wallerlab.yoink.api.model.molecule.Atom;
import org.wallerlab.yoink.api.model.molecule.Coord;
import org.wallerlab.yoink.api.model.molecule.Molecule;
import org.wallerlab.yoink.api.model.region.Region;
import org.wallerlab.yoink.api.service.Calculator;
import org.wallerlab.yoink.api.service.adaptive.SmoothFunction;
import org.wallerlab.yoink.api.service.adaptive.Smoothner;
import org.wallerlab.yoink.math.map.MapSorter;
import org.wallerlab.yoink.math.set.Subsets;
import com.google.common.primitives.Ints;
/**
* this class is to get weight factor in SCMP. for details please see:
* "Size-Consistent Multipartitioning QM/MM: A Stable and Efficient Adaptive
* QM/MM Method"
*
* @author Min Zheng
*
*/
@Service("scmpWeightFactors")
public class SCMPWeightFactors implements Smoothner {
@Resource
private Calculator<Double, Coord, Molecule> closestDistanceToMoleculeCalculator;
@Resource
@Qualifier("buloSmoothFunction")
private SmoothFunction buloSmoothFunction;// for QM
@Resource
@Qualifier("scmpSmoothFunction")
private SmoothFunction scmpSmoothFunction;// for MM
private Map<List<Integer>, Double> sigmaIndexMap;
@Value("${yoink.job.debug}")
private boolean debug = false;
/**
* use smooth factors to calculate the weight factors.
*
* @param job
* -parameters and results in job
*/
public void smooth(Job<JAXBElement> job) {
// initialize
getWeightForAllPartitioningConfiguration(job);
int partitionNumber = (int) job.getParameters().get(
JobParameter.NUMBER_PARTITION);
double sigmas[] = new double[partitionNumber];
List<List<Integer>> qmSets = new ArrayList<List<Integer>>();
Map<List<Integer>, Double> molecularIndicesAndWeightFactor = getWeightForSelectedPartitioningConfiguration(
partitionNumber, sigmas, qmSets);
job.getProperties().put("weightfactors",
molecularIndicesAndWeightFactor);
}
private void getWeightForAllPartitioningConfiguration(Job<JAXBElement> job) {
Map<JobParameter, Object> parameters = job.getParameters();
Map<Molecule, Integer> bufferMoleculeMap = job.getRegions()
.get(Region.Name.BUFFER).getMolecularMap();
List<Molecule> bufferMolecules = new ArrayList<Molecule>(
bufferMoleculeMap.keySet());
List<Integer> bufferIndices = new ArrayList<Integer>(
bufferMoleculeMap.values());
double s_qm_out = (double) parameters
.get(JobParameter.DISTANCE_S_QM_OUT);
double t_qm_out = (double) parameters
.get(JobParameter.DISTANCE_T_QM_OUT);
double s_qm_in = (double) parameters.get(JobParameter.DISTANCE_S_QM_IN);
double t_qm_in = (double) parameters.get(JobParameter.DISTANCE_T_QM_IN);
double s_mm_out = (double) parameters
.get(JobParameter.DISTANCE_S_MM_OUT);
double t_mm_out = (double) parameters
.get(JobParameter.DISTANCE_T_MM_OUT);
double s_mm_in = (double) parameters.get(JobParameter.DISTANCE_S_MM_IN);
double t_mm_in = (double) parameters.get(JobParameter.DISTANCE_T_MM_IN);
sigmaIndexMap = new HashMap<List<Integer>, Double>();
sigmaIndexMap = Collections.synchronizedMap(sigmaIndexMap);
int qmNumber = (int) parameters.get(JobParameter.NUMBER_QM);
int number_qmInBuffer = qmNumber - qmNumber * 2 / 3;
Map<Region.Name, Region> regions = job.getRegions();
Coord centerCoord = regions.get(Region.Name.QM_CORE).getCenterOfMass();
calculateWeightForEachConfiguration(bufferMolecules, bufferIndices,
s_qm_out, t_qm_out, s_qm_in, t_qm_in, s_mm_out, t_mm_out,
s_mm_in, t_mm_in, number_qmInBuffer, centerCoord);
}
private void calculateWeightForEachConfiguration(
List<Molecule> bufferMolecules, List<Integer> bufferIndices,
double s_qm_out, double t_qm_out, double s_qm_in, double t_qm_in,
double s_mm_out, double t_mm_out, double s_mm_in, double t_mm_in,
int number_qmInBuffer, Coord centerCoord) {
if(debug){
System.out.println("before: SCMPWeightFactors Subsets.split(Ints.toArray(bufferIndices), number_qmInBuffer))"+System.currentTimeMillis());
}
Subsets.split(Ints.toArray(bufferIndices), number_qmInBuffer)
.parallelStream()
.forEach(
qmSet -> {
Set<Integer> mmSet = new HashSet<Integer>(
bufferIndices);
mmSet.removeAll(qmSet);
double sigma = calculateSigma(centerCoord,
bufferMolecules, bufferIndices, s_qm_out,
t_qm_out, s_qm_in, t_qm_in, s_mm_out,
t_mm_out, s_mm_in, t_mm_in, qmSet, mmSet);
sigmaIndexMap.put(qmSet, sigma);
});
if(debug){
System.out.println("before: SCMPWeightFactors Subsets.split(Ints.toArray(bufferIndices), number_qmInBuffer))"+System.currentTimeMillis());
}
}
private Map<List<Integer>, Double> getWeightForSelectedPartitioningConfiguration(
int partitionNumber, double[] sigmas, List<List<Integer>> qmSets) {
Map<List<Integer>, Double> sortedSigmaIndexMap = MapSorter
.sortByValue(sigmaIndexMap);
List<List<Integer>> sortedIndices = new ArrayList<List<Integer>>(
sortedSigmaIndexMap.keySet());
List<Double> sortedSigmas = new ArrayList<Double>(
sortedSigmaIndexMap.values());
double sum_sigmas = 0;
List<Double> subSigmas = sortedSigmas.subList(sortedSigmas.size()
- partitionNumber, sortedSigmas.size());
List<List<Integer>> subIndices = sortedIndices.subList(
sortedSigmas.size() - partitionNumber, sortedSigmas.size());
for (int num = 0; num < subSigmas.size(); num++) {
double sigma = subSigmas.get(num);
sigmas[num] = sigma;
List<Integer> qmSet = subIndices.get(num);
qmSets.add(qmSet);
sum_sigmas += sigma;
}
Map<List<Integer>, Double> molecularIndicesAndWeightFactor = new HashMap<List<Integer>, Double>();
for (int i = 0; i < sigmas.length; i++) {
sigmas[i] = sigmas[i] / sum_sigmas;
molecularIndicesAndWeightFactor.put(qmSets.get(i), sigmas[i]);
}
return molecularIndicesAndWeightFactor;
}
private double calculateSigma(Coord centerCoord,
List<Molecule> bufferMolecules, List<Integer> bufferIndices,
double s_qm_out, double t_qm_out, double s_qm_in, double t_qm_in,
double s_mm_out, double t_mm_out, double s_mm_in, double t_mm_in,
ArrayList<Integer> qmSet, Set<Integer> mmSet) {
double fadeOutQM = fadeQM(centerCoord, bufferMolecules, bufferIndices,
s_qm_out, t_qm_out, qmSet);
double fadeInQM = 1 - fadeQM(centerCoord, bufferMolecules,
bufferIndices, s_qm_in, t_qm_in, qmSet);
double fadeOutMM = fadeMM(centerCoord, bufferMolecules, bufferIndices,
s_mm_out, t_mm_out, mmSet);
double fadeInMM = 1 - fadeMM(centerCoord, bufferMolecules,
bufferIndices, s_mm_in, t_mm_in, mmSet);
double sigma = fadeOutQM * fadeOutMM * fadeInQM * fadeInMM;
return sigma;
}
private double fadeQM(Coord centerCoord, List<Molecule> bufferMolecules,
List<Integer> bufferIndices, double s_qm_out, double t_qm_out,
List<Integer> qmSet) {
double fadeQM = 1.0;
for (Integer molecularIndex : qmSet) {
int index = bufferIndices.indexOf(molecularIndex);
Molecule molecule = bufferMolecules.get(index);
double currentDistance = closestDistanceToMoleculeCalculator
.calculate(centerCoord, molecule);
double lamdba = scmpSmoothFunction.evaluate(currentDistance,
s_qm_out, t_qm_out);
fadeQM *= lamdba;
}
return fadeQM;
}
private double fadeMM(Coord centerCoord, List<Molecule> bufferMolecules,
List<Integer> bufferIndices, double s_qm_out, double t_qm_out,
Set<Integer> mmSet) {
double fadeMM = 1.0;
for (Integer molecularIndex : mmSet) {
int index = bufferIndices.indexOf(molecularIndex);
Molecule molecule = bufferMolecules.get(index);
double currentDistance = closestDistanceToMoleculeCalculator
.calculate(centerCoord, molecule);
double lamdba = buloSmoothFunction.evaluate(currentDistance,
s_qm_out, t_qm_out);
fadeMM *= lamdba;
}
return fadeMM;
}
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.batch.protocol.models;
import java.util.UUID;
import com.microsoft.rest.DateTimeRfc1123;
import org.joda.time.DateTime;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Additional parameters for reactivate operation.
 */
public class TaskReactivateOptions {
    /**
     * The maximum time that the server can spend processing the request, in
     * seconds. The default is 30 seconds.
     */
    @JsonProperty(value = "")
    private Integer timeout;

    /**
     * The caller-generated request identity, in the form of a GUID with no
     * decoration such as curly braces, e.g.
     * 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0.
     */
    @JsonProperty(value = "")
    private UUID clientRequestId;

    /**
     * Whether the server should return the client-request-id in the response.
     */
    @JsonProperty(value = "")
    private Boolean returnClientRequestId;

    /**
     * The time the request was issued. Client libraries typically set this to
     * the current system clock time; set it explicitly if you are calling the
     * REST API directly.
     */
    @JsonProperty(value = "")
    private DateTimeRfc1123 ocpDate;

    /**
     * An ETag value associated with the version of the resource known to the
     * client. The operation will be performed only if the resource's current
     * ETag on the service exactly matches the value specified by the client.
     */
    @JsonProperty(value = "")
    private String ifMatch;

    /**
     * An ETag value associated with the version of the resource known to the
     * client. The operation will be performed only if the resource's current
     * ETag on the service does not match the value specified by the client.
     */
    @JsonProperty(value = "")
    private String ifNoneMatch;

    /**
     * A timestamp indicating the last modified time of the resource known to
     * the client. The operation will be performed only if the resource on the
     * service has been modified since the specified time.
     */
    @JsonProperty(value = "")
    private DateTimeRfc1123 ifModifiedSince;

    /**
     * A timestamp indicating the last modified time of the resource known to
     * the client. The operation will be performed only if the resource on the
     * service has not been modified since the specified time.
     */
    @JsonProperty(value = "")
    private DateTimeRfc1123 ifUnmodifiedSince;

    /**
     * Gets the maximum server-side processing time, in seconds (default 30).
     *
     * @return the timeout value
     */
    public Integer timeout() {
        return timeout;
    }

    /**
     * Sets the maximum server-side processing time, in seconds (default 30).
     *
     * @param timeout the timeout value to set
     * @return the TaskReactivateOptions object itself.
     */
    public TaskReactivateOptions withTimeout(Integer timeout) {
        this.timeout = timeout;
        return this;
    }

    /**
     * Gets the caller-generated request identity (an undecorated GUID such as
     * 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0).
     *
     * @return the clientRequestId value
     */
    public UUID clientRequestId() {
        return clientRequestId;
    }

    /**
     * Sets the caller-generated request identity (an undecorated GUID such as
     * 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0).
     *
     * @param clientRequestId the clientRequestId value to set
     * @return the TaskReactivateOptions object itself.
     */
    public TaskReactivateOptions withClientRequestId(UUID clientRequestId) {
        this.clientRequestId = clientRequestId;
        return this;
    }

    /**
     * Gets whether the server should echo the client-request-id in the
     * response.
     *
     * @return the returnClientRequestId value
     */
    public Boolean returnClientRequestId() {
        return returnClientRequestId;
    }

    /**
     * Sets whether the server should echo the client-request-id in the
     * response.
     *
     * @param returnClientRequestId the returnClientRequestId value to set
     * @return the TaskReactivateOptions object itself.
     */
    public TaskReactivateOptions withReturnClientRequestId(Boolean returnClientRequestId) {
        this.returnClientRequestId = returnClientRequestId;
        return this;
    }

    /**
     * Gets the time the request was issued; {@code null} when unset.
     *
     * @return the ocpDate value
     */
    public DateTime ocpDate() {
        // Stored internally in RFC 1123 form; unwrap for the caller.
        return ocpDate == null ? null : ocpDate.dateTime();
    }

    /**
     * Sets the time the request was issued. Client libraries typically use
     * the current system clock time; set it explicitly if you are calling the
     * REST API directly.
     *
     * @param ocpDate the ocpDate value to set
     * @return the TaskReactivateOptions object itself.
     */
    public TaskReactivateOptions withOcpDate(DateTime ocpDate) {
        this.ocpDate = ocpDate == null ? null : new DateTimeRfc1123(ocpDate);
        return this;
    }

    /**
     * Gets the ETag the operation requires the service resource to match
     * exactly.
     *
     * @return the ifMatch value
     */
    public String ifMatch() {
        return ifMatch;
    }

    /**
     * Sets the ETag the operation requires the service resource to match
     * exactly.
     *
     * @param ifMatch the ifMatch value to set
     * @return the TaskReactivateOptions object itself.
     */
    public TaskReactivateOptions withIfMatch(String ifMatch) {
        this.ifMatch = ifMatch;
        return this;
    }

    /**
     * Gets the ETag the operation requires the service resource to differ
     * from.
     *
     * @return the ifNoneMatch value
     */
    public String ifNoneMatch() {
        return ifNoneMatch;
    }

    /**
     * Sets the ETag the operation requires the service resource to differ
     * from.
     *
     * @param ifNoneMatch the ifNoneMatch value to set
     * @return the TaskReactivateOptions object itself.
     */
    public TaskReactivateOptions withIfNoneMatch(String ifNoneMatch) {
        this.ifNoneMatch = ifNoneMatch;
        return this;
    }

    /**
     * Gets the timestamp after which the service resource must have been
     * modified for the operation to run; {@code null} when unset.
     *
     * @return the ifModifiedSince value
     */
    public DateTime ifModifiedSince() {
        return ifModifiedSince == null ? null : ifModifiedSince.dateTime();
    }

    /**
     * Sets the timestamp after which the service resource must have been
     * modified for the operation to run.
     *
     * @param ifModifiedSince the ifModifiedSince value to set
     * @return the TaskReactivateOptions object itself.
     */
    public TaskReactivateOptions withIfModifiedSince(DateTime ifModifiedSince) {
        this.ifModifiedSince = ifModifiedSince == null ? null : new DateTimeRfc1123(ifModifiedSince);
        return this;
    }

    /**
     * Gets the timestamp after which the service resource must NOT have been
     * modified for the operation to run; {@code null} when unset.
     *
     * @return the ifUnmodifiedSince value
     */
    public DateTime ifUnmodifiedSince() {
        return ifUnmodifiedSince == null ? null : ifUnmodifiedSince.dateTime();
    }

    /**
     * Sets the timestamp after which the service resource must NOT have been
     * modified for the operation to run.
     *
     * @param ifUnmodifiedSince the ifUnmodifiedSince value to set
     * @return the TaskReactivateOptions object itself.
     */
    public TaskReactivateOptions withIfUnmodifiedSince(DateTime ifUnmodifiedSince) {
        this.ifUnmodifiedSince = ifUnmodifiedSince == null ? null : new DateTimeRfc1123(ifUnmodifiedSince);
        return this;
    }
}
| |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package sokoban.Tethik;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Vector;
/**
 * Detects "corral" areas in a Sokoban board: connected floor regions the
 * player cannot reach without first pushing a block. Results can be cached
 * per board state when enabled in {@link Settings}.
 *
 * @author figgefred
 */
public class LiveAnalyser {

    // Cache of previously computed partitions, keyed by board state and then
    // by the corral area containing the player. Only allocated when
    // settings.DO_CORRAL_CACHING is true.
    private Map<BoardState, Map<CorralArea, List<CorralArea>>> CachedAreas;
    // private Map<BoardState, Map<BoardPosition, Boolean>> DeadlockCache;

    private Settings settings;

    public LiveAnalyser(Settings settings)
    {
        this.settings = settings;
        if(settings.DO_CORRAL_CACHING)
            CachedAreas = new HashMap<>();
        // if(Player.DO_EXPENSIVE_DEADLOCK)
        // DeadlockCache = new HashMap<>();
    }

    /**
     * Looks up a cached area list for the given state: returns the list
     * cached under the corral area that contains the player, or null when
     * caching is disabled or no matching entry exists.
     */
    private List<CorralArea> getCachedArea(BoardState state)
    {
        if(!settings.DO_CORRAL_CACHING)
            return null;
        Map<CorralArea, List<CorralArea>> map = CachedAreas.get(state);
        if(map != null)
        {
            for(CorralArea key : map.keySet())
            {
                // A cache entry applies when the player stands inside the key area.
                if(key.isMemberOfArea(state.getPlayerNode()))
                {
                    return map.get(key);
                }
            }
        }
        return null;
    }

    /**
     * Returns a list of corral areas and (or only) the area of where the
     * player is.
     *
     * A corral area is an area which a player cannot reach except by pushing
     * a block.
     *
     * CorralArea's will hold a field pointing to a list of blocks that are acting
     * as fences (creating the corral fence). These should be prioritized so that
     * a corral area can be solved as fast as possible. This is so called 'corral pruning'.
     *
     * @param board the board state to partition into areas
     * @return the list of areas; a single-element list means no corral exists
     */
    public List<CorralArea> getAreas(BoardState board)
    {
        List<CorralArea> list = null;
        if(settings.DO_CORRAL_CACHING)
        {
            list = getCachedArea(board);
            if(list != null)
            {
                return list;
            }
        }
        // A list containing all the nodes "visited"
        Set<BoardPosition> visited = new HashSet<>();
        list = new ArrayList<>();
        int areaCounter = 1;
        //**************************//
        // Interpret areas from map //
        //**************************//
        // Flood-fill every unvisited space node into its own CorralArea.
        for(int r = 0; r < board.getRowsCount(); r++)
        {
            for(int c = 0; c < board.getColumnsCount(); c++)
            {
                BoardPosition p = new BoardPosition(r,c);
                NodeType nodeType = board.get(p);
                if( !visited.contains(p) && nodeType.isSpaceNode() )
                {
                    CorralArea area = new CorralArea(areaCounter++, board);
                    setCorralArea(board, area, p, visited);
                    list.add(area);
                }
            }
        }
        // If there is only a play area there is no point to continue
        if(list.size() <= 1)
            return list;
        //**************************//
        //  Find the corral fences  //
        //**************************//
        // !!!!!!!!!!!!!!
        // The following code segments can probably be implemented in a better way.
        // !!!!!!!!!!!!!!
        // A list of block nodes that are fence candidates for the corral areas
        Map<BoardPosition, CorralFenceCandidate> fenceCandidates = new HashMap<>();
        // This part is crucial, we must find out which blocks are 'touched'
        // by more than one area
        for(CorralArea a: list)
        {
            //for(BoardPosition p: board.getBlockNodes())
            for(BoardPosition p: a.getAreaPositions())
            {
                if(board.get(p).isBlockNode())
                {
                    // For every area, try to match every block
                    CorralFenceCandidate c = fenceCandidates.get(p);
                    if(c == null)
                    {
                        c = new CorralFenceCandidate(p);
                        fenceCandidates.put(p, c);
                    }
                    c.addCorralArea(a);
                }
            }
        }
        //******************************//
        // Add the fences to the area **//
        //******************************//
        // Naive merge
        CorralArea playArea = null;
        for(CorralFenceCandidate f : fenceCandidates.values())
        {
            for(CorralArea a: list)
            {
                if(f.isNodeOf(a))
                {
                    // A block touched by more than one area forms part of a
                    // corral fence; otherwise it is an interior block.
                    if(f.isPartOfCorralAreaFence())
                    {
                        a.addAsFenceNode(f.getBoardPosition(), board.get(f.getBoardPosition()));
                    }
                    else
                    {
                        a.add(f.getBoardPosition(), board.get(f.getBoardPosition()));
                    }
                }
                if(!a.isCorralArea())
                {
                    playArea = a;
                }
            }
        }
        if(playArea == null)
            System.err.println("Oooops, no playarea warning.");
        if(settings.DO_CORRAL_CACHING)
        {
            Map<CorralArea, List<CorralArea>> map = CachedAreas.get(board);
            if(map == null)
            {
                map = new HashMap<>();
                CachedAreas.put(board, map);
            }
            // Cache the full partition under the player's (non-corral) area.
            map.put(playArea, list);
        }
        return list;
    }

    /**
     * Recursively flood-fills {@code area} from {@code spaceNode}, adding
     * every reachable space node plus any bordering block clusters.
     * NOTE(review): recursion depth grows with the area size — presumably
     * fine for typical Sokoban boards, but confirm for very large maps.
     */
    private void setCorralArea(BoardState board, CorralArea area, BoardPosition spaceNode, Set<BoardPosition> visited)
    {
        NodeType nodeType = board.get(spaceNode);
        area.add(spaceNode, nodeType);
        visited.add(spaceNode);
        for(BoardPosition neighbour: board.getNeighbours(spaceNode))
        {
            NodeType neighbourType = board.get(neighbour);
            if( !visited.contains(neighbour) && ( neighbourType.isSpaceNode()) )
            {
                // If unvisited expand!
                setCorralArea(board, area, neighbour, visited);
            }
            else if(neighbourType.isBlockNode())
            {
                //area.add(neighbour, neighbourType);
                // Pull in the whole connected block cluster bordering this
                // space. A fresh visited set is used per cluster, so the same
                // block can be added to several areas — the fence detection
                // in getAreas relies on this.
                visitNeighbouringBlocks(board, area, neighbour, new HashSet<BoardPosition>());
            }
        }
    }

    /**
     * Recursively adds the connected cluster of block nodes starting at
     * {@code blockNode} to {@code area}.
     */
    private void visitNeighbouringBlocks(BoardState board, CorralArea area, BoardPosition blockNode, Set<BoardPosition> visited)
    {
        NodeType nodeType = board.get(blockNode);
        area.add(blockNode, nodeType);
        visited.add(blockNode);
        for(BoardPosition blockNeighbour: board.getNeighbours(blockNode))
        {
            NodeType neighbourType = board.get(blockNeighbour);
            if( !visited.contains(blockNeighbour) && ( neighbourType.isBlockNode()) )
            {
                // If unvisited expand!
                visitNeighbouringBlocks(board, area, blockNeighbour, visited);
            }
        }
    }

    /**
     * Reads a board from stdin (terminated by a blank line or EOF), runs the
     * corral analysis and prints each detected area.
     */
    public static void main(String[] args) throws IOException, InterruptedException {
        // BoardState board = BoardState.getBoardFromFile("testing/simpleplaytest4");
        Vector<String> b = new Vector<String>();
        BufferedReader br = new BufferedReader(
                new InputStreamReader(System.in));
        String line;
        line = br.readLine();
        while(line != null) {
            if(line.equals(""))
                break;
            b.add(line);
            line = br.readLine();
        } // End while
        //System.out.println(board);
        BoardState board = new BoardState(b, true);
        // NOTE(review): pFinder and ana appear unused below; left in place in
        // case their constructors have side effects — confirm before removing.
        PathFinder pFinder = new PathFinder();
        LiveAnalyser liveAnalyser = new LiveAnalyser(new Settings());
        board.setSettings(new Settings());
        Analyser ana = new Analyser(board, new Settings());
        List<CorralArea> areas = liveAnalyser.getAreas(board);
        for(CorralArea a: areas)
        {
            System.out.println(a);
        }
        // Player noob = new Player(board);
        //System.out.println(noob.play());
    }

    //
    // public boolean isFrozenDeadlockState(BoardState state, Set<BoardPosition> tmpBlock, BoardPosition block)
    // {
    // if(DeadlockCache == null)
    // DeadlockCache = new HashMap<>();
    // Map<BoardPosition, Boolean> map = DeadlockCache.get(state);
    // if(map != null)
    // {
    // Boolean b = map.get(state);
    // if(b != null)
    // return b.booleanValue();
    // }
    //
    // int r = block.Row;
    // int c = block.Column;
    //
    // BoardPosition left = new BoardPosition(r, c-1);
    // BoardPosition right = new BoardPosition(r, c+1);
    // BoardPosition up = new BoardPosition(r-1, c);
    // BoardPosition down = new BoardPosition(r+1, c);
    //
    // //***************//
    // // WALL BLOCKING //
    // //***************//
    //
    // // Check if there are any walls blocking horizontally
    // // Or of course if there is any block already checked that is blocking
    // boolean horizontalWallBlocking = (left.Column >= 0 && state.get(left) == NodeType.WALL)
    // || (right.Column < state.getColumnsCount() && state.get(right) == NodeType.WALL);
    //
    //
    //
    //
    // // Check same as above just vertical
    // boolean verticalWallBlocking = (up.Row >= 0 && state.get(up) == NodeType.WALL)
    // || (down.Row < state.getRowsCount() && state.get(down) == NodeType.WALL);
    //
    //
    // // Can block be moved?
    // // Note we have to iterate over all blocks to check one of them
    // // is not a goal, since it could otherwise have been a good win state
    // if(horizontalWallBlocking && verticalWallBlocking)
    // {
    // tmpBlock.add(block);
    // for(BoardPosition pos: tmpBlock)
    // {
    // if(state.get(pos)==NodeType.BLOCK)
    // return true;
    // }
    // return false;
    // }
    //
    // //*****************//
    // // BLOCKS DEADLOCK //
    // //**************** //
    //
    //
    // // If blocking is both horizontal and vertical, well then the block is supposedly frozen
    // // But is it deadlocked?
    //
    // // Mark this block as checked - avoid stackoverflow
    // tmpBlock.add(block);
    // boolean deadlockState =
    // (
    // (verticalWallBlocking)
    // ||
    // (up.Row >= 0 && isBlockType(state.get(up)) && (tmpBlock.contains(up) || isFrozenDeadlockState(state, tmpBlock, up)))
    // ||
    // (down.Row < state.getRowsCount() && isBlockType(state.get(down)) && (tmpBlock.contains(down) || isFrozenDeadlockState(state, tmpBlock, down)))
    // )
    // &&
    // (
    // (horizontalWallBlocking)
    // ||
    // (left.Column >= 0 && isBlockType(state.get(left)) && (tmpBlock.contains(left) || isFrozenDeadlockState(state, tmpBlock, left)))
    // ||
    // (right.Column < state.getColumnsCount() && isBlockType(state.get(right)) && (tmpBlock.contains(right) || isFrozenDeadlockState(state, tmpBlock, right)))
    // );
    //
    // map = DeadlockCache.get(state);
    // if(map == null)
    // {
    // map = new HashMap<>();
    // DeadlockCache.put(state, map);
    // }
    // map.put(block, deadlockState);
    //
    // return deadlockState;
    // }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.ThreadLocalRandom;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.CellScannable;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.CoordinatedStateManager;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableDescriptors;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.AsyncClusterConnection;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.locking.EntityLock;
import org.apache.hadoop.hbase.executor.ExecutorService;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.ipc.HBaseRpcController;
import org.apache.hadoop.hbase.ipc.RpcServerInterface;
import org.apache.hadoop.hbase.mob.MobFileCache;
import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;
import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;
import org.apache.hadoop.hbase.quotas.RegionSizeStore;
import org.apache.hadoop.hbase.regionserver.FlushRequester;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HeapMemoryManager;
import org.apache.hadoop.hbase.regionserver.LeaseManager;
import org.apache.hadoop.hbase.regionserver.MetricsRegionServer;
import org.apache.hadoop.hbase.regionserver.RegionServerAccounting;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.hadoop.hbase.regionserver.ReplicationSourceService;
import org.apache.hadoop.hbase.regionserver.SecureBulkLoadManager;
import org.apache.hadoop.hbase.regionserver.ServerNonceManager;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequester;
import org.apache.hadoop.hbase.regionserver.regionreplication.RegionReplicationBufferManager;
import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
import org.apache.hadoop.hbase.security.access.AccessChecker;
import org.apache.hadoop.hbase.security.access.ZKPermissionWatcher;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;
import org.apache.hbase.thirdparty.com.google.protobuf.Service;
import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearRegionBlockCacheRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearRegionBlockCacheResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearSlowLogResponseRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearSlowLogResponses;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactionSwitchRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactionSwitchResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse;
/**
* A mock RegionServer implementation.
 * Use this when you can't bend Mockito to your liking (e.g. return a null result
 * when 'scanning' until the master times out and then return a coherent meta row
 * result thereafter). Have some facility for faking gets and scans. See
* setGetResult(byte[], byte[], Result) for how to fill the backing data
* store that the get pulls from.
*/
class MockRegionServer implements AdminProtos.AdminService.BlockingInterface,
    ClientProtos.ClientService.BlockingInterface, RegionServerServices {
  // Identity this mock reports from getServerName(); also used in abort() messages.
  private final ServerName sn;
  // Real ZooKeeper watcher (not mocked); closed by stop(String).
  private final ZKWatcher zkw;
  private final Configuration conf;

  /**
   * Map of regions to map of rows and {@link Result}. Used as data source when
   * {@link #get(RpcController, ClientProtos.GetRequest)} is called. Because we have a byte
   * key, need to use TreeMap and provide a Comparator. Use
   * {@link #setGetResult(byte[], byte[], Result)} filling this map.
   */
  private final Map<byte [], Map<byte [], Result>> gets = new TreeMap<>(Bytes.BYTES_COMPARATOR);

  /**
   * Map of regions to results to return when scanning.
   */
  private final Map<byte [], Result []> nexts = new TreeMap<>(Bytes.BYTES_COMPARATOR);

  /**
   * Data structure that holds a region name and the next index into the
   * {@link #nexts} result array for an open scanner on that region.
   */
  class RegionNameAndIndex {
    private final byte[] regionName;
    // Position of the next Result to hand out; starts at the first row.
    private int index = 0;

    RegionNameAndIndex(final byte[] regionName) {
      this.regionName = regionName;
    }

    byte[] getRegionName() {
      return this.regionName;
    }

    // Post-increment semantics: returns the current index, then advances it.
    int getThenIncrement() {
      int currentIndex = this.index;
      this.index++;
      return currentIndex;
    }
  }

  /**
   * Outstanding scanners and their offset into <code>nexts</code>
   */
  private final Map<Long, RegionNameAndIndex> scannersAndOffsets = new HashMap<>();

  /**
   * Creates the mock and connects a real {@link ZKWatcher} (this mock acts as its
   * own {@link Abortable}).
   * @param conf configuration used for the ZK connection
   * @param sn Name of this mock regionserver
   * @throws IOException if the ZK watcher cannot be created
   * @throws ZooKeeperConnectionException if the ZK connection fails
   */
  MockRegionServer(final Configuration conf, final ServerName sn)
      throws ZooKeeperConnectionException, IOException {
    this.sn = sn;
    this.conf = conf;
    this.zkw = new ZKWatcher(conf, sn.toString(), this, true);
  }

  /**
   * Use this method filling the backing data source used by
   * {@link #get(RpcController, ClientProtos.GetRequest)}
   * @param regionName the region name to assign
   * @param row the row key
   * @param r the single row result
   */
  void setGetResult(final byte [] regionName, final byte [] row, final Result r) {
    Map<byte [], Result> value = this.gets.get(regionName);
    if (value == null) {
      // If no value already, create one. Needs to be treemap because we are
      // using byte array as key. Not thread safe.
      value = new TreeMap<>(Bytes.BYTES_COMPARATOR);
      this.gets.put(regionName, value);
    }
    value.put(row, r);
  }

  /**
   * Use this method to set what a scanner will reply as we next through
   * @param regionName region whose scans should serve these results
   * @param rs results returned in order, one per next() call
   */
  void setNextResults(final byte [] regionName, final Result [] rs) {
    this.nexts.put(regionName, rs);
  }

  // The mock never reports itself stopped or aborted; tests drive its lifetime.
  @Override
  public boolean isStopped() {
    return false;
  }

  // Fail loudly: an abort in a test is a bug, so surface it as a RuntimeException.
  @Override
  public void abort(String why, Throwable e) {
    throw new RuntimeException(this.sn + ": " + why, e);
  }

  @Override
  public boolean isAborted() {
    return false;
  }

  /**
   * Registers a new scanner over the given region. The Scan argument is ignored;
   * results come from whatever {@link #setNextResults(byte[], Result[])} stored.
   * @return a random scanner id keying into {@link #scannersAndOffsets}
   */
  public long openScanner(byte[] regionName, Scan scan) throws IOException {
    long scannerId = ThreadLocalRandom.current().nextLong();
    this.scannersAndOffsets.put(scannerId, new RegionNameAndIndex(regionName));
    return scannerId;
  }

  /**
   * Returns the next canned Result for the scanner, or null when the region has
   * no configured results or they are exhausted.
   */
  public Result next(long scannerId) throws IOException {
    RegionNameAndIndex rnai = this.scannersAndOffsets.get(scannerId);
    int index = rnai.getThenIncrement();
    Result [] results = this.nexts.get(rnai.getRegionName());
    if (results == null) return null;
    return index < results.length? results[index]: null;
  }

  public Result [] next(long scannerId, int numberOfRows) throws IOException {
    // Just return one result whatever they ask for.
    Result r = next(scannerId);
    return r == null? null: new Result [] {r};
  }

  /** Forgets the scanner; subsequent next() calls with this id would NPE. */
  public void close(final long scannerId) throws IOException {
    this.scannersAndOffsets.remove(scannerId);
  }

  // stop() only releases the real ZK connection; nothing else to tear down.
  @Override
  public void stop(String why) {
    this.zkw.close();
  }

  // ---------------------------------------------------------------------------
  // The remainder are interface obligations the tests do not exercise: stubs
  // that return null/false/0/empty and no-ops, except where noted.
  // ---------------------------------------------------------------------------

  @Override
  public void addRegion(HRegion r) {
  }

  @Override
  public boolean removeRegion(HRegion r, ServerName destination) {
    return false;
  }

  @Override
  public HRegion getRegion(String encodedRegionName) {
    return null;
  }

  // Real value: the Configuration handed to the constructor.
  @Override
  public Configuration getConfiguration() {
    return this.conf;
  }

  // Real value: the live ZKWatcher created in the constructor.
  @Override
  public ZKWatcher getZooKeeper() {
    return this.zkw;
  }

  @Override
  public CoordinatedStateManager getCoordinatedStateManager() {
    return null;
  }

  @Override
  public Connection getConnection() {
    return null;
  }

  // Real value: the ServerName handed to the constructor.
  @Override
  public ServerName getServerName() {
    return this.sn;
  }

  @Override
  public boolean isStopping() {
    return false;
  }

  @Override
  public FlushRequester getFlushRequester() {
    return null;
  }

  @Override
  public CompactionRequester getCompactionRequestor() {
    return null;
  }

  @Override
  public RegionServerAccounting getRegionServerAccounting() {
    return null;
  }

  @Override
  public RegionServerRpcQuotaManager getRegionServerRpcQuotaManager() {
    return null;
  }

  @Override
  public void postOpenDeployTasks(PostOpenDeployContext context) throws IOException {
  }

  @Override
  public RpcServerInterface getRpcServer() {
    return null;
  }

  @Override
  public ConcurrentSkipListMap<byte[], Boolean> getRegionsInTransitionInRS() {
    return null;
  }

  @Override
  public FileSystem getFileSystem() {
    return null;
  }

  /**
   * Serves a Get from the {@link #gets} map filled via
   * {@link #setGetResult(byte[], byte[], Result)}. For an unknown region the
   * response carries no result. NOTE(review): for a known region but unknown
   * row, m.get(row) is null and ProtobufUtil.toResult(null) is invoked —
   * presumably the tests only ask for rows they seeded; verify against callers.
   */
  @Override
  public GetResponse get(RpcController controller, GetRequest request)
  throws ServiceException {
    byte[] regionName = request.getRegion().getValue().toByteArray();
    Map<byte [], Result> m = this.gets.get(regionName);
    GetResponse.Builder builder = GetResponse.newBuilder();
    if (m != null) {
      byte[] row = request.getGet().getRow().toByteArray();
      builder.setResult(ProtobufUtil.toResult(m.get(row)));
    }
    return builder.build();
  }

  @Override
  public MutateResponse mutate(RpcController controller, MutateRequest request)
      throws ServiceException {
    return null;
  }

  /**
   * Two-phase scan protocol: a request carrying a Scan opens a scanner and
   * returns its id; a request without one steps the existing scanner, shipping
   * each Result through the controller's cell scanner until results run out,
   * at which point moreResults=false is returned and the scanner is closed.
   */
  @Override
  public ScanResponse scan(RpcController controller, ScanRequest request)
      throws ServiceException {
    ScanResponse.Builder builder = ScanResponse.newBuilder();
    try {
      if (request.hasScan()) {
        byte[] regionName = request.getRegion().getValue().toByteArray();
        builder.setScannerId(openScanner(regionName, null));
        builder.setMoreResults(true);
      }
      else {
        long scannerId = request.getScannerId();
        Result result = next(scannerId);
        if (result != null) {
          builder.addCellsPerResult(result.size());
          List<CellScannable> results = new ArrayList<>(1);
          results.add(result);
          // Cells travel out-of-band via the rpc controller, not the protobuf.
          ((HBaseRpcController) controller).setCellScanner(CellUtil
              .createCellScanner(results));
          builder.setMoreResults(true);
        }
        else {
          builder.setMoreResults(false);
          close(scannerId);
        }
      }
    } catch (IOException ie) {
      throw new ServiceException(ie);
    }
    return builder.build();
  }

  @Override
  public BulkLoadHFileResponse bulkLoadHFile(RpcController controller,
      BulkLoadHFileRequest request) throws ServiceException {
    return null;
  }

  @Override
  public ClientProtos.CoprocessorServiceResponse execService(RpcController controller,
      ClientProtos.CoprocessorServiceRequest request) throws ServiceException {
    return null;
  }

  @Override
  public ClientProtos.MultiResponse multi(
      RpcController controller, MultiRequest request) throws ServiceException {
    return null;
  }

  // Always answers with the first meta region, regardless of the request.
  @Override
  public GetRegionInfoResponse getRegionInfo(RpcController controller,
      GetRegionInfoRequest request) throws ServiceException {
    GetRegionInfoResponse.Builder builder = GetRegionInfoResponse.newBuilder();
    builder.setRegionInfo(ProtobufUtil.toRegionInfo(RegionInfoBuilder.FIRST_META_REGIONINFO));
    return builder.build();
  }

  // Empty (but non-null) region-load response.
  @Override
  public GetRegionLoadResponse getRegionLoad(RpcController controller,
      GetRegionLoadRequest request) throws ServiceException {
    GetRegionLoadResponse.Builder builder = GetRegionLoadResponse.newBuilder();
    return builder.build();
  }

  @Override
  public ClearCompactionQueuesResponse clearCompactionQueues(RpcController controller,
    ClearCompactionQueuesRequest request) throws ServiceException {
    return null;
  }

  @Override
  public GetStoreFileResponse getStoreFile(RpcController controller,
      GetStoreFileRequest request) throws ServiceException {
    return null;
  }

  @Override
  public GetOnlineRegionResponse getOnlineRegion(RpcController controller,
      GetOnlineRegionRequest request) throws ServiceException {
    return null;
  }

  @Override
  public List<HRegion> getRegions() {
    return null;
  }

  @Override
  public OpenRegionResponse openRegion(RpcController controller,
      OpenRegionRequest request) throws ServiceException {
    return null;
  }

  @Override
  public WarmupRegionResponse warmupRegion(RpcController controller,
      WarmupRegionRequest request) throws ServiceException {
    return null;
  }

  @Override
  public CloseRegionResponse closeRegion(RpcController controller,
      CloseRegionRequest request) throws ServiceException {
    return null;
  }

  @Override
  public FlushRegionResponse flushRegion(RpcController controller,
      FlushRegionRequest request) throws ServiceException {
    return null;
  }

  @Override
  public CompactionSwitchResponse compactionSwitch(RpcController controller,
      CompactionSwitchRequest request) throws ServiceException {
    return null;
  }

  @Override
  public CompactRegionResponse compactRegion(RpcController controller,
      CompactRegionRequest request) throws ServiceException {
    return null;
  }

  @Override
  public ReplicateWALEntryResponse replicateWALEntry(RpcController controller,
      ReplicateWALEntryRequest request) throws ServiceException {
    return null;
  }

  @Override
  public RollWALWriterResponse rollWALWriter(RpcController controller,
      RollWALWriterRequest request) throws ServiceException {
    return null;
  }

  @Override
  public GetServerInfoResponse getServerInfo(RpcController controller,
      GetServerInfoRequest request) throws ServiceException {
    return null;
  }

  @Override
  public StopServerResponse stopServer(RpcController controller,
      StopServerRequest request) throws ServiceException {
    return null;
  }

  @Override
  public List<HRegion> getRegions(TableName tableName) throws IOException {
    return null;
  }

  @Override
  public LeaseManager getLeaseManager() {
    return null;
  }

  // Empty list rather than null: callers may iterate without a null check.
  @Override
  public List<WAL> getWALs() throws IOException {
    return Collections.emptyList();
  }

  @Override
  public WAL getWAL(RegionInfo regionInfo) throws IOException {
    return null;
  }

  @Override
  public ExecutorService getExecutorService() {
    return null;
  }

  @Override
  public ChoreService getChoreService() {
    return null;
  }

  @Override
  public void updateRegionFavoredNodesMapping(String encodedRegionName,
      List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName> favoredNodes) {
  }

  @Override
  public InetSocketAddress[] getFavoredNodesForRegion(String encodedRegionName) {
    return null;
  }

  @Override
  public ReplicateWALEntryResponse
      replay(RpcController controller, ReplicateWALEntryRequest request)
      throws ServiceException {
    return null;
  }

  @Override
  public UpdateFavoredNodesResponse updateFavoredNodes(RpcController controller,
      UpdateFavoredNodesRequest request) throws ServiceException {
    return null;
  }

  @Override
  public ServerNonceManager getNonceManager() {
    return null;
  }

  @Override
  public boolean reportRegionStateTransition(RegionStateTransitionContext context) {
    return false;
  }

  @Override
  public boolean registerService(Service service) {
    return false;
  }

  @Override
  public CoprocessorServiceResponse execRegionServerService(RpcController controller,
      CoprocessorServiceRequest request) throws ServiceException {
    return null;
  }

  @Override
  public UpdateConfigurationResponse updateConfiguration(
      RpcController controller, UpdateConfigurationRequest request)
      throws ServiceException {
    return null;
  }

  @Override
  public ClearRegionBlockCacheResponse clearRegionBlockCache(RpcController controller,
                                                             ClearRegionBlockCacheRequest request)
    throws ServiceException {
    return null;
  }

  @Override
  public HeapMemoryManager getHeapMemoryManager() {
    return null;
  }

  @Override
  public double getCompactionPressure() {
    return 0;
  }

  @Override
  public ThroughputController getFlushThroughputController() {
    return null;
  }

  @Override
  public double getFlushPressure() {
    return 0;
  }

  @Override
  public MetricsRegionServer getMetrics() {
    return null;
  }

  @Override
  public EntityLock regionLock(List<RegionInfo> regionInfos, String description, Abortable abort)
      throws IOException {
    return null;
  }

  @Override
  public PrepareBulkLoadResponse prepareBulkLoad(RpcController controller,
      PrepareBulkLoadRequest request) throws ServiceException {
    return null;
  }

  @Override
  public CleanupBulkLoadResponse cleanupBulkLoad(RpcController controller,
      CleanupBulkLoadRequest request) throws ServiceException {
    return null;
  }

  @Override
  public SecureBulkLoadManager getSecureBulkLoadManager() {
    return null;
  }

  @Override
  public void unassign(byte[] regionName) throws IOException {
  }

  @Override
  public RegionServerSpaceQuotaManager getRegionServerSpaceQuotaManager() {
    return null;
  }

  @Override
  public ExecuteProceduresResponse executeProcedures(RpcController controller,
      ExecuteProceduresRequest request) throws ServiceException {
    return null;
  }

  @Override
  public ClearSlowLogResponses clearSlowLogsResponses(RpcController controller,
      ClearSlowLogResponseRequest request) throws ServiceException {
    return null;
  }

  @Override
  public HBaseProtos.LogEntry getLogEntries(RpcController controller,
      HBaseProtos.LogRequest request) throws ServiceException {
    return null;
  }

  @Override
  public GetSpaceQuotaSnapshotsResponse getSpaceQuotaSnapshots(
      RpcController controller, GetSpaceQuotaSnapshotsRequest request)
      throws ServiceException {
    return null;
  }

  @Override
  public Connection createConnection(Configuration conf) throws IOException {
    return null;
  }

  // Pretends the quota report always succeeds.
  @Override
  public boolean reportRegionSizesForQuotas(RegionSizeStore sizeStore) {
    return true;
  }

  @Override
  public boolean reportFileArchivalForQuotas(
      TableName tableName, Collection<Entry<String, Long>> archivedFiles) {
    return false;
  }

  // Not an @Override here; the mock simply always claims the cluster is up.
  public boolean isClusterUp() {
    return true;
  }

  @Override
  public ReplicationSourceService getReplicationSourceService() {
    return null;
  }

  @Override
  public TableDescriptors getTableDescriptors() {
    return null;
  }

  // Optional-returning accessors answer empty rather than null per Optional convention.
  @Override
  public Optional<BlockCache> getBlockCache() {
    return Optional.empty();
  }

  @Override
  public Optional<MobFileCache> getMobFileCache() {
    return Optional.empty();
  }

  @Override
  public AccessChecker getAccessChecker() {
    return null;
  }

  @Override
  public ZKPermissionWatcher getZKPermissionWatcher() {
    return null;
  }

  @Override
  public AsyncClusterConnection getAsyncClusterConnection() {
    return null;
  }

  @Override
  public RegionReplicationBufferManager getRegionReplicationBufferManager() {
    return null;
  }

  @Override
  public ReplicateWALEntryResponse replicateToReplica(RpcController controller,
      ReplicateWALEntryRequest request) throws ServiceException {
    return null;
  }
}
| |
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Arrays;
import java.util.List;
import java.util.StringTokenizer;
import java.util.TreeSet;
// >>> Don't Change
/**
 * Two-job MapReduce pipeline: job A counts title-word occurrences (minus stop
 * words); job B selects the global top-N words by count. Intermediate output
 * lives under /mp2/tmp and is re-read via KeyValueTextInputFormat.
 */
public class TopTitles extends Configured implements Tool {
    public static void main(String[] args) throws Exception {
        int res = ToolRunner.run(new Configuration(), new TopTitles(), args);
        System.exit(res);
    }

    /**
     * Wires and runs the two jobs in sequence.
     * args[0] = input path, args[1] = final output path.
     * @return 0 on success of job B, 1 otherwise
     */
    @Override
    public int run(String[] args) throws Exception {
        Configuration conf = this.getConf();
        FileSystem fs = FileSystem.get(conf);
        Path tmpPath = new Path("/mp2/tmp");
        // Clear leftovers from a previous run; job A fails if its output dir exists.
        fs.delete(tmpPath, true);

        Job jobA = Job.getInstance(conf, "Title Count");
        jobA.setOutputKeyClass(Text.class);
        jobA.setOutputValueClass(IntWritable.class);
        jobA.setMapperClass(TitleCountMap.class);
        jobA.setReducerClass(TitleCountReduce.class);
        FileInputFormat.setInputPaths(jobA, new Path(args[0]));
        FileOutputFormat.setOutputPath(jobA, tmpPath);
        jobA.setJarByClass(TopTitles.class);
        // Block until the counting job completes before starting the top-N job.
        jobA.waitForCompletion(true);

        Job jobB = Job.getInstance(conf, "Top Titles");
        jobB.setOutputKeyClass(Text.class);
        jobB.setOutputValueClass(IntWritable.class);
        // Map emits everything under a single NullWritable key so one reducer
        // sees all candidates and can pick the global top N.
        jobB.setMapOutputKeyClass(NullWritable.class);
        jobB.setMapOutputValueClass(TextArrayWritable.class);
        jobB.setMapperClass(TopTitlesMap.class);
        jobB.setReducerClass(TopTitlesReduce.class);
        // Exactly one reducer is required for a correct global ranking.
        jobB.setNumReduceTasks(1);
        FileInputFormat.setInputPaths(jobB, tmpPath);
        FileOutputFormat.setOutputPath(jobB, new Path(args[1]));
        jobB.setInputFormatClass(KeyValueTextInputFormat.class);
        jobB.setOutputFormatClass(TextOutputFormat.class);
        jobB.setJarByClass(TopTitles.class);
        return jobB.waitForCompletion(true) ? 0 : 1;
    }

    /**
     * Reads a whole HDFS file into a string, one "\n" appended per line.
     * NOTE(review): the BufferedReader/stream is never closed — a resource
     * leak, but this sits in the assignment's "Don't Change" region.
     */
    public static String readHDFSFile(String path, Configuration conf) throws IOException{
        Path pt=new Path(path);
        FileSystem fs = FileSystem.get(pt.toUri(), conf);
        FSDataInputStream file = fs.open(pt);
        BufferedReader buffIn=new BufferedReader(new InputStreamReader(file));
        StringBuilder everything = new StringBuilder();
        String line;
        while( (line = buffIn.readLine()) != null) {
            everything.append(line);
            everything.append("\n");
        }
        return everything.toString();
    }

    /** Writable wrapper so a String[] can travel as a map-output value. */
    public static class TextArrayWritable extends ArrayWritable {
        public TextArrayWritable() {
            super(Text.class);
        }
        public TextArrayWritable(String[] strings) {
            super(Text.class);
            Text[] texts = new Text[strings.length];
            for (int i = 0; i < strings.length; i++) {
                texts[i] = new Text(strings[i]);
            }
            set(texts);
        }
    }
// <<< Don't Change

    /**
     * Job A mapper: tokenizes each title line with the configured delimiter
     * characters, lowercases tokens, drops stop words, and emits (word, 1).
     */
    public static class TitleCountMap extends Mapper<Object, Text, Text, IntWritable> {
        List<String> stopWords;
        String delimiters;

        @Override
        protected void setup(Context context) throws IOException,InterruptedException {
            Configuration conf = context.getConfiguration();
            // Paths to the stop-word and delimiter files come from job configuration
            // (keys "stopwords" and "delimiters"); both are loaded once per task.
            String stopWordsPath = conf.get("stopwords");
            String delimitersPath = conf.get("delimiters");
            this.stopWords = Arrays.asList(readHDFSFile(stopWordsPath, conf).split("\n"));
            this.delimiters = readHDFSFile(delimitersPath, conf);
        }

        @Override
        public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
            // TODO
            String line = value.toString();
            // Each character of `delimiters` is an individual token separator.
            StringTokenizer tokenizer = new StringTokenizer(line, this.delimiters);
            while (tokenizer.hasMoreTokens()) {
                String nextToken = tokenizer.nextToken();
                nextToken = nextToken.toLowerCase();
                if (!this.stopWords.contains(nextToken)){
                    context.write(new Text(nextToken), new IntWritable(1));
                }
            }
        }
    }

    /** Job A reducer: classic word-count sum of the 1s emitted per word. */
    public static class TitleCountReduce extends Reducer<Text, IntWritable, Text, IntWritable> {
        @Override
        public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            // TODO
            int sum = 0;
            for (IntWritable val : values) {
                sum += val.get();
            }
            context.write(key, new IntWritable(sum));
        }
    }

    /**
     * Job B mapper: keeps a per-task running top-N of (count, word) pairs in a
     * TreeSet (sorted ascending by count, then word) and flushes the survivors
     * in cleanup() as [word, count] arrays under a single NullWritable key.
     */
    public static class TopTitlesMap extends Mapper<Text, Text, NullWritable, TextArrayWritable> {
        Integer N;
        // TODO
        // TreeSet orders Pairs ascending, so first() is always the smallest count.
        private TreeSet<Pair<Integer, String>> countTopTitleMap = new TreeSet<Pair<Integer, String>>();

        @Override
        protected void setup(Context context) throws IOException,InterruptedException {
            Configuration conf = context.getConfiguration();
            // How many top titles to keep; defaults to 10 if "N" is unset.
            this.N = conf.getInt("N", 10);
        }

        @Override
        public void map(Text key, Text value, Context context) throws IOException, InterruptedException {
            // TODO
            // KeyValueTextInputFormat gives key=word, value=count from job A's output.
            Integer count = Integer.parseInt(value.toString());
            String word = key.toString();
            countTopTitleMap.add(new Pair<Integer, String>(count, word));
            if (countTopTitleMap.size() > this.N) {
                // Evict the current minimum to keep only the N largest.
                countTopTitleMap.remove(countTopTitleMap.first());
            }
        }

        @Override
        protected void cleanup(Context context) throws IOException, InterruptedException {
            // TODO
            for (Pair<Integer, String> item : countTopTitleMap) {
                // Serialized as [word, count] — the reducer parses this order back.
                String[] strings = {item.second, item.first.toString()};
                TextArrayWritable val = new TextArrayWritable(strings);
                context.write(NullWritable.get(), val);
            }
        }
    }

    /**
     * Job B reducer (single instance): merges every mapper's local top-N into
     * the global top-N with the same TreeSet eviction scheme, then writes
     * (word, count) pairs in ascending count order.
     */
    public static class TopTitlesReduce extends Reducer<NullWritable, TextArrayWritable, Text, IntWritable> {
        Integer N;
        // TODO
        private TreeSet<Pair<Integer, String>> countTopTitleMap = new TreeSet<Pair<Integer, String>>();

        @Override
        protected void setup(Context context) throws IOException,InterruptedException {
            Configuration conf = context.getConfiguration();
            this.N = conf.getInt("N", 10);
        }

        @Override
        public void reduce(NullWritable key, Iterable<TextArrayWritable> values, Context context) throws IOException, InterruptedException {
            // TODO
            for (TextArrayWritable val: values) {
                // ArrayWritable.toArray() yields the backing Text[] (valueClass is Text).
                Text[] pair= (Text[]) val.toArray();
                String word = pair[0].toString();
                Integer count = Integer.parseInt(pair[1].toString());
                countTopTitleMap.add(new Pair<Integer, String>(count, word));
                if (countTopTitleMap.size() > this.N) {
                    countTopTitleMap.remove(countTopTitleMap.first());
                }
            }
            for (Pair<Integer, String> item: countTopTitleMap) {
                Text word = new Text(item.second);
                IntWritable value = new IntWritable(item.first);
                context.write(word, value);
            }
        }
    }
}
// >>> Don't Change
/**
 * Immutable, comparable 2-tuple. Ordering is lexicographic: by {@code first},
 * then by {@code second} — which is what lets TreeSet&lt;Pair&lt;Integer,String&gt;&gt;
 * act as a count-ordered top-N structure above.
 */
class Pair<A extends Comparable<? super A>,
        B extends Comparable<? super B>>
        implements Comparable<Pair<A, B>> {
    // Public final fields by design: this is a plain value carrier.
    public final A first;
    public final B second;

    public Pair(A first, B second) {
        this.first = first;
        this.second = second;
    }

    /** Static factory mirroring the constructor. */
    public static <A extends Comparable<? super A>,
            B extends Comparable<? super B>>
    Pair<A, B> of(A first, B second) {
        return new Pair<A, B>(first, second);
    }

    // NOTE(review): returns 1 for a null argument instead of throwing NPE, which
    // deviates from the Comparable contract — left as-is ("Don't Change" region).
    @Override
    public int compareTo(Pair<A, B> o) {
        int cmp = o == null ? 1 : (this.first).compareTo(o.first);
        return cmp == 0 ? (this.second).compareTo(o.second) : cmp;
    }

    @Override
    public int hashCode() {
        return 31 * hashcode(first) + hashcode(second);
    }

    // Null-safe hash helper so null components are legal.
    private static int hashcode(Object o) {
        return o == null ? 0 : o.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof Pair))
            return false;
        if (this == obj)
            return true;
        return equal(first, ((Pair<?, ?>) obj).first)
                && equal(second, ((Pair<?, ?>) obj).second);
    }

    // Null-safe equality for the two components.
    private boolean equal(Object o1, Object o2) {
        return o1 == o2 || (o1 != null && o1.equals(o2));
    }

    @Override
    public String toString() {
        return "(" + first + ", " + second + ')';
    }
}
// <<< Don't Change
| |
/* Copyright 2020 Telstra Open Source
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openkilda.persistence.ferma.repositories;
import static com.google.common.collect.Sets.newHashSet;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.openkilda.model.ConnectedDeviceType.ARP;
import static org.openkilda.model.ConnectedDeviceType.LLDP;
import org.openkilda.model.Switch;
import org.openkilda.model.SwitchConnectedDevice;
import org.openkilda.model.SwitchId;
import org.openkilda.persistence.inmemory.InMemoryGraphBasedTest;
import org.openkilda.persistence.repositories.SwitchConnectedDeviceRepository;
import org.openkilda.persistence.repositories.SwitchRepository;
import org.junit.Before;
import org.junit.Test;
import java.time.Instant;
import java.util.Collection;
import java.util.Optional;
/**
 * Integration tests for {@code SwitchConnectedDeviceRepository} backed by the
 * in-memory graph persistence implementation. Exercises create/delete plus the
 * switch-id, flow-id and unique-field-combination lookups for both LLDP and
 * ARP connected devices.
 */
public class FermaSwitchConnectedDevicesRepositoryTest extends InMemoryGraphBasedTest {
    static final SwitchId FIRST_SWITCH_ID = new SwitchId("01");
    static final SwitchId SECOND_SWITCH_ID = new SwitchId("02");
    static final int FIRST_PORT_NUMBER = 1;
    static final int SECOND_PORT_NUMBER = 2;
    static final int FIRST_VLAN = 1;
    static final int SECOND_VLAN = 2;
    static final String FIRST_FLOW_ID = "first_flow";
    static final String SECOND_FLOW_ID = "second_flow";
    static final String MAC_ADDRESS_1 = "00:00:00:00:00:00:00:01";
    static final String MAC_ADDRESS_2 = "00:00:00:00:00:00:00:02";
    static final String IP_ADDRESS_1 = "192.168.1.1";
    static final String IP_ADDRESS_2 = "192.168.2.2";
    static final String CHASSIS_ID = "00:00:00:00:00:00:00:03";
    static final String PORT_ID = "123";
    static final int TTL = 120;
    static final String PORT = "some_port";
    static final String SYSTEM_NAME = "ubuntu";
    static final String SYSTEM_DESCRIPTION = "desc";
    static final String CAPABILITIES = "capabilities";
    static final String MANAGEMENT_ADDRESS = "127.0.0.1";
    static final Instant TIME_FIRST_SEEN = Instant.now().minusSeconds(10);
    static final Instant TIME_LAST_SEEN = Instant.now();

    Switch firstSwitch;
    Switch secondSwitch;
    // Fixture layout: one LLDP device per switch, and two ARP devices both on
    // the second switch — so every query under test has matching and
    // non-matching rows.
    SwitchConnectedDevice lldpConnectedDeviceA;
    SwitchConnectedDevice lldpConnectedDeviceB;
    SwitchConnectedDevice arpConnectedDeviceC;
    SwitchConnectedDevice arpConnectedDeviceD;
    SwitchRepository switchRepository;
    SwitchConnectedDeviceRepository connectedDeviceRepository;

    @Before
    public void setUp() {
        switchRepository = repositoryFactory.createSwitchRepository();
        connectedDeviceRepository = repositoryFactory.createSwitchConnectedDeviceRepository();
        firstSwitch = createTestSwitch(FIRST_SWITCH_ID.getId());
        secondSwitch = createTestSwitch(SECOND_SWITCH_ID.getId());
        lldpConnectedDeviceA = new SwitchConnectedDevice(
                firstSwitch, FIRST_PORT_NUMBER, FIRST_VLAN, FIRST_FLOW_ID, true, MAC_ADDRESS_1, LLDP, null, CHASSIS_ID,
                PORT_ID, TTL, PORT, SYSTEM_NAME, SYSTEM_DESCRIPTION, CAPABILITIES, MANAGEMENT_ADDRESS, TIME_FIRST_SEEN,
                TIME_LAST_SEEN);
        lldpConnectedDeviceB = new SwitchConnectedDevice(
                secondSwitch, FIRST_PORT_NUMBER, FIRST_VLAN, SECOND_FLOW_ID, false, MAC_ADDRESS_1, LLDP, null,
                CHASSIS_ID, PORT_ID, TTL, PORT, SYSTEM_NAME, SYSTEM_DESCRIPTION, CAPABILITIES, MANAGEMENT_ADDRESS,
                TIME_FIRST_SEEN, TIME_LAST_SEEN);
        arpConnectedDeviceC = new SwitchConnectedDevice(
                secondSwitch, SECOND_PORT_NUMBER, SECOND_VLAN, null, null, MAC_ADDRESS_2, ARP, IP_ADDRESS_1, null, null,
                TTL, PORT, SYSTEM_NAME, SYSTEM_DESCRIPTION, CAPABILITIES, MANAGEMENT_ADDRESS, TIME_FIRST_SEEN,
                TIME_LAST_SEEN);
        arpConnectedDeviceD = new SwitchConnectedDevice(
                secondSwitch, SECOND_PORT_NUMBER, SECOND_VLAN, SECOND_FLOW_ID, null, MAC_ADDRESS_2, ARP, IP_ADDRESS_2,
                null, null, TTL, PORT, SYSTEM_NAME, SYSTEM_DESCRIPTION, CAPABILITIES, MANAGEMENT_ADDRESS,
                TIME_FIRST_SEEN, TIME_LAST_SEEN);
    }

    @Test
    public void createConnectedDeviceTest() {
        connectedDeviceRepository.add(lldpConnectedDeviceA);
        Collection<SwitchConnectedDevice> devices = connectedDeviceRepository.findAll();
        assertEquals(lldpConnectedDeviceA, devices.iterator().next());
        // The persisted device must keep its link to the owning switch vertex.
        assertNotNull(devices.iterator().next().getSwitchObj());
    }

    @Test
    public void deleteConnectedDeviceTest() {
        transactionManager.doInTransaction(() -> {
            connectedDeviceRepository.add(lldpConnectedDeviceA);
            connectedDeviceRepository.add(lldpConnectedDeviceB);
            assertEquals(2, connectedDeviceRepository.findAll().size());
            connectedDeviceRepository.remove(lldpConnectedDeviceA);
            assertEquals(1, connectedDeviceRepository.findAll().size());
            assertEquals(lldpConnectedDeviceB, connectedDeviceRepository.findAll().iterator().next());
            connectedDeviceRepository.remove(lldpConnectedDeviceB);
            assertEquals(0, connectedDeviceRepository.findAll().size());
        });
    }

    @Test
    public void findBySwitchIdTest() {
        connectedDeviceRepository.add(lldpConnectedDeviceA);
        connectedDeviceRepository.add(lldpConnectedDeviceB);
        connectedDeviceRepository.add(arpConnectedDeviceC);
        connectedDeviceRepository.add(arpConnectedDeviceD);
        Collection<SwitchConnectedDevice> firstSwitchDevices = connectedDeviceRepository
                .findBySwitchId(FIRST_SWITCH_ID);
        assertEquals(1, firstSwitchDevices.size());
        assertEquals(lldpConnectedDeviceA, firstSwitchDevices.iterator().next());
        Collection<SwitchConnectedDevice> secondFlowDevices = connectedDeviceRepository
                .findBySwitchId(SECOND_SWITCH_ID);
        assertEquals(3, secondFlowDevices.size());
        assertEquals(newHashSet(lldpConnectedDeviceB, arpConnectedDeviceC, arpConnectedDeviceD),
                newHashSet(secondFlowDevices));
    }

    @Test
    public void findByFlowIdTest() {
        connectedDeviceRepository.add(lldpConnectedDeviceA);
        connectedDeviceRepository.add(lldpConnectedDeviceB);
        connectedDeviceRepository.add(arpConnectedDeviceC);
        connectedDeviceRepository.add(arpConnectedDeviceD);
        Collection<SwitchConnectedDevice> firstDevice = connectedDeviceRepository.findByFlowId(FIRST_FLOW_ID);
        assertEquals(1, firstDevice.size());
        assertEquals(lldpConnectedDeviceA, firstDevice.iterator().next());
        Collection<SwitchConnectedDevice> secondDevices = connectedDeviceRepository.findByFlowId(SECOND_FLOW_ID);
        assertEquals(2, secondDevices.size());
        // Device C has a null flow id and must not be matched.
        assertEquals(newHashSet(lldpConnectedDeviceB, arpConnectedDeviceD), newHashSet(secondDevices));
    }

    @Test
    public void findByLldpUniqueFields() {
        connectedDeviceRepository.add(lldpConnectedDeviceA);
        connectedDeviceRepository.add(lldpConnectedDeviceB);
        connectedDeviceRepository.add(arpConnectedDeviceC);
        runFindByLldpUniqueFields(lldpConnectedDeviceA);
        runFindByLldpUniqueFields(lldpConnectedDeviceB);
        runFindByLldpUniqueFields(arpConnectedDeviceC);
        // Negative case: a non-existent field combination finds nothing.
        assertFalse(connectedDeviceRepository.findLldpByUniqueFieldCombination(
                firstSwitch.getSwitchId(), 999, 999, "fake", CHASSIS_ID, PORT_ID).isPresent());
    }

    /**
     * Looks the device up via the LLDP unique-field query and asserts it is
     * found if and only if the device actually is of LLDP type.
     */
    private void runFindByLldpUniqueFields(SwitchConnectedDevice device) {
        Optional<SwitchConnectedDevice> foundDevice = connectedDeviceRepository.findLldpByUniqueFieldCombination(
                device.getSwitchId(), device.getPortNumber(), device.getVlan(), device.getMacAddress(),
                device.getChassisId(), device.getPortId());
        if (LLDP.equals(device.getType())) {
            assertTrue(foundDevice.isPresent());
            assertEquals(device, foundDevice.get());
        } else {
            assertFalse(foundDevice.isPresent());
        }
    }

    @Test
    public void findByArpUniqueFields() {
        connectedDeviceRepository.add(lldpConnectedDeviceA);
        connectedDeviceRepository.add(arpConnectedDeviceC);
        connectedDeviceRepository.add(arpConnectedDeviceD);
        runFindByArpUniqueFields(lldpConnectedDeviceA);
        runFindByArpUniqueFields(arpConnectedDeviceC);
        runFindByArpUniqueFields(arpConnectedDeviceD);
        // Negative case: must exercise the ARP finder, not the LLDP one
        // (the original assertion was copy-pasted from findByLldpUniqueFields).
        assertFalse(connectedDeviceRepository.findArpByUniqueFieldCombination(
                firstSwitch.getSwitchId(), 999, 999, "fake", "fake_ip").isPresent());
    }

    /**
     * Looks the device up via the ARP unique-field query and asserts it is
     * found if and only if the device actually is of ARP type.
     */
    private void runFindByArpUniqueFields(SwitchConnectedDevice device) {
        Optional<SwitchConnectedDevice> foundDevice = connectedDeviceRepository.findArpByUniqueFieldCombination(
                device.getSwitchId(), device.getPortNumber(), device.getVlan(), device.getMacAddress(),
                device.getIpAddress());
        if (ARP.equals(device.getType())) {
            assertTrue(foundDevice.isPresent());
            assertEquals(device, foundDevice.get());
        } else {
            assertFalse(foundDevice.isPresent());
        }
    }
}
| |
/***
* ASM: a very small and fast Java bytecode manipulation framework
* Copyright (c) 2000-2011 INRIA, France Telecom
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
package scouter.org.objectweb.asm;
/**
* A visitor to visit a Java method. The methods of this class must be called in
* the following order: ( <tt>visitParameter</tt> )* [
* <tt>visitAnnotationDefault</tt> ] ( <tt>visitAnnotation</tt> |
* <tt>visitTypeAnnotation</tt> | <tt>visitAttribute</tt> )* [
* <tt>visitCode</tt> ( <tt>visitFrame</tt> | <tt>visit<i>X</i>Insn</tt> |
* <tt>visitLabel</tt> | <tt>visitInsnAnnotation</tt> |
* <tt>visitTryCatchBlock</tt> | <tt>visitTryCatchBlockAnnotation</tt> |
* <tt>visitLocalVariable</tt> | <tt>visitLocalVariableAnnotation</tt> |
* <tt>visitLineNumber</tt> )* <tt>visitMaxs</tt> ] <tt>visitEnd</tt>. In
* addition, the <tt>visit<i>X</i>Insn</tt> and <tt>visitLabel</tt> methods must
* be called in the sequential order of the bytecode instructions of the visited
* code, <tt>visitInsnAnnotation</tt> must be called <i>after</i> the annotated
* instruction, <tt>visitTryCatchBlock</tt> must be called <i>before</i> the
* labels passed as arguments have been visited,
* <tt>visitTryCatchBlockAnnotation</tt> must be called <i>after</i> the
* corresponding try catch block has been visited, and the
* <tt>visitLocalVariable</tt>, <tt>visitLocalVariableAnnotation</tt> and
* <tt>visitLineNumber</tt> methods must be called <i>after</i> the labels
* passed as arguments have been visited.
*
* @author Eric Bruneton
*/
public abstract class MethodVisitor {
/**
 * The ASM API version implemented by this visitor. The value of this field
 * must be one of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
 */
protected final int api;
/**
 * The method visitor to which this visitor must delegate method calls. May
 * be null. Linking visitors through this field forms a chain: every visit
 * event received here is forwarded to the delegate (when non-null).
 */
protected MethodVisitor mv;
/**
* Constructs a new {@link MethodVisitor}.
*
* @param api
* the ASM API version implemented by this visitor. Must be one
* of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
*/
public MethodVisitor(final int api) {
    // Delegate to the two-argument constructor with no chained visitor:
    // visit events received by this instance will not be forwarded anywhere.
    this(api, null);
}
/**
* Constructs a new {@link MethodVisitor}.
*
* @param api
* the ASM API version implemented by this visitor. Must be one
* of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
* @param mv
* the method visitor to which this visitor must delegate method
* calls. May be null.
*/
public MethodVisitor(final int api, final MethodVisitor mv) {
    if (api != Opcodes.ASM4 && api != Opcodes.ASM5) {
        // Include the offending value so a wrong version constant is
        // diagnosable; the bare exception gave no hint at all.
        throw new IllegalArgumentException("Unsupported ASM API version: " + api);
    }
    this.api = api;
    this.mv = mv;
}
// -------------------------------------------------------------------------
// Parameters, annotations and non standard attributes
// -------------------------------------------------------------------------
/**
* Visits a parameter of this method.
*
* @param name
* parameter name or null if none is provided.
* @param access
* the parameter's access flags, only <tt>ACC_FINAL</tt>,
* <tt>ACC_SYNTHETIC</tt> or/and <tt>ACC_MANDATED</tt> are
* allowed (see {@link Opcodes}).
*/
public void visitParameter(String name, int access) {
    if (api < Opcodes.ASM5) {
        // The MethodParameters attribute is an ASM5 feature. A descriptive
        // UnsupportedOperationException replaces the bare RuntimeException;
        // it is a RuntimeException subclass, so existing catches still work.
        throw new UnsupportedOperationException(
                "visitParameter requires at least the ASM5 API version");
    }
    if (mv != null) {
        mv.visitParameter(name, access);
    }
}
/**
* Visits the default value of this annotation interface method.
*
 * @return a visitor to visit the actual default value of this
 *         annotation interface method, or <tt>null</tt> if this visitor is
 *         not interested in visiting this default value. The 'name'
 *         parameters passed to the methods of this annotation visitor are
 *         ignored. Moreover, exactly one visit method must be called on this
 *         annotation visitor, followed by visitEnd.
*/
public AnnotationVisitor visitAnnotationDefault() {
    // Forward to the chained visitor if one is installed; otherwise decline
    // to visit the default value by returning null.
    return mv == null ? null : mv.visitAnnotationDefault();
}
/**
* Visits an annotation of this method.
*
* @param desc
* the class descriptor of the annotation class.
* @param visible
* <tt>true</tt> if the annotation is visible at runtime.
* @return a visitor to visit the annotation values, or <tt>null</tt> if
* this visitor is not interested in visiting this annotation.
*/
public AnnotationVisitor visitAnnotation(String desc, boolean visible) {
    // Delegate when a downstream visitor exists; null means "not interested".
    return mv == null ? null : mv.visitAnnotation(desc, visible);
}
/**
* Visits an annotation on a type in the method signature.
*
* @param typeRef
* a reference to the annotated type. The sort of this type
* reference must be {@link TypeReference#METHOD_TYPE_PARAMETER
* METHOD_TYPE_PARAMETER},
* {@link TypeReference#METHOD_TYPE_PARAMETER_BOUND
* METHOD_TYPE_PARAMETER_BOUND},
* {@link TypeReference#METHOD_RETURN METHOD_RETURN},
* {@link TypeReference#METHOD_RECEIVER METHOD_RECEIVER},
* {@link TypeReference#METHOD_FORMAL_PARAMETER
* METHOD_FORMAL_PARAMETER} or {@link TypeReference#THROWS
* THROWS}. See {@link TypeReference}.
* @param typePath
* the path to the annotated type argument, wildcard bound, array
* element type, or static inner type within 'typeRef'. May be
* <tt>null</tt> if the annotation targets 'typeRef' as a whole.
* @param desc
* the class descriptor of the annotation class.
* @param visible
* <tt>true</tt> if the annotation is visible at runtime.
* @return a visitor to visit the annotation values, or <tt>null</tt> if
* this visitor is not interested in visiting this annotation.
*/
public AnnotationVisitor visitTypeAnnotation(int typeRef,
        TypePath typePath, String desc, boolean visible) {
    if (api < Opcodes.ASM5) {
        // Type annotations are an ASM5 feature. UnsupportedOperationException
        // is a RuntimeException subclass (backward compatible) and, unlike
        // the bare RuntimeException it replaces, says what went wrong.
        throw new UnsupportedOperationException(
                "visitTypeAnnotation requires at least the ASM5 API version");
    }
    if (mv != null) {
        return mv.visitTypeAnnotation(typeRef, typePath, desc, visible);
    }
    return null;
}
/**
 * Visits an annotation of a parameter of this method.
*
* @param parameter
* the parameter index.
* @param desc
* the class descriptor of the annotation class.
* @param visible
* <tt>true</tt> if the annotation is visible at runtime.
* @return a visitor to visit the annotation values, or <tt>null</tt> if
* this visitor is not interested in visiting this annotation.
*/
public AnnotationVisitor visitParameterAnnotation(int parameter,
        String desc, boolean visible) {
    // Delegate when a downstream visitor exists; null means "not interested".
    return mv == null ? null : mv.visitParameterAnnotation(parameter, desc, visible);
}
/**
* Visits a non standard attribute of this method.
*
* @param attr
* an attribute.
*/
public void visitAttribute(Attribute attr) {
    // Nothing to do unless a delegate is present.
    if (mv == null) {
        return;
    }
    mv.visitAttribute(attr);
}
/**
* Starts the visit of the method's code, if any (i.e. non abstract method).
*/
public void visitCode() {
    // Notify the delegate, if any, that the bytecode body begins here.
    if (mv == null) {
        return;
    }
    mv.visitCode();
}
/**
* Visits the current state of the local variables and operand stack
* elements. This method must(*) be called <i>just before</i> any
* instruction <b>i</b> that follows an unconditional branch instruction
* such as GOTO or THROW, that is the target of a jump instruction, or that
* starts an exception handler block. The visited types must describe the
* values of the local variables and of the operand stack elements <i>just
* before</i> <b>i</b> is executed.<br>
* <br>
* (*) this is mandatory only for classes whose version is greater than or
* equal to {@link Opcodes#V1_6 V1_6}. <br>
* <br>
* The frames of a method must be given either in expanded form, or in
* compressed form (all frames must use the same format, i.e. you must not
* mix expanded and compressed frames within a single method):
* <ul>
* <li>In expanded form, all frames must have the F_NEW type.</li>
* <li>In compressed form, frames are basically "deltas" from the state of
* the previous frame:
* <ul>
* <li>{@link Opcodes#F_SAME} representing frame with exactly the same
* locals as the previous frame and with the empty stack.</li>
* <li>{@link Opcodes#F_SAME1} representing frame with exactly the same
* locals as the previous frame and with single value on the stack (
* <code>nStack</code> is 1 and <code>stack[0]</code> contains value for the
* type of the stack item).</li>
* <li>{@link Opcodes#F_APPEND} representing frame with current locals are
* the same as the locals in the previous frame, except that additional
* locals are defined (<code>nLocal</code> is 1, 2 or 3 and
* <code>local</code> elements contains values representing added types).</li>
* <li>{@link Opcodes#F_CHOP} representing frame with current locals are the
* same as the locals in the previous frame, except that the last 1-3 locals
* are absent and with the empty stack (<code>nLocals</code> is 1, 2 or 3).</li>
* <li>{@link Opcodes#F_FULL} representing complete frame data.</li>
* </ul>
* </li>
* </ul>
* <br>
* In both cases the first frame, corresponding to the method's parameters
* and access flags, is implicit and must not be visited. Also, it is
* illegal to visit two or more frames for the same code location (i.e., at
* least one instruction must be visited between two calls to visitFrame).
*
* @param type
* the type of this stack map frame. Must be
* {@link Opcodes#F_NEW} for expanded frames, or
 *            {@link Opcodes#F_FULL}, {@link Opcodes#F_APPEND},
 *            {@link Opcodes#F_CHOP}, {@link Opcodes#F_SAME} or
 *            {@link Opcodes#F_SAME1} for compressed frames.
* @param nLocal
* the number of local variables in the visited frame.
* @param local
* the local variable types in this frame. This array must not be
* modified. Primitive types are represented by
* {@link Opcodes#TOP}, {@link Opcodes#INTEGER},
* {@link Opcodes#FLOAT}, {@link Opcodes#LONG},
* {@link Opcodes#DOUBLE},{@link Opcodes#NULL} or
* {@link Opcodes#UNINITIALIZED_THIS} (long and double are
* represented by a single element). Reference types are
* represented by String objects (representing internal names),
* and uninitialized types by Label objects (this label
* designates the NEW instruction that created this uninitialized
* value).
* @param nStack
* the number of operand stack elements in the visited frame.
* @param stack
* the operand stack types in this frame. This array must not be
* modified. Its content has the same format as the "local"
* array.
* @throws IllegalStateException
* if a frame is visited just after another one, without any
* instruction between the two (unless this frame is a
* Opcodes#F_SAME frame, in which case it is silently ignored).
*/
public void visitFrame(int type, int nLocal, Object[] local, int nStack,
        Object[] stack) {
    // Forward the stack map frame unchanged to the delegate, if any.
    if (mv == null) {
        return;
    }
    mv.visitFrame(type, nLocal, local, nStack, stack);
}
// -------------------------------------------------------------------------
// Normal instructions
// -------------------------------------------------------------------------
/**
* Visits a zero operand instruction.
*
* @param opcode
* the opcode of the instruction to be visited. This opcode is
* either NOP, ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1,
* ICONST_2, ICONST_3, ICONST_4, ICONST_5, LCONST_0, LCONST_1,
* FCONST_0, FCONST_1, FCONST_2, DCONST_0, DCONST_1, IALOAD,
* LALOAD, FALOAD, DALOAD, AALOAD, BALOAD, CALOAD, SALOAD,
* IASTORE, LASTORE, FASTORE, DASTORE, AASTORE, BASTORE, CASTORE,
* SASTORE, POP, POP2, DUP, DUP_X1, DUP_X2, DUP2, DUP2_X1,
* DUP2_X2, SWAP, IADD, LADD, FADD, DADD, ISUB, LSUB, FSUB, DSUB,
* IMUL, LMUL, FMUL, DMUL, IDIV, LDIV, FDIV, DDIV, IREM, LREM,
* FREM, DREM, INEG, LNEG, FNEG, DNEG, ISHL, LSHL, ISHR, LSHR,
* IUSHR, LUSHR, IAND, LAND, IOR, LOR, IXOR, LXOR, I2L, I2F, I2D,
* L2I, L2F, L2D, F2I, F2L, F2D, D2I, D2L, D2F, I2B, I2C, I2S,
* LCMP, FCMPL, FCMPG, DCMPL, DCMPG, IRETURN, LRETURN, FRETURN,
* DRETURN, ARETURN, RETURN, ARRAYLENGTH, ATHROW, MONITORENTER,
* or MONITOREXIT.
*/
public void visitInsn(int opcode) {
    // Forward the zero-operand instruction to the delegate, if any.
    if (mv == null) {
        return;
    }
    mv.visitInsn(opcode);
}
/**
* Visits an instruction with a single int operand.
*
* @param opcode
* the opcode of the instruction to be visited. This opcode is
* either BIPUSH, SIPUSH or NEWARRAY.
* @param operand
* the operand of the instruction to be visited.<br>
* When opcode is BIPUSH, operand value should be between
* Byte.MIN_VALUE and Byte.MAX_VALUE.<br>
* When opcode is SIPUSH, operand value should be between
* Short.MIN_VALUE and Short.MAX_VALUE.<br>
* When opcode is NEWARRAY, operand value should be one of
* {@link Opcodes#T_BOOLEAN}, {@link Opcodes#T_CHAR},
* {@link Opcodes#T_FLOAT}, {@link Opcodes#T_DOUBLE},
* {@link Opcodes#T_BYTE}, {@link Opcodes#T_SHORT},
* {@link Opcodes#T_INT} or {@link Opcodes#T_LONG}.
*/
public void visitIntInsn(int opcode, int operand) {
    // Forward the single-int-operand instruction to the delegate, if any.
    if (mv == null) {
        return;
    }
    mv.visitIntInsn(opcode, operand);
}
/**
* Visits a local variable instruction. A local variable instruction is an
* instruction that loads or stores the value of a local variable.
*
* @param opcode
* the opcode of the local variable instruction to be visited.
* This opcode is either ILOAD, LLOAD, FLOAD, DLOAD, ALOAD,
* ISTORE, LSTORE, FSTORE, DSTORE, ASTORE or RET.
* @param var
* the operand of the instruction to be visited. This operand is
* the index of a local variable.
*/
public void visitVarInsn(int opcode, int var) {
    // Forward the local-variable instruction to the delegate, if any.
    if (mv == null) {
        return;
    }
    mv.visitVarInsn(opcode, var);
}
/**
* Visits a type instruction. A type instruction is an instruction that
* takes the internal name of a class as parameter.
*
* @param opcode
* the opcode of the type instruction to be visited. This opcode
* is either NEW, ANEWARRAY, CHECKCAST or INSTANCEOF.
* @param type
* the operand of the instruction to be visited. This operand
* must be the internal name of an object or array class (see
* {@link Type#getInternalName() getInternalName}).
*/
public void visitTypeInsn(int opcode, String type) {
    // Forward the type instruction to the delegate, if any.
    if (mv == null) {
        return;
    }
    mv.visitTypeInsn(opcode, type);
}
/**
* Visits a field instruction. A field instruction is an instruction that
* loads or stores the value of a field of an object.
*
* @param opcode
* the opcode of the type instruction to be visited. This opcode
* is either GETSTATIC, PUTSTATIC, GETFIELD or PUTFIELD.
* @param owner
* the internal name of the field's owner class (see
* {@link Type#getInternalName() getInternalName}).
* @param name
* the field's name.
* @param desc
* the field's descriptor (see {@link Type Type}).
*/
public void visitFieldInsn(int opcode, String owner, String name,
        String desc) {
    // Forward the field access instruction to the delegate, if any.
    if (mv == null) {
        return;
    }
    mv.visitFieldInsn(opcode, owner, name, desc);
}
/**
* Visits a method instruction. A method instruction is an instruction that
* invokes a method.
*
* @param opcode
* the opcode of the type instruction to be visited. This opcode
* is either INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or
* INVOKEINTERFACE.
* @param owner
* the internal name of the method's owner class (see
* {@link Type#getInternalName() getInternalName}).
* @param name
* the method's name.
* @param desc
* the method's descriptor (see {@link Type Type}).
*/
@Deprecated
public void visitMethodInsn(int opcode, String owner, String name,
        String desc) {
    // Pre-ASM5 callers: forward the legacy four-argument form directly.
    if (api < Opcodes.ASM5) {
        if (mv != null) {
            mv.visitMethodInsn(opcode, owner, name, desc);
        }
        return;
    }
    // ASM5 and later: derive the interface flag from the opcode and route
    // through the modern five-argument overload.
    visitMethodInsn(opcode, owner, name, desc,
            opcode == Opcodes.INVOKEINTERFACE);
}
/**
* Visits a method instruction. A method instruction is an instruction that
* invokes a method.
*
* @param opcode
* the opcode of the type instruction to be visited. This opcode
* is either INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or
* INVOKEINTERFACE.
* @param owner
* the internal name of the method's owner class (see
* {@link Type#getInternalName() getInternalName}).
* @param name
* the method's name.
* @param desc
* the method's descriptor (see {@link Type Type}).
* @param itf
* if the method's owner class is an interface.
*/
public void visitMethodInsn(int opcode, String owner, String name,
        String desc, boolean itf) {
    // ASM5 and later: forward the full five-argument form to the delegate.
    if (api >= Opcodes.ASM5) {
        if (mv != null) {
            mv.visitMethodInsn(opcode, owner, name, desc, itf);
        }
        return;
    }
    // Pre-ASM5: the interface flag must match what the opcode implies,
    // otherwise the call cannot be represented by the legacy API.
    if (itf != (opcode == Opcodes.INVOKEINTERFACE)) {
        throw new IllegalArgumentException(
                "INVOKESPECIAL/STATIC on interfaces require ASM 5");
    }
    visitMethodInsn(opcode, owner, name, desc);
}
/**
* Visits an invokedynamic instruction.
*
* @param name
* the method's name.
* @param desc
* the method's descriptor (see {@link Type Type}).
* @param bsm
* the bootstrap method.
* @param bsmArgs
* the bootstrap method constant arguments. Each argument must be
* an {@link Integer}, {@link Float}, {@link Long},
* {@link Double}, {@link String}, {@link Type} or {@link Handle}
* value. This method is allowed to modify the content of the
* array so a caller should expect that this array may change.
*/
public void visitInvokeDynamicInsn(String name, String desc, Handle bsm,
        Object... bsmArgs) {
    // Forward the invokedynamic instruction to the delegate, if any.
    if (mv == null) {
        return;
    }
    mv.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
}
/**
* Visits a jump instruction. A jump instruction is an instruction that may
* jump to another instruction.
*
* @param opcode
* the opcode of the type instruction to be visited. This opcode
* is either IFEQ, IFNE, IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ,
* IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE,
* IF_ACMPEQ, IF_ACMPNE, GOTO, JSR, IFNULL or IFNONNULL.
* @param label
* the operand of the instruction to be visited. This operand is
* a label that designates the instruction to which the jump
* instruction may jump.
*/
public void visitJumpInsn(int opcode, Label label) {
    // Forward the jump instruction to the delegate, if any.
    if (mv == null) {
        return;
    }
    mv.visitJumpInsn(opcode, label);
}
/**
* Visits a label. A label designates the instruction that will be visited
* just after it.
*
* @param label
* a {@link Label Label} object.
*/
public void visitLabel(Label label) {
    // Forward the label to the delegate, if any.
    if (mv == null) {
        return;
    }
    mv.visitLabel(label);
}
// -------------------------------------------------------------------------
// Special instructions
// -------------------------------------------------------------------------
/**
* Visits a LDC instruction. Note that new constant types may be added in
* future versions of the Java Virtual Machine. To easily detect new
* constant types, implementations of this method should check for
* unexpected constant types, like this:
*
* <pre>
* if (cst instanceof Integer) {
* // ...
* } else if (cst instanceof Float) {
* // ...
* } else if (cst instanceof Long) {
* // ...
* } else if (cst instanceof Double) {
* // ...
* } else if (cst instanceof String) {
* // ...
* } else if (cst instanceof Type) {
* int sort = ((Type) cst).getSort();
* if (sort == Type.OBJECT) {
* // ...
* } else if (sort == Type.ARRAY) {
* // ...
* } else if (sort == Type.METHOD) {
* // ...
* } else {
* // throw an exception
* }
* } else if (cst instanceof Handle) {
* // ...
* } else {
* // throw an exception
* }
* </pre>
*
* @param cst
* the constant to be loaded on the stack. This parameter must be
* a non null {@link Integer}, a {@link Float}, a {@link Long}, a
* {@link Double}, a {@link String}, a {@link Type} of OBJECT or
* ARRAY sort for <tt>.class</tt> constants, for classes whose
* version is 49.0, a {@link Type} of METHOD sort or a
* {@link Handle} for MethodType and MethodHandle constants, for
* classes whose version is 51.0.
*/
public void visitLdcInsn(Object cst) {
    // Forward the LDC instruction to the delegate, if any.
    if (mv == null) {
        return;
    }
    mv.visitLdcInsn(cst);
}
/**
* Visits an IINC instruction.
*
* @param var
* index of the local variable to be incremented.
* @param increment
* amount to increment the local variable by.
*/
public void visitIincInsn(int var, int increment) {
    // Forward the IINC instruction to the delegate, if any.
    if (mv == null) {
        return;
    }
    mv.visitIincInsn(var, increment);
}
/**
* Visits a TABLESWITCH instruction.
*
* @param min
* the minimum key value.
* @param max
* the maximum key value.
* @param dflt
* beginning of the default handler block.
* @param labels
* beginnings of the handler blocks. <tt>labels[i]</tt> is the
* beginning of the handler block for the <tt>min + i</tt> key.
*/
public void visitTableSwitchInsn(int min, int max, Label dflt,
        Label... labels) {
    // Forward the TABLESWITCH instruction to the delegate, if any.
    if (mv == null) {
        return;
    }
    mv.visitTableSwitchInsn(min, max, dflt, labels);
}
/**
* Visits a LOOKUPSWITCH instruction.
*
* @param dflt
* beginning of the default handler block.
* @param keys
* the values of the keys.
* @param labels
* beginnings of the handler blocks. <tt>labels[i]</tt> is the
* beginning of the handler block for the <tt>keys[i]</tt> key.
*/
public void visitLookupSwitchInsn(Label dflt, int[] keys, Label[] labels) {
    // Forward the LOOKUPSWITCH instruction to the delegate, if any.
    if (mv == null) {
        return;
    }
    mv.visitLookupSwitchInsn(dflt, keys, labels);
}
/**
* Visits a MULTIANEWARRAY instruction.
*
* @param desc
* an array type descriptor (see {@link Type Type}).
* @param dims
* number of dimensions of the array to allocate.
*/
public void visitMultiANewArrayInsn(String desc, int dims) {
    // Forward the MULTIANEWARRAY instruction to the delegate, if any.
    if (mv == null) {
        return;
    }
    mv.visitMultiANewArrayInsn(desc, dims);
}
/**
* Visits an annotation on an instruction. This method must be called just
* <i>after</i> the annotated instruction. It can be called several times
* for the same instruction.
*
* @param typeRef
* a reference to the annotated type. The sort of this type
* reference must be {@link TypeReference#INSTANCEOF INSTANCEOF},
* {@link TypeReference#NEW NEW},
* {@link TypeReference#CONSTRUCTOR_REFERENCE
* CONSTRUCTOR_REFERENCE}, {@link TypeReference#METHOD_REFERENCE
* METHOD_REFERENCE}, {@link TypeReference#CAST CAST},
* {@link TypeReference#CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT
* CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT},
* {@link TypeReference#METHOD_INVOCATION_TYPE_ARGUMENT
* METHOD_INVOCATION_TYPE_ARGUMENT},
* {@link TypeReference#CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT
* CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT}, or
* {@link TypeReference#METHOD_REFERENCE_TYPE_ARGUMENT
* METHOD_REFERENCE_TYPE_ARGUMENT}. See {@link TypeReference}.
* @param typePath
* the path to the annotated type argument, wildcard bound, array
* element type, or static inner type within 'typeRef'. May be
* <tt>null</tt> if the annotation targets 'typeRef' as a whole.
* @param desc
* the class descriptor of the annotation class.
* @param visible
* <tt>true</tt> if the annotation is visible at runtime.
* @return a visitor to visit the annotation values, or <tt>null</tt> if
* this visitor is not interested in visiting this annotation.
*/
public AnnotationVisitor visitInsnAnnotation(int typeRef,
        TypePath typePath, String desc, boolean visible) {
    if (api < Opcodes.ASM5) {
        // Instruction annotations are an ASM5 feature; replace the bare
        // RuntimeException with a descriptive, backward-compatible subclass.
        throw new UnsupportedOperationException(
                "visitInsnAnnotation requires at least the ASM5 API version");
    }
    if (mv != null) {
        return mv.visitInsnAnnotation(typeRef, typePath, desc, visible);
    }
    return null;
}
// -------------------------------------------------------------------------
// Exceptions table entries, debug information, max stack and max locals
// -------------------------------------------------------------------------
/**
* Visits a try catch block.
*
* @param start
* beginning of the exception handler's scope (inclusive).
* @param end
* end of the exception handler's scope (exclusive).
* @param handler
* beginning of the exception handler's code.
* @param type
* internal name of the type of exceptions handled by the
* handler, or <tt>null</tt> to catch any exceptions (for
* "finally" blocks).
* @throws IllegalArgumentException
* if one of the labels has already been visited by this visitor
* (by the {@link #visitLabel visitLabel} method).
*/
public void visitTryCatchBlock(Label start, Label end, Label handler,
        String type) {
    // Forward the exception-table entry to the delegate, if any.
    if (mv == null) {
        return;
    }
    mv.visitTryCatchBlock(start, end, handler, type);
}
/**
* Visits an annotation on an exception handler type. This method must be
* called <i>after</i> the {@link #visitTryCatchBlock} for the annotated
* exception handler. It can be called several times for the same exception
* handler.
*
* @param typeRef
* a reference to the annotated type. The sort of this type
* reference must be {@link TypeReference#EXCEPTION_PARAMETER
* EXCEPTION_PARAMETER}. See {@link TypeReference}.
* @param typePath
* the path to the annotated type argument, wildcard bound, array
* element type, or static inner type within 'typeRef'. May be
* <tt>null</tt> if the annotation targets 'typeRef' as a whole.
* @param desc
* the class descriptor of the annotation class.
* @param visible
* <tt>true</tt> if the annotation is visible at runtime.
* @return a visitor to visit the annotation values, or <tt>null</tt> if
* this visitor is not interested in visiting this annotation.
*/
public AnnotationVisitor visitTryCatchAnnotation(int typeRef,
        TypePath typePath, String desc, boolean visible) {
    if (api < Opcodes.ASM5) {
        // Try/catch annotations are an ASM5 feature; replace the bare
        // RuntimeException with a descriptive, backward-compatible subclass.
        throw new UnsupportedOperationException(
                "visitTryCatchAnnotation requires at least the ASM5 API version");
    }
    if (mv != null) {
        return mv.visitTryCatchAnnotation(typeRef, typePath, desc, visible);
    }
    return null;
}
/**
 * Visits a local variable declaration.
 *
 * @param name
 *            the name of a local variable.
 * @param desc
 *            the type descriptor of this local variable.
 * @param signature
 *            the type signature of this local variable. May be
 *            <tt>null</tt> if the local variable type does not use generic
 *            types.
 * @param start
 *            the first instruction corresponding to the scope of this local
 *            variable (inclusive).
 * @param end
 *            the last instruction corresponding to the scope of this local
 *            variable (exclusive).
 * @param index
 *            the local variable's index.
 * @throws IllegalArgumentException
 *             if one of the labels has not already been visited by this
 *             visitor (by the {@link #visitLabel visitLabel} method).
 */
public void visitLocalVariable(String name, String desc, String signature,
        Label start, Label end, int index) {
    // Forward to the next visitor in the chain, if any is installed.
    if (mv == null) {
        return;
    }
    mv.visitLocalVariable(name, desc, signature, start, end, index);
}
/**
 * Visits an annotation on a local variable type.
 *
 * @param typeRef
 *            a reference to the annotated type. The sort of this type
 *            reference must be {@link TypeReference#LOCAL_VARIABLE
 *            LOCAL_VARIABLE} or {@link TypeReference#RESOURCE_VARIABLE
 *            RESOURCE_VARIABLE}. See {@link TypeReference}.
 * @param typePath
 *            the path to the annotated type argument, wildcard bound, array
 *            element type, or static inner type within 'typeRef'. May be
 *            <tt>null</tt> if the annotation targets 'typeRef' as a whole.
 * @param start
 *            the first instructions corresponding to the continuous ranges
 *            that make the scope of this local variable (inclusive).
 * @param end
 *            the last instructions corresponding to the continuous ranges
 *            that make the scope of this local variable (exclusive). This
 *            array must have the same size as the 'start' array.
 * @param index
 *            the local variable's index in each range. This array must have
 *            the same size as the 'start' array.
 * @param desc
 *            the class descriptor of the annotation class.
 * @param visible
 *            <tt>true</tt> if the annotation is visible at runtime.
 * @return a visitor to visit the annotation values, or <tt>null</tt> if
 *         this visitor is not interested in visiting this annotation.
 * @throws RuntimeException
 *             if this visitor was created with an API level below ASM5,
 *             which does not support type annotations.
 */
public AnnotationVisitor visitLocalVariableAnnotation(int typeRef,
        TypePath typePath, Label[] start, Label[] end, int[] index,
        String desc, boolean visible) {
    if (api < Opcodes.ASM5) {
        // Same exception type as before, but with a diagnostic message so the
        // failure is self-explanatory instead of a bare RuntimeException.
        throw new RuntimeException(
                "visitLocalVariableAnnotation requires the ASM5 API");
    }
    if (mv != null) {
        return mv.visitLocalVariableAnnotation(typeRef, typePath, start,
                end, index, desc, visible);
    }
    return null;
}
/**
 * Visits a line number declaration.
 *
 * @param line
 *            a line number. This number refers to the source file from
 *            which the class was compiled.
 * @param start
 *            the first instruction corresponding to this line number.
 * @throws IllegalArgumentException
 *             if <tt>start</tt> has not already been visited by this
 *             visitor (by the {@link #visitLabel visitLabel} method).
 */
public void visitLineNumber(int line, Label start) {
    // Forward to the next visitor in the chain, if any is installed.
    if (mv == null) {
        return;
    }
    mv.visitLineNumber(line, start);
}
/**
 * Visits the maximum stack size and the maximum number of local variables
 * of the method.
 *
 * @param maxStack
 *            maximum stack size of the method.
 * @param maxLocals
 *            maximum number of local variables for the method.
 */
public void visitMaxs(int maxStack, int maxLocals) {
    // Forward to the next visitor in the chain, if any is installed.
    if (mv == null) {
        return;
    }
    mv.visitMaxs(maxStack, maxLocals);
}
/**
 * Visits the end of the method. This method, which is the last one to be
 * called, is used to inform the visitor that all the annotations and
 * attributes of the method have been visited.
 */
public void visitEnd() {
    // Forward to the next visitor in the chain, if any is installed.
    if (mv == null) {
        return;
    }
    mv.visitEnd();
}
}
| |
package com.ajjpj.asysmon.measure;
import com.ajjpj.afoundation.collection.mutable.ArrayStack;
import com.ajjpj.afoundation.function.AFunction0NoThrow;
import com.ajjpj.asysmon.config.ASysMonConfig;
import com.ajjpj.asysmon.config.log.ASysMonLogger;
import com.ajjpj.asysmon.data.ACorrelationId;
import com.ajjpj.asysmon.data.AHierarchicalData;
import com.ajjpj.asysmon.data.AHierarchicalDataRoot;
import com.ajjpj.asysmon.datasink.ADataSink;
import java.util.*;
/**
* This class collects a tree of hierarchical measurements. It lives in a single thread.
*
* @author arno
*/
public class AMeasurementHierarchyImpl implements AMeasurementHierarchy {
    private static final ASysMonLogger log = ASysMonLogger.get(AMeasurementHierarchyImpl.class);

    private final ASysMonConfig config;
    private final ADataSink dataSink;

    // collecting measurements that are currently open; they are finished implicitly
    // when the enclosing hierarchy finishes (see finish(ASimpleSerialMeasurementImpl))
    private Set<ACollectingMeasurement> collectingMeasurements = new HashSet<>();
    private int size = 0; // total number of measurements in this hierarchy
    private final ArrayStack<ASimpleSerialMeasurementImpl> unfinished = new ArrayStack<>();
    private final ArrayStack<List<AHierarchicalData>> childrenStack = new ArrayStack<>();
    private final Collection<ACorrelationId> startedFlows = new HashSet<>();
    private final Collection<ACorrelationId> joinedFlows = new HashSet<>();

    /**
     * shows if this measurement was finished in an orderly fashion
     */
    private boolean isFinished = false;

    /**
     * shows if this measurement was killed forcibly e.g. because there was an overflow on the stack
     */
    private boolean wasKilled = false;

    public AMeasurementHierarchyImpl(ASysMonConfig config, ADataSink dataSink) {
        this.config = config;
        this.dataSink = dataSink;
    }

    /**
     * Logs an error if this hierarchy was already closed.
     *
     * @return <code>true</code> if the hierarchy is already finished (i.e. the caller should bail out)
     */
    private boolean checkNotFinished () {
        if(isFinished) {
            log.error (new IllegalStateException("This measurement is already closed."));
        }
        return isFinished;
    }

    @Override public ASimpleMeasurement start(String identifier, boolean isSerial) {
        // use the static check for consistency with all the other guards in this class
        if(ASysMonConfig.isGloballyDisabled() || checkNotFinished()) {
            // no-op measurement so callers never have to null-check
            return new ASimpleMeasurement() {
                @Override public void finish() {
                }
                @Override public void addParameter(String identifier, String value) {
                }
            };
        }
        if(unfinished.isEmpty()) {
            dataSink.onStartedHierarchicalMeasurement(identifier);
        }
        if(isSerial) {
            checkOverflow();
            final ASimpleSerialMeasurementImpl result = new ASimpleSerialMeasurementImpl(this, config.timer.getCurrentNanos(), identifier);
            unfinished.push(result);
            size += 1;
            childrenStack.push(new ArrayList<AHierarchicalData>());
            return result;
        }
        else {
            // parallel measurements attach directly to the current serial measurement's children
            return new ASimpleParallelMeasurementImpl(this, config.timer.getCurrentNanos(), identifier, childrenStack.peek());
        }
    }

    private void checkOverflow() {
        checkMaxDepth ();
        checkMaxSize ();
    }

    private void checkMaxSize () {
        if (wasKilled || size < config.maxNumMeasurementsPerHierarchy) {
            return;
        }
        final ASimpleSerialMeasurementImpl rootMeasurement = doKillForcefully ();
        log.error ("Excessive number of measurements in a single hierarchy: " + size + " - probable memory leak, forcefully cleaning measurement stack. Root measurement was " +
                rootMeasurement.getIdentifier() + " with parameters " + rootMeasurement.getParameters() + ", started at " + new Date(rootMeasurement.getStartTimeMillis()));
    }

    /**
     * Unrolls the entire stack of open measurements and marks this hierarchy as killed.
     *
     * @return the root (outermost) measurement that was open, or <code>null</code> if the stack was empty
     */
    private ASimpleSerialMeasurementImpl doKillForcefully() {
        ASimpleSerialMeasurementImpl rootMeasurement = null;
        while(unfinished.nonEmpty()) {
            rootMeasurement = unfinished.peek();
            finish (unfinished.peek());
        }
        // Mark the hierarchy as killed *after* unrolling: finish() short-circuits on
        // wasKilled, so setting the flag before the loop would make it spin forever.
        // Setting it here also makes the checkMax* guards no-ops afterwards, which
        // prevents a second kill attempt on an already-empty stack (rootMeasurement
        // would be null and the subsequent log call would NPE).
        wasKilled = true;
        return rootMeasurement;
    }

    private void checkMaxDepth () {
        if (wasKilled || unfinished.size() < config.maxNestedMeasurements) {
            return;
        }
        // capture the depth *before* killing - doKillForcefully() empties the stack,
        // so reading unfinished.size() afterwards would always log 0
        final int depth = unfinished.size ();
        final ASimpleSerialMeasurementImpl rootMeasurement = doKillForcefully ();
        log.error ("Call depth " + depth + " - probable memory leak, forcefully cleaning measurement stack. Root measurement was " +
                rootMeasurement.getIdentifier() + " with parameters " + rootMeasurement.getParameters() + ", started at " + new Date(rootMeasurement.getStartTimeMillis()));
    }

    private void logWasKilled() {
        log.debug (new AFunction0NoThrow<String> () {
            @Override public String apply () {
                return "Interacting with a forcefully killed measurement. This is a consequence of A-SysMon cleaning up a (suspected) memory leak. It has no consequences aside from potentially weird measurements being reported.";
            }
        });
    }

    @Override public void finish(ASimpleSerialMeasurementImpl measurement) {
        if(ASysMonConfig.isGloballyDisabled()) {
            return;
        }
        if(wasKilled) {
            logWasKilled();
            return;
        }
        if (checkNotFinished ()) {
            return;
        }

        if (unfinished.peek() != measurement) {
            // This is basically a bug in using code: a measurement is 'finished' without being the innermost measurement
            //  of this hierarchy. The most typical reason for this - and the only one we can recover from - is that
            //  using code skipped finishing an inner measurement and is now finishing something further outside.
            if(unfinished.contains(measurement)) {
                log.warn("Calling 'finish' on a measurement " + measurement + " that is not innermost on the stack.");
                while(unfinished.peek() != measurement) {
                    log.warn("-> Implicitly unrolling the stack of open measurements: " + unfinished.peek());
                    finish(unfinished.peek());
                }
            }
            else {
                log.error (new IllegalStateException("Calling 'finish' on a measurement that is not on the measurement stack: " + measurement));
                return;
            }
        }

        final long finishedTimestamp = config.timer.getCurrentNanos();
        unfinished.pop();
        final List<AHierarchicalData> children = childrenStack.pop();
        final AHierarchicalData newData = new AHierarchicalData(true, measurement.getStartTimeMillis(), finishedTimestamp - measurement.getStartTimeNanos(), measurement.getIdentifier(), measurement.getParameters(), children);

        if(unfinished.isEmpty()) {
            // copy into a separate collection because the collection is modified in the loop
            for(ACollectingMeasurement m: new ArrayList<>(collectingMeasurements)) {
                finish(m);
            }
            isFinished = true;
            dataSink.onFinishedHierarchicalMeasurement(new AHierarchicalDataRoot(newData, startedFlows, joinedFlows));
        }
        else {
            childrenStack.peek().add(newData);
        }
    }

    @Override public void finish(ASimpleParallelMeasurementImpl m) {
        if(ASysMonConfig.isGloballyDisabled()) {
            return;
        }
        if(wasKilled) {
            logWasKilled();
            return;
        }
        if (checkNotFinished ()) {
            return;
        }
        final long finishedTimestamp = config.timer.getCurrentNanos();
        m.getChildrenOfParent().add(new AHierarchicalData(false, m.getStartTimeMillis(), finishedTimestamp - m.getStartTimeNanos(), m.getIdentifier(), m.getParameters(), Collections.<AHierarchicalData>emptyList()));
    }

    @Override
    public ACollectingMeasurement startCollectingMeasurement (final String identifier, boolean isSerial) {
        if(ASysMonConfig.isGloballyDisabled() || checkNotFinished()) {
            return ACollectingMeasurement.createDisabled ();
        }
        if(unfinished.isEmpty()) {
            // A collection measurement can never be top-level. To be on the safe side, we just ignore this, losing measurement data rather than risking non-robust code.
            // Declarative transactions can cause this if the start and especially the end of a transaction are not surrounded by an ASysMon measurement
            log.debug (new AFunction0NoThrow<String> () {
                @Override public String apply () {
                    return "Trying to start a collecting measurement outside of a measurement hierarchy: " + identifier;
                }
            });
            return ACollectingMeasurement.createDisabled ();
        }
        else {
            final ACollectingMeasurement result = ACollectingMeasurement.createRegular (config, this, isSerial, identifier, childrenStack.peek());
            collectingMeasurements.add(result);
            size += 1;
            return result;
        }
    }

    @Override public void finish(ACollectingMeasurement m) {
        if(ASysMonConfig.isGloballyDisabled()) {
            return;
        }
        if(wasKilled) {
            logWasKilled();
            return;
        }
        if (checkNotFinished ()) {
            return;
        }
        // each 'detail' becomes a synthetic child node under the collecting measurement
        final List<AHierarchicalData> children = new ArrayList<AHierarchicalData>();
        for(String detailIdentifier: m.getDetails().keySet()) {
            final ACollectingMeasurement.Detail detail = m.getDetails().get(detailIdentifier);
            children.add(new AHierarchicalData(true, m.getStartTimeMillis(), detail.getTotalNanos(), detailIdentifier, Collections.<String, String>emptyMap(), Collections.<AHierarchicalData>emptyList()));
        }
        final AHierarchicalData newData = new AHierarchicalData(m.isSerial(), m.getStartTimeMillis(), m.getTotalDurationNanos(), m.getIdentifier(), m.getParameters(), children);
        m.getChildrenOfParent().add(newData);
        collectingMeasurements.remove(m);
    }

    /**
     * notifies that this measurements contains the start of a new 'flow', i.e. it is the first point in a (potential) chain
     *  of measurements that are somehow correlated.
     */
    @Override public void onStartFlow(ACorrelationId correlationId) {
        if(!startedFlows.add(correlationId)) {
            log.warn("called 'startFlow' for flow " + correlationId + " twice");
        }
    }

    /**
     * notifies that this measurements is part of an existing 'flow', i.e. there is another measurement that 'started' a
     *  set of measurements that are somehow correlated.
     */
    @Override public void onJoinFlow(ACorrelationId correlationId) {
        if(!joinedFlows.add(correlationId)) {
            log.warn("called 'joinFlow' for flow " + correlationId + " twice");
        }
    }
}
| |
/* Copyright (c) 2014, Paul L. Snyder <paul@pataprogramming.com>,
* Daniel Dubois, Nicolo Calcavecchia.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* Any later version. It may also be redistributed and/or modified under the
* terms of the BSD 3-Clause License.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc., 59
* Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package myconet;
import peersim.cdsim.*;
import peersim.config.*;
import peersim.core.*;
import peersim.util.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.logging.*;
import java.io.*;
//import java.awt.*;
import java.awt.geom.*;
import java.awt.event.*;
import javax.swing.*;
import javax.swing.event.*;
import edu.uci.ics.jung.algorithms.importance.*;
import edu.uci.ics.jung.algorithms.layout.*;
import edu.uci.ics.jung.algorithms.shortestpath.*;
import edu.uci.ics.jung.algorithms.util.*;
import edu.uci.ics.jung.graph.*;
import edu.uci.ics.jung.graph.util.*;
import edu.uci.ics.jung.visualization.*;
import edu.uci.ics.jung.visualization.control.*;
import edu.uci.ics.jung.visualization.decorators.*;
import edu.uci.ics.jung.visualization.picking.*;
import edu.uci.ics.jung.visualization.renderers.*;
import org.apache.commons.collections15.*;
import org.apache.commons.collections15.functors.*;
public class UtilizationObserver implements Control {
private Graph<MycoNode,MycoEdge> graph;
//private Forest<MycoNode,MycoEdge> forest;
//private DistanceStatistics ds = new DistanceStatistics();
private static final String PAR_PERIOD = "period";
private static int period;
private static Set<ChangeListener> changeListeners =
new HashSet<ChangeListener>();
public static double nodeCount;
public static double biomassCount;
public static double hyphaCount;
public static double extendingCount;
public static double branchingCount;
public static double immobileCount;
public static double bulwarkCount;
public static double averageDegree;
public static double averageExtendingDegree;
public static double averageBranchingDegree;
public static double averageImmobileDegree;
public static double averageBulwarkDegree;
public static double totalDegree;
public static double totalExtendingDegree;
public static double totalBranchingDegree;
public static double totalImmobileDegree;
public static double totalBulwarkDegree;
public static double hyphaUtilization;
public static double extendingUtilization;
public static double branchingUtilization;
public static double immobileUtilization;
public static double bulwarkUtilization;
public static double totalHyphaCapacity;
public static double totalBiomassCapacity;
public static double totalExtendingCapacity;
public static double totalBranchingCapacity;
public static double totalImmobileCapacity;
public static double totalBulwarkCapacity;
public static double utilLow;
public static double util50;
public static double util80;
public static double util95;
public static double util100;
public static double utilOver;
public static double extendingUtilLow;
public static double extendingUtil50;
public static double extendingUtil80;
public static double extendingUtil95;
public static double extendingUtil100;
public static double extendingUtilOver;
public static double branchingUtilLow;
public static double branchingUtil50;
public static double branchingUtil80;
public static double branchingUtil95;
public static double branchingUtil100;
public static double branchingUtilOver;
public static double immobileUtilLow;
public static double immobileUtil50;
public static double immobileUtil80;
public static double immobileUtil95;
public static double immobileUtil100;
public static double immobileUtilOver;
public static double utilLowCount;
public static double util50Count;
public static double util80Count;
public static double util95Count;
public static double util100Count;
public static double utilOverCount;
public static double extendingUtilLowCount;
public static double extendingUtil50Count;
public static double extendingUtil80Count;
public static double extendingUtil95Count;
public static double extendingUtil100Count;
public static double extendingUtilOverCount;
public static double branchingUtilLowCount;
public static double branchingUtil50Count;
public static double branchingUtil80Count;
public static double branchingUtil95Count;
public static double branchingUtil100Count;
public static double branchingUtilOverCount;
public static double immobileUtilLowCount;
public static double immobileUtil50Count;
public static double immobileUtil80Count;
public static double immobileUtil95Count;
public static double immobileUtil100Count;
public static double immobileUtilOverCount;
public static double hyphaRatio;
public static double extendingHyphaRatio;
public static double branchingHyphaRatio;
public static double immobileHyphaRatio;
public static double bulwarkRatio;
public static double totalBranchingUtil;
public static double totalExtendingUtil;
public static double totalImmobileUtil;
public static double totalBulwarkUtil;
public static double averageUtil;
public static double averageExtendingUtil;
public static double averageBranchingUtil;
public static double averageImmobileUtil;
public static double averageBulwarkUtil;
public static double stableUtilization;
public static double stableCount;
public static double totalStableCapacity;
public static double stableHyphaRatio;
public UtilizationObserver(String prefix) {
period = Configuration.getInt(prefix + "." + PAR_PERIOD);
graph = JungGraphObserver.getGraph();
clearStats();
ExperimentWriter.addMetric(new Metric<Double>("extendingDegreeMean") {public Double fetch() { return UtilizationObserver.averageExtendingDegree; }});
ExperimentWriter.addMetric(new Metric<Double>("branchingDegreeMean") {public Double fetch() { return UtilizationObserver.averageBranchingDegree; }});
ExperimentWriter.addMetric(new Metric<Double>("immobileDegreeMean") {public Double fetch() { return UtilizationObserver.averageImmobileDegree; }});
ExperimentWriter.addMetric(new Metric<Double>("bulwarkDegreeMean") {public Double fetch() { return UtilizationObserver.averageBulwarkDegree; }});
ExperimentWriter.addMetric(new Metric<Integer>("totalDegree") {public Integer fetch() { return (new Double(UtilizationObserver.totalDegree)).intValue(); }});
ExperimentWriter.addMetric(new Metric<Integer>("extendingDegree") {public Integer fetch() { return (new Double(UtilizationObserver.totalExtendingDegree)).intValue(); }});
ExperimentWriter.addMetric(new Metric<Integer>("branchingDegree") {public Integer fetch() { return (new Double(UtilizationObserver.totalBranchingDegree)).intValue(); }});
ExperimentWriter.addMetric(new Metric<Integer>("immobileDegree") {public Integer fetch() { return (new Double(UtilizationObserver.totalImmobileDegree)).intValue(); }});
ExperimentWriter.addMetric(new Metric<Integer>("bulwarkDegree") {public Integer fetch() { return (new Double(UtilizationObserver.totalBulwarkDegree)).intValue(); }});
ExperimentWriter.addMetric(new Metric<Integer>("hyphaCapacity") {public Integer fetch() { return (new Double(UtilizationObserver.totalHyphaCapacity)).intValue(); }});
ExperimentWriter.addMetric(new Metric<Integer>("biomassCapacity") {public Integer fetch() { return (new Double(UtilizationObserver.totalBiomassCapacity)).intValue(); }});
ExperimentWriter.addMetric(new Metric<Integer>("extendingCapacity") {public Integer fetch() { return (new Double(UtilizationObserver.totalExtendingCapacity)).intValue(); }});
ExperimentWriter.addMetric(new Metric<Integer>("branchingCapacity") {public Integer fetch() { return (new Double(UtilizationObserver.totalBranchingCapacity)).intValue(); }});
ExperimentWriter.addMetric(new Metric<Integer>("immobileCapacity") {public Integer fetch() { return (new Double(UtilizationObserver.totalImmobileCapacity)).intValue(); }});
}
private static void clearStats() {
nodeCount = 0.0;
biomassCount = 0.0;
hyphaCount = 0.0;
extendingCount = 0.0;
branchingCount = 0.0;
immobileCount = 0.0;
bulwarkCount = 0.0;
averageDegree = 0.0;
averageExtendingDegree = 0.0;
averageBranchingDegree = 0.0;
averageImmobileDegree = 0.0;
averageBulwarkDegree = 0.0;
totalDegree = 0.0;
totalExtendingDegree = 0.0;
totalBranchingDegree = 0.0;
totalImmobileDegree = 0.0;
totalBulwarkDegree = 0.0;
hyphaUtilization = 0.0;
extendingUtilization = 0.0;
branchingUtilization = 0.0;
immobileUtilization = 0.0;
bulwarkUtilization = 0.0;
totalHyphaCapacity = 0.0;
totalBiomassCapacity = 0.0;
totalExtendingCapacity = 0.0;
totalBranchingCapacity = 0.0;
totalImmobileCapacity = 0.0;
totalBulwarkCapacity = 0.0;
utilLow = 0.0;
util50 = 0.0;
util80 = 0.0;
util95 = 0.0;
util100 = 0.0;
utilOver = 0.0;
extendingUtilLow = 0.0;
extendingUtil50 = 0.0;
extendingUtil80 = 0.0;
extendingUtil95 = 0.0;
extendingUtil100 = 0.0;
extendingUtilOver = 0.0;
branchingUtilLow = 0.0;
branchingUtil50 = 0.0;
branchingUtil80 = 0.0;
branchingUtil95 = 0.0;
branchingUtil100 = 0.0;
branchingUtilOver = 0.0;
immobileUtilLow = 0.0;
immobileUtil50 = 0.0;
immobileUtil80 = 0.0;
immobileUtil95 = 0.0;
immobileUtil100 = 0.0;
immobileUtilOver = 0.0;
utilLowCount = 0.0;
util50Count = 0.0;
util80Count = 0.0;
util95Count = 0.0;
util100Count = 0.0;
utilOverCount = 0.0;
extendingUtilLowCount = 0.0;
extendingUtil50Count = 0.0;
extendingUtil80Count = 0.0;
extendingUtil95Count = 0.0;
extendingUtil100Count = 0.0;
extendingUtilOverCount = 0.0;
branchingUtilLowCount = 0.0;
branchingUtil50Count = 0.0;
branchingUtil80Count = 0.0;
branchingUtil95Count = 0.0;
branchingUtil100Count = 0.0;
branchingUtilOverCount = 0.0;
immobileUtilLowCount = 0.0;
immobileUtil50Count = 0.0;
immobileUtil80Count = 0.0;
immobileUtil95Count = 0.0;
immobileUtil100Count = 0.0;
immobileUtilOverCount = 0.0;
hyphaRatio = 0.0;
extendingHyphaRatio = 0.0;
branchingHyphaRatio = 0.0;
immobileHyphaRatio = 0.0;
bulwarkRatio = 0.0;
totalBranchingUtil = 0.0;
totalExtendingUtil = 0.0;
totalImmobileUtil = 0.0;
totalBulwarkUtil = 0.0;
averageUtil = 0.0;
averageExtendingUtil = 0.0;
averageBranchingUtil = 0.0;
averageImmobileUtil = 0.0;
averageBulwarkUtil = 0.0;
stableUtilization = 0.0;
stableCount = 0.0;
totalStableCapacity = 0.0;
stableHyphaRatio = 0.0;
}
public static void addChangeListener(ChangeListener cl) {
changeListeners.add(cl);
}
public static void removeChangeListener(ChangeListener cl) {
if (changeListeners.contains(cl)) {
changeListeners.remove(cl);
}
}
public static void notifyChangeListeners() {
for (ChangeListener cl : changeListeners) {
cl.stateChanged(new ChangeEvent(UtilizationObserver.class));
}
}
public static void updateStats() {
clearStats();
MycoList bulwark = MycoCast.getBulwarkNodes();
MycoList extending = MycoCast.getExtendingHyphae();
MycoList branching = MycoCast.getBranchingHyphae();
MycoList immobile = MycoCast.getImmobileHyphae();
MycoList biomass = MycoCast.getAllBiomass();
MycoList hyphae = MycoCast.getAllHyphae();
MycoList all = MycoCast.getAllNodes();
bulwarkCount = bulwark.size();
hyphaCount = hyphae.size();
biomassCount = biomass.size();
extendingCount = extending.size();
branchingCount = branching.size();
immobileCount = immobile.size();
nodeCount = all.size();
double nodeUtil;
//totalDegree = 0.0;
//totalStableDegree = 0.0;
double totalUtil = 0.0;
double totalExtendingUtil = 0.0;
double totalBranchingUtil = 0.0;
double totalImmobileUtil = 0.0;
double totalBulwarkUtil = 0.0;
totalHyphaCapacity = 0.0;
totalStableCapacity = 0.0;
totalBulwarkCapacity = 0.0;
for (MycoNode n : biomass) {
totalBiomassCapacity += n.getHyphaData().getMaxCapacity();
}
for (MycoNode n : bulwark) {
totalBulwarkCapacity += n.getHyphaData().getMaxCapacity();
nodeUtil = (new Integer(n.getHyphaLink().degree())).doubleValue()
/ n.getHyphaData().getIdealBiomass();
totalBulwarkUtil += nodeUtil;
totalBulwarkDegree += n.getHyphaLink().degree();
}
for (MycoNode n : hyphae) {
//boolean isStable = n.getHyphaData().isBranching() ||
// n.getHyphaData().isImmobile();
boolean isExtending = n.getHyphaData().isExtending();
boolean isBranching = n.getHyphaData().isBranching();
boolean isImmobile = n.getHyphaData().isImmobile();
totalDegree += n.getHyphaLink().degree();
totalHyphaCapacity += n.getHyphaData().getMaxCapacity();
nodeUtil = (new Integer(n.getHyphaLink().sameBiomassDegree())).doubleValue()
/ n.getHyphaData().getIdealBiomass();
totalUtil += nodeUtil;
if (isExtending) {
totalExtendingUtil += nodeUtil;
totalExtendingDegree += n.getHyphaLink().degree();
totalExtendingCapacity += n.getHyphaData().getMaxCapacity();
}
if (isBranching) {
totalBranchingUtil += nodeUtil;
totalBranchingDegree += n.getHyphaLink().degree();
totalBranchingCapacity += n.getHyphaData().getMaxCapacity();
}
if (isImmobile) {
totalImmobileUtil += nodeUtil;
totalImmobileDegree += n.getHyphaLink().degree();
totalImmobileCapacity += n.getHyphaData().getMaxCapacity();
}
if (nodeUtil < 0.5) {
utilLowCount += 1.0;
if (isExtending) {
extendingUtilLowCount += 1.0;
} else if (isBranching) {
branchingUtilLowCount += 1.0;
} else if (isImmobile) {
immobileUtilLowCount += 1.0;
}
}
if (nodeUtil >= 0.5) {
util50Count += 1.0;
if (isExtending) {
extendingUtil50Count += 1.0;
} else if (isBranching) {
branchingUtil50Count += 1.0;
} else if (isImmobile) {
immobileUtil50Count += 1.0;
}
}
if (nodeUtil >= 0.8) {
util80Count += 1.0;
if (isExtending) {
extendingUtil80Count += 1.0;
} else if (isBranching) {
branchingUtil80Count += 1.0;
} else if (isImmobile) {
immobileUtil80Count += 1.0;
}
}
if (nodeUtil >= 0.95) {
util95Count += 1.0;
if (isExtending) {
extendingUtil95Count += 1.0;
} else if (isBranching) {
branchingUtil95Count += 1.0;
} else if (isImmobile) {
immobileUtil95Count += 1.0;
}
}
if (nodeUtil == 1.0) {
util100Count += 1.0;
if (isExtending) {
extendingUtil100Count += 1.0;
} else if (isBranching) {
branchingUtil100Count += 1.0;
} else if (isImmobile) {
immobileUtil100Count += 1.0;
}
}
if (nodeUtil > 1.0) {
utilOverCount += 1.0;
if (isExtending) {
extendingUtilOverCount += 1.0;
} else if (isBranching) {
branchingUtilOverCount += 1.0;
} else if (isImmobile) {
immobileUtilOverCount += 1.0;
}
}
}
if (hyphaCount != 0.0) {
utilLow = utilLowCount / hyphaCount;
util50 = util50Count / hyphaCount;
util80 = util80Count / hyphaCount;
util95 = util95Count / hyphaCount;
util100 = util100Count / hyphaCount;
utilOver = utilOverCount / hyphaCount;
averageDegree = totalDegree / hyphaCount;
averageUtil = totalUtil / hyphaCount;
}
if (extendingCount != 0.0) {
extendingUtilLow = extendingUtilLowCount / extendingCount;
extendingUtil50 = extendingUtil50Count / extendingCount;
extendingUtil80 = extendingUtil80Count / extendingCount;
extendingUtil95 = extendingUtil95Count / extendingCount;
extendingUtil100 = extendingUtil100Count / extendingCount;
extendingUtilOver = extendingUtilOverCount / extendingCount;
averageExtendingDegree = totalExtendingDegree / extendingCount;
averageExtendingUtil = totalExtendingUtil / extendingCount;
}
if (branchingCount != 0.0) {
branchingUtilLow = branchingUtilLowCount / branchingCount;
branchingUtil50 = branchingUtil50Count / branchingCount;
branchingUtil80 = branchingUtil80Count / branchingCount;
branchingUtil95 = branchingUtil95Count / branchingCount;
branchingUtil100 = branchingUtil100Count / branchingCount;
branchingUtilOver = branchingUtilOverCount / branchingCount;
averageBranchingDegree = totalBranchingDegree / branchingCount;
averageBranchingUtil = totalBranchingUtil / branchingCount;
}
if (immobileCount != 0.0) {
immobileUtilLow = immobileUtilLowCount / immobileCount;
immobileUtil50 = immobileUtil50Count / immobileCount;
immobileUtil80 = immobileUtil80Count / immobileCount;
immobileUtil95 = immobileUtil95Count / immobileCount;
immobileUtil100 = immobileUtil100Count / immobileCount;
immobileUtilOver = immobileUtilOverCount / immobileCount;
averageImmobileDegree = totalImmobileDegree / immobileCount;
averageImmobileUtil = totalImmobileUtil / immobileCount;
}
if (totalHyphaCapacity != 0.0) {
hyphaUtilization = totalDegree / totalHyphaCapacity;
}
if (totalExtendingCapacity != 0.0) {
extendingUtilization = totalExtendingDegree/totalExtendingCapacity;
}
if (totalBranchingCapacity != 0.0) {
branchingUtilization = totalBranchingDegree/totalBranchingCapacity;
}
if (totalImmobileCapacity != 0.0) {
immobileUtilization = totalImmobileDegree / totalImmobileCapacity;
}
if (totalBulwarkCapacity != 0.0) {
bulwarkUtilization = totalBulwarkDegree / totalBulwarkCapacity;
}
if (nodeCount != 0.0) {
hyphaRatio = hyphaCount / nodeCount;
extendingHyphaRatio = extendingCount / nodeCount;
branchingHyphaRatio = branchingCount / nodeCount;
immobileHyphaRatio = immobileCount / nodeCount;
bulwarkRatio = bulwarkCount / nodeCount;
}
stableCount = immobileCount + branchingCount;
if (stableCount != 0.0) {
stableUtilization = ((totalImmobileUtil +
totalBranchingUtil)
/ stableCount);
stableHyphaRatio = stableCount / nodeCount;
}
notifyChangeListeners();
}
private static Logger log =
Logger.getLogger(UtilizationObserver.class.getName());
public boolean execute() {
if (CDState.getCycle() % period != 0)
return false;
updateStats();
StringBuilder sb = new StringBuilder();
java.util.Formatter f = new java.util.Formatter(sb, Locale.US);
f.format("<50%%: %1.3f 50%%: %1.3f 80%%: %1.3f \n95%%: %1.3f 100%%: %1.3f Over%%: %1.3f\n", utilLow, util50, util80, util95, util100, utilOver);
f.format("Avg Degree: %.2f Avg Util: %1.3f Hypha Ratio: %1.3f\n",
averageDegree, averageUtil, hyphaRatio);
f.format("Hyphal Utilization: %1.3f Stable Hypha Utilization %1.3f\n",
hyphaUtilization, stableUtilization);
f.format("Total Hyphal Capacity: %1.0f Total Stable Hyphal Capacity: %1.0f\n",
totalHyphaCapacity, totalStableCapacity);
f.format("Network Size: %d\n", Network.size());
log.info(sb.toString());
return false;
}
}
| |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.app.util.bin.format.elf.extend;
import java.io.*;
import ghidra.app.util.bin.format.MemoryLoadable;
import ghidra.app.util.bin.format.elf.*;
import ghidra.program.model.address.Address;
import ghidra.program.model.address.AddressSpace;
import ghidra.program.model.lang.Language;
import ghidra.util.exception.CancelledException;
import ghidra.util.task.TaskMonitor;
public class PIC30_ElfExtension extends ElfExtension {
public static final int EM_DSPIC30F = 118; /* Microchip Technology dsPIC30F DSC */

// ELF Header Flags (e_flags) - device family bits
public static final int P30F = 1 << 0;
public static final int P30FSMPS = 1 << 1;
public static final int P33F = 1 << 2;
public static final int P24F = 1 << 3;
public static final int P24H = 1 << 4;
public static final int P24FK = 1 << 5;
public static final int P33E = 1 << 6;
public static final int P24E = 1 << 7;

// Section Header Flags (sh_flags)
public static final int SHF_MEMORY = (1 << 18); /* User-defined memory */
public static final int SHF_UNUSED = (1 << 19); /* Unused */
/* OS and processor-specific flags start at position 20 */
public static final int SHF_SECURE = (1 << 20); /* Secure segment */
public static final int SHF_BOOT = (1 << 21); /* Boot segment */
public static final int SHF_DMA = (1 << 22); /* DMA memory */
public static final int SHF_NOLOAD = (1 << 23); /* Do not allocate or load */
public static final int SHF_NEAR = (1 << 24); /* Near memory */
public static final int SHF_PERSIST = (1 << 25); /* Persistent */
public static final int SHF_XMEM = (1 << 26); /* X Memory */
public static final int SHF_YMEM = (1 << 27); /* Y Memory */
public static final int SHF_PSV = (1 << 28); /* Constants in program memory */
public static final int SHF_EEDATA = (1 << 29); /* Data Flash memory */
public static final int SHF_ABSOLUTE = (1 << 30); /* Absolute address */
public static final int SHF_REVERSE = (1 << 31); /* Reverse aligned; note: occupies the int sign bit */
/**
NOTES:
EDS/PSV Sections - section data resides with ROM space but is accessable via the
the RAM data space at 0x8000 - 0xFFFF with the use of page register. Page use
may vary by CPU (EDS, PSV low-word access, PSV high-word access). PSV high-word
access capability is only provided when EDS is supported. See page registers
DSRPAG and DSWPAG. Page registers must be non-zero when used. Page boundary
handling must be explicitly handled in code. EDS memory may be directly
accessed provided the page register as been
Three ways to access page memory:
1. Direct access using DSRPAG/DSWPAGpage registers (PIC24E, dsPIC33E and dsPIC33C).
With read/write page register set to non-zero value, offset 0..0x7FFF within
the page may be directly accessed by first setting bit-15 of offset before
performing a load or store to the range 0x8000..0xFFFF.
2. Table read/write instruction may be used by setting TBLPAG register and
performing operation with a table offset in the range 0..0x7FFF.
3. PSV direct access with PSVPAG register (PIC24F, PIC24H, dsPIC30F AND dsPIC33F).
Set PSV bit of CORCONL register, set page in PSVPAG register (macro psvpage() used
to obtain page from symbol). Access location with offset 0..0x7FFF (macro psvoffset() used
to obtain offset from symbol). Macro produces offset in the range 0x8000..0xFFFF.
**/
@Override
public boolean canHandle(ElfHeader elf) {
return elf.e_machine() == EM_DSPIC30F;
}
@Override
public boolean canHandle(ElfLoadHelper elfLoadHelper) {
// TODO: The PIC-30/24 utilize too many different processor names instead of
// variant names !!
return canHandle(elfLoadHelper.getElfHeader());
}
@Override
public String getDataTypeSuffix() {
return "_PIC30";
}
@Override
public void processElf(ElfLoadHelper elfLoadHelper, TaskMonitor monitor)
throws CancelledException {
// TODO: Create mapped blocks
}
@Override
public AddressSpace getPreferredSegmentAddressSpace(ElfLoadHelper elfLoadHelper,
ElfProgramHeader elfProgramHeader) {
Language language = elfLoadHelper.getProgram().getLanguage();
if (isDataLoad(elfProgramHeader)) {
return language.getDefaultDataSpace();
}
return language.getDefaultSpace();
}
@Override
public AddressSpace getPreferredSectionAddressSpace(ElfLoadHelper elfLoadHelper,
ElfSectionHeader elfSectionHeader) {
Language language = elfLoadHelper.getProgram().getLanguage();
if (isDataLoad(elfSectionHeader)) {
return language.getDefaultDataSpace();
}
return language.getDefaultSpace();
}
private long getAdjustedDataLoadSize(long dataLoadFileSize) {
return dataLoadFileSize / 2;
}
private boolean isDataLoad(ElfProgramHeader elfProgramHeader) {
return !elfProgramHeader.isExecute();
}
private boolean isDataLoad(ElfSectionHeader section) {
if (!section.isAlloc()) {
return isDebugSection(section);
}
return !section.isExecutable();
}
private boolean isDataLoad(MemoryLoadable loadable) {
if (loadable instanceof ElfSectionHeader) {
return isDataLoad((ElfSectionHeader)loadable);
}
return isDataLoad((ElfProgramHeader)loadable);
}
private boolean isDebugSection(ElfSectionHeader section) {
String name = section.getNameAsString();
return name.startsWith(".debug_") || ".comment".equals(name);
}
private boolean isDebugSection(MemoryLoadable loadable) {
if (loadable instanceof ElfSectionHeader) {
return isDebugSection((ElfSectionHeader)loadable);
}
return false;
}
@Override
public long getAdjustedLoadSize(ElfProgramHeader elfProgramHeader) {
long fileSize = elfProgramHeader.getFileSize();
return isDataLoad(elfProgramHeader) ? getAdjustedDataLoadSize(fileSize) : fileSize;
}
@Override
public long getAdjustedMemorySize(ElfProgramHeader elfProgramHeader) {
long rawSize = elfProgramHeader.getMemorySize();
return isDataLoad(elfProgramHeader) ? getAdjustedDataLoadSize(rawSize) : rawSize;
}
@Override
public long getAdjustedSize(ElfSectionHeader section) {
long rawSize = section.getSize();
return isDataLoad(section) ? getAdjustedDataLoadSize(rawSize) : rawSize;
}
@Override
public InputStream getFilteredLoadInputStream(ElfLoadHelper elfLoadHelper,
MemoryLoadable loadable, Address start, long dataLength, InputStream dataInput) {
Language language = elfLoadHelper.getProgram().getLanguage();
if (!isDataLoad(loadable) && !language.getDefaultDataSpace().equals(start.getAddressSpace().getPhysicalSpace())) {
return dataInput;
}
if (loadable instanceof ElfSectionHeader) {
ElfSectionHeader section = (ElfSectionHeader) loadable;
if (!elfLoadHelper.getElfHeader().isRelocatable() && (section.getFlags() & SHF_PSV) != 0) {
// TODO: this is really mapped into ROM space where PT_LOAD was done to physical memory
// In the absence of suitable mapping, we will load into RAM space
return new PIC30FilteredPSVDataInputStream(dataInput);
}
}
else {
return new PIC30FilteredPSVDataInputStream(dataInput);
}
// Data space loading pads after every byte with Microchip toolchain
// NOTE: this could vary and we may need to improve detection of this situation
return new PIC30FilteredDataInputStream(dataInput, !isDebugSection(loadable));
}
@Override
public boolean hasFilteredLoadInputStream(ElfLoadHelper elfLoadHelper, MemoryLoadable loadable,
Address start) {
if (loadable == null) {
return false;
}
if (isDataLoad(loadable)) {
return true;
}
Language language = elfLoadHelper.getProgram().getLanguage();
return language.getDefaultDataSpace().equals(start.getAddressSpace().getPhysicalSpace());
}
@Override
public int getDefaultAlignment(ElfLoadHelper elfLoadHelper) {
return 4; // alignment for external symbol allocation
}
private static class PIC30FilteredDataInputStream extends FilterInputStream {
// BYTES: <byte> <pad>
protected boolean padByteToggle;
protected long pos;
private final boolean checkPadding;
protected PIC30FilteredDataInputStream(InputStream in, boolean checkPadding) {
super(in);
padByteToggle = false; // first byte is data not padding
this.checkPadding = checkPadding;
}
protected int readNextByte() throws IOException {
int r = in.read();
if (checkPadding && padByteToggle && r != 0) {
// expected padding - debug sections appear to be inconsistent with filler
throw new IOException("expected Data padding byte, pos=" + pos);
}
++pos;
padByteToggle = !padByteToggle;
return r;
}
@Override
public int read() throws IOException {
while (padByteToggle) {
int r = readNextByte();
if (r < 0) {
return r;
}
}
return readNextByte();
}
@Override
public int read(byte b[], int off, int len) throws IOException {
if (b == null) {
throw new NullPointerException();
}
else if (off < 0 || len < 0 || len > b.length - off) {
throw new IndexOutOfBoundsException();
}
else if (len == 0) {
return 0;
}
int numRead = -1;
for (int i = 1; i <= len; i++) {
int c = read();
if (c == -1) {
break;
}
b[off++] = (byte) c;
numRead = i;
}
return numRead;
}
}
private static class PIC30FilteredPSVDataInputStream extends PIC30FilteredDataInputStream {
// BYTES: <byte0> <byte1> <pad0> <pad1>
private boolean firstByteToggle; // firstByte of data or pad
protected PIC30FilteredPSVDataInputStream(InputStream in) {
super(in, true);
firstByteToggle = true;
}
@Override
protected int readNextByte() throws IOException {
int r = in.read();
++pos;
if (!firstByteToggle) {
padByteToggle = !padByteToggle;
}
firstByteToggle = !firstByteToggle;
return r;
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.sql.fluent.models;
import com.azure.core.annotation.Fluent;
import com.azure.core.annotation.JsonFlatten;
import com.azure.core.management.ProxyResource;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.sql.models.DataMaskingFunction;
import com.azure.resourcemanager.sql.models.DataMaskingRuleState;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Inner model type describing a single database data masking rule, including
 * the target schema/table/column and the masking function configuration.
 */
@JsonFlatten
@Fluent
public class DataMaskingRuleInner extends ProxyResource {
    @JsonIgnore private final ClientLogger logger = new ClientLogger(DataMaskingRuleInner.class);

    /*
     * The location of the data masking rule.
     */
    @JsonProperty(value = "location", access = JsonProperty.Access.WRITE_ONLY)
    private String location;

    /**
     * Returns the Azure location of this data masking rule (service-assigned,
     * read-only).
     *
     * @return the location value.
     */
    public String location() {
        return location;
    }

    /*
     * The kind of Data Masking Rule. Metadata, used for Azure portal.
     */
    @JsonProperty(value = "kind", access = JsonProperty.Access.WRITE_ONLY)
    private String kind;

    /**
     * Returns the kind metadata used by the Azure portal (read-only).
     *
     * @return the kind value.
     */
    public String kind() {
        return kind;
    }

    /*
     * The rule Id.
     */
    @JsonProperty(value = "properties.id", access = JsonProperty.Access.WRITE_ONLY)
    private String idPropertiesId;

    /**
     * Returns the rule's identifier from the properties bag (read-only).
     *
     * @return the idPropertiesId value.
     */
    public String idPropertiesId() {
        return idPropertiesId;
    }

    /*
     * The alias name. This is a legacy parameter and is no longer used.
     */
    @JsonProperty(value = "properties.aliasName")
    private String aliasName;

    /**
     * Returns the alias name (legacy parameter, no longer used by the service).
     *
     * @return the aliasName value.
     */
    public String aliasName() {
        return aliasName;
    }

    /**
     * Sets the alias name (legacy parameter, no longer used by the service).
     *
     * @param aliasName the aliasName value to set.
     * @return the DataMaskingRuleInner object itself.
     */
    public DataMaskingRuleInner withAliasName(String aliasName) {
        this.aliasName = aliasName;
        return this;
    }

    /*
     * The rule state. Used to delete a rule. To delete an existing rule,
     * specify the schemaName, tableName, columnName, maskingFunction, and
     * specify ruleState as disabled. However, if the rule doesn't already
     * exist, the rule will be created with ruleState set to enabled,
     * regardless of the provided value of ruleState.
     */
    @JsonProperty(value = "properties.ruleState")
    private DataMaskingRuleState ruleState;

    /**
     * Returns the rule state. Deleting an existing rule is done by specifying
     * schemaName, tableName, columnName, maskingFunction and a disabled
     * ruleState; a rule that does not yet exist is always created enabled,
     * whatever ruleState is supplied.
     *
     * @return the ruleState value.
     */
    public DataMaskingRuleState ruleState() {
        return ruleState;
    }

    /**
     * Sets the rule state. Deleting an existing rule is done by specifying
     * schemaName, tableName, columnName, maskingFunction and a disabled
     * ruleState; a rule that does not yet exist is always created enabled,
     * whatever ruleState is supplied.
     *
     * @param ruleState the ruleState value to set.
     * @return the DataMaskingRuleInner object itself.
     */
    public DataMaskingRuleInner withRuleState(DataMaskingRuleState ruleState) {
        this.ruleState = ruleState;
        return this;
    }

    /*
     * The schema name on which the data masking rule is applied.
     */
    @JsonProperty(value = "properties.schemaName")
    private String schemaName;

    /**
     * Returns the schema the rule applies to.
     *
     * @return the schemaName value.
     */
    public String schemaName() {
        return schemaName;
    }

    /**
     * Sets the schema the rule applies to.
     *
     * @param schemaName the schemaName value to set.
     * @return the DataMaskingRuleInner object itself.
     */
    public DataMaskingRuleInner withSchemaName(String schemaName) {
        this.schemaName = schemaName;
        return this;
    }

    /*
     * The table name on which the data masking rule is applied.
     */
    @JsonProperty(value = "properties.tableName")
    private String tableName;

    /**
     * Returns the table the rule applies to.
     *
     * @return the tableName value.
     */
    public String tableName() {
        return tableName;
    }

    /**
     * Sets the table the rule applies to.
     *
     * @param tableName the tableName value to set.
     * @return the DataMaskingRuleInner object itself.
     */
    public DataMaskingRuleInner withTableName(String tableName) {
        this.tableName = tableName;
        return this;
    }

    /*
     * The column name on which the data masking rule is applied.
     */
    @JsonProperty(value = "properties.columnName")
    private String columnName;

    /**
     * Returns the column the rule applies to.
     *
     * @return the columnName value.
     */
    public String columnName() {
        return columnName;
    }

    /**
     * Sets the column the rule applies to.
     *
     * @param columnName the columnName value to set.
     * @return the DataMaskingRuleInner object itself.
     */
    public DataMaskingRuleInner withColumnName(String columnName) {
        this.columnName = columnName;
        return this;
    }

    /*
     * The masking function that is used for the data masking rule.
     */
    @JsonProperty(value = "properties.maskingFunction")
    private DataMaskingFunction maskingFunction;

    /**
     * Returns the masking function applied by this rule.
     *
     * @return the maskingFunction value.
     */
    public DataMaskingFunction maskingFunction() {
        return maskingFunction;
    }

    /**
     * Sets the masking function applied by this rule.
     *
     * @param maskingFunction the maskingFunction value to set.
     * @return the DataMaskingRuleInner object itself.
     */
    public DataMaskingRuleInner withMaskingFunction(DataMaskingFunction maskingFunction) {
        this.maskingFunction = maskingFunction;
        return this;
    }

    /*
     * The numberFrom property of the masking rule. Required if maskingFunction
     * is set to Number, otherwise this parameter will be ignored.
     */
    @JsonProperty(value = "properties.numberFrom")
    private String numberFrom;

    /**
     * Returns the lower bound used by the Number masking function; ignored for
     * any other masking function.
     *
     * @return the numberFrom value.
     */
    public String numberFrom() {
        return numberFrom;
    }

    /**
     * Sets the lower bound used by the Number masking function; ignored for
     * any other masking function.
     *
     * @param numberFrom the numberFrom value to set.
     * @return the DataMaskingRuleInner object itself.
     */
    public DataMaskingRuleInner withNumberFrom(String numberFrom) {
        this.numberFrom = numberFrom;
        return this;
    }

    /*
     * The numberTo property of the data masking rule. Required if
     * maskingFunction is set to Number, otherwise this parameter will be
     * ignored.
     */
    @JsonProperty(value = "properties.numberTo")
    private String numberTo;

    /**
     * Returns the upper bound used by the Number masking function; ignored for
     * any other masking function.
     *
     * @return the numberTo value.
     */
    public String numberTo() {
        return numberTo;
    }

    /**
     * Sets the upper bound used by the Number masking function; ignored for
     * any other masking function.
     *
     * @param numberTo the numberTo value to set.
     * @return the DataMaskingRuleInner object itself.
     */
    public DataMaskingRuleInner withNumberTo(String numberTo) {
        this.numberTo = numberTo;
        return this;
    }

    /*
     * If maskingFunction is set to Text, the number of characters to show
     * unmasked in the beginning of the string. Otherwise, this parameter will
     * be ignored.
     */
    @JsonProperty(value = "properties.prefixSize")
    private String prefixSize;

    /**
     * Returns how many leading characters the Text masking function leaves
     * unmasked; ignored for any other masking function.
     *
     * @return the prefixSize value.
     */
    public String prefixSize() {
        return prefixSize;
    }

    /**
     * Sets how many leading characters the Text masking function leaves
     * unmasked; ignored for any other masking function.
     *
     * @param prefixSize the prefixSize value to set.
     * @return the DataMaskingRuleInner object itself.
     */
    public DataMaskingRuleInner withPrefixSize(String prefixSize) {
        this.prefixSize = prefixSize;
        return this;
    }

    /*
     * If maskingFunction is set to Text, the number of characters to show
     * unmasked at the end of the string. Otherwise, this parameter will be
     * ignored.
     */
    @JsonProperty(value = "properties.suffixSize")
    private String suffixSize;

    /**
     * Returns how many trailing characters the Text masking function leaves
     * unmasked; ignored for any other masking function.
     *
     * @return the suffixSize value.
     */
    public String suffixSize() {
        return suffixSize;
    }

    /**
     * Sets how many trailing characters the Text masking function leaves
     * unmasked; ignored for any other masking function.
     *
     * @param suffixSize the suffixSize value to set.
     * @return the DataMaskingRuleInner object itself.
     */
    public DataMaskingRuleInner withSuffixSize(String suffixSize) {
        this.suffixSize = suffixSize;
        return this;
    }

    /*
     * If maskingFunction is set to Text, the character to use for masking the
     * unexposed part of the string. Otherwise, this parameter will be ignored.
     */
    @JsonProperty(value = "properties.replacementString")
    private String replacementString;

    /**
     * Returns the character used by the Text masking function to mask the
     * hidden part of the string; ignored for any other masking function.
     *
     * @return the replacementString value.
     */
    public String replacementString() {
        return replacementString;
    }

    /**
     * Sets the character used by the Text masking function to mask the hidden
     * part of the string; ignored for any other masking function.
     *
     * @param replacementString the replacementString value to set.
     * @return the DataMaskingRuleInner object itself.
     */
    public DataMaskingRuleInner withReplacementString(String replacementString) {
        this.replacementString = replacementString;
        return this;
    }

    /**
     * Validates the instance.
     *
     * @throws IllegalArgumentException thrown if the instance is not valid.
     */
    public void validate() {
        // No local constraints to enforce; all properties are optional here.
    }
}
| |
package name.reidmiller.sppreports.client;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.TreeSet;
import name.reidmiller.sppreports.model.GeneratorMix;
import name.reidmiller.sppreports.model.SamplingFrequency;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Duration;
import org.joda.time.Instant;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import au.com.bytecode.opencsv.CSVReader;
/**
 * Client for downloading and parsing SPP (Southwest Power Pool) generation-mix
 * CSV reports. The raw reports carry local timestamps with no offset or DST
 * information, so this client detects the fall DST transition and corrects the
 * ambiguous hour when building {@link java.util.Date} values.
 */
public class GeneratorMixClient {
	public static final String GENERATOR_MIX_REPORT_DATE_FORMAT = "M/d/yyyy H:mm";
	public static final DateTimeZone US_CENTRAL_ZONE = DateTimeZone.forID("America/Chicago");
	private Logger logger = LogManager.getLogger(this.getClass());
	private DateTimeFormatter centralTimeFormat;

	/**
	 * GeneratorMixClient constructor sets {@link #centralTimeFormat} using
	 * {@value #GENERATOR_MIX_REPORT_DATE_FORMAT} and {@link #US_CENTRAL_ZONE}.
	 */
	public GeneratorMixClient() {
		DateTimeFormatter localDateTimeFormat = DateTimeFormat
				.forPattern(GENERATOR_MIX_REPORT_DATE_FORMAT);
		this.centralTimeFormat = localDateTimeFormat.withZone(US_CENTRAL_ZONE);
	}

	/**
	 * @param samplingFrequency
	 *            Either five-minute or hourly report.
	 * @return List of {@link GeneratorMix} objects from the start of the
	 *         current year to the current date.
	 */
	public List<GeneratorMix> getDefaultGeneratorMixes(
			SamplingFrequency samplingFrequency) {
		return this.getGenMixesForYear(DateTime.now().getYear(),
				samplingFrequency);
	}

	/**
	 * Builds the report URL for one year of data.
	 *
	 * @param year
	 *            Year of report to request.
	 * @param samplingFrequency
	 *            Switches the report URL String between five-minute and hourly
	 *            using {@link SamplingFrequency#getUrlPart()}.
	 * @return SPP GenerationMix report URL string stitched together based on
	 *         parameters passed to method.
	 */
	public String getUrlString(int year, SamplingFrequency samplingFrequency) {
		String urlString = "http://www.spp.org/GenerationMix/" + year + "_"
				+ samplingFrequency.getUrlPart() + "_GenMix.csv";
		logger.debug("Parsing URL " + urlString);
		return urlString;
	}

	/**
	 * Iterates over the time range specified and repeatedly calls
	 * {@link #getGenMixesForYear(int, SamplingFrequency)} to build out the List
	 * of {@link GeneratorMix} objects. Items outside the date range are
	 * filtered out for the boundary years.
	 *
	 * @param samplingFrequency
	 *            Either five-minute or hourly report.
	 * @param startDate
	 *            Lower bound (inclusive) of {@link GeneratorMix} objects in
	 *            the List returned.
	 * @param endDate
	 *            Upper bound (inclusive) of {@link GeneratorMix} objects in
	 *            the List returned.
	 * @return List of {@link GeneratorMix} objects in the specified date range.
	 */
	public List<GeneratorMix> getGeneratorMixesInRange(
			SamplingFrequency samplingFrequency, Date startDate, Date endDate) {
		DateTime startDateTime = new DateTime(startDate);
		DateTime endDateTime = new DateTime(endDate);
		// Every calendar year touched by the range needs its own report fetch.
		TreeSet<Integer> yearRange = new TreeSet<Integer>();
		for (int y = startDateTime.getYear(); y <= endDateTime.getYear(); y++) {
			yearRange.add(y);
		}
		List<GeneratorMix> generatorMixes = new ArrayList<GeneratorMix>();
		for (int year : yearRange) {
			List<GeneratorMix> yearGenMixes = this.getGenMixesForYear(year,
					samplingFrequency);
			if (year > startDateTime.getYear() && year < endDateTime.getYear()) {
				// Interior year: keep every sample.
				generatorMixes.addAll(yearGenMixes);
			} else if (year == startDateTime.getYear()
					&& year == endDateTime.getYear()) {
				// Range begins and ends in the same year: clip both sides.
				for (GeneratorMix genMix : yearGenMixes) {
					if (genMix.getDate().compareTo(startDate) >= 0
							&& genMix.getDate().compareTo(endDate) <= 0) {
						generatorMixes.add(genMix);
					}
				}
			} else if (year == startDateTime.getYear()) {
				// First year of a multi-year range: clip the lower bound only.
				for (GeneratorMix genMix : yearGenMixes) {
					if (genMix.getDate().compareTo(startDate) >= 0) {
						generatorMixes.add(genMix);
					}
				}
			} else if (year == endDateTime.getYear()) {
				// Last year of a multi-year range: clip the upper bound only.
				for (GeneratorMix genMix : yearGenMixes) {
					if (genMix.getDate().compareTo(endDate) <= 0) {
						generatorMixes.add(genMix);
					}
				}
			}
		}
		return generatorMixes;
	}

	/**
	 * Retrieves the GeneratorMix objects for a given year. Because the CSV
	 * provides times without offset or daylight savings information, this
	 * method does a bit of work to find and correct the ambiguous fall-back
	 * hour, creating {@link Date} objects with proper offset information.
	 *
	 * @param year
	 *            Year of report.
	 * @param samplingFrequency
	 *            Five-minute or hourly sampling frequency controls which report
	 *            URL is created. Passed to
	 *            {@link #getUrlString(int, SamplingFrequency)} along with year.
	 * @return List of {@link GeneratorMix} objects for the year; empty if the
	 *         report could not be fetched (the failure is logged).
	 */
	public List<GeneratorMix> getGenMixesForYear(int year,
			SamplingFrequency samplingFrequency) {
		// An arbitrary date known to be in the middle of daylight savings.
		DateTime august1st = new DateTime(year, 8, 1, 0, 0, 0, 0, US_CENTRAL_ZONE);
		// Instant of transition from daylight savings to standard time.
		Instant cdtToCst = new Instant(US_CENTRAL_ZONE.nextTransition(august1st
				.toInstant().getMillis()));
		// Instant of last daylight savings sample in the report.
		Instant lastCdt = null;
		// When last sample in DST has been hit, trigger standard time fix.
		boolean startCstFix = false;
		// Number of records to fix changes for five-minute or hourly report.
		int numCstFixed = 0;
		int cstFixLimit = 0;
		switch (samplingFrequency) {
		case FIVE_MINUTES:
			lastCdt = cdtToCst.minus(Duration.standardMinutes(5));
			cstFixLimit = 12; // one hour of 5-minute samples
			break;
		case HOURLY:
			lastCdt = cdtToCst.minus(Duration.standardHours(1));
			cstFixLimit = 1; // one hourly sample
			break;
		}
		logger.debug("Last sample of CDT is "
				+ lastCdt.toDateTime(US_CENTRAL_ZONE));

		List<GeneratorMix> generatorMixes = new ArrayList<GeneratorMix>();
		String urlString = this.getUrlString(year, samplingFrequency);
		try {
			URL url = new URL(urlString);
			CSVReader reader = new CSVReader(new BufferedReader(
					new InputStreamReader(url.openStream())));
			// try/finally guarantees the reader (and underlying URL stream) is
			// closed even when a read or parse error aborts the loop; the
			// original only closed on the happy path and leaked otherwise.
			try {
				// Loop over each row of CSV report
				String[] csvLine = reader.readNext();
				for (int i = 0; csvLine != null; i++) {
					// Skip header row (i == 0) and empty rows
					if (i > 0 && csvLine[0] != null && !csvLine[0].isEmpty()) {
						GeneratorMix generatorMix = new GeneratorMix();
						DateTime genMixDateTime = centralTimeFormat
								.parseDateTime(csvLine[0]);
						// If DST flag has been set and fix count is under limit,
						// the parsed time is the repeated hour and is off by one.
						if (startCstFix && numCstFixed < cstFixLimit) {
							Instant incorrectCst = genMixDateTime.toInstant();
							Instant correctCdt = incorrectCst.plus(Duration
									.standardHours(1));
							logger.debug("Incorrect Date "
									+ incorrectCst.toDateTime(US_CENTRAL_ZONE)
									+ " corrected to "
									+ correctCdt.toDateTime(US_CENTRAL_ZONE));
							generatorMix.setDate(correctCdt.toDate());
							numCstFixed++;
						} else {
							generatorMix.setDate(genMixDateTime.toDate());
						}
						// When last DST row hit, trigger fix to start next iteration
						if (genMixDateTime.isEqual(lastCdt)) {
							startCstFix = true;
						}
						generatorMix.setCoal(Double.parseDouble(csvLine[1]));
						generatorMix.setHydro(Double.parseDouble(csvLine[2]));
						generatorMix.setDieselFuelOil(Double
								.parseDouble(csvLine[3]));
						generatorMix.setNaturalGas(Double.parseDouble(csvLine[4]));
						generatorMix.setNuclear(Double.parseDouble(csvLine[5]));
						generatorMix.setWind(Double.parseDouble(csvLine[6]));
						generatorMix.setMarketLoad(Double.parseDouble(csvLine[7]));
						generatorMixes.add(generatorMix);
					}
					// Step to next line
					csvLine = reader.readNext();
				}
			} finally {
				reader.close();
			}
		} catch (MalformedURLException e) {
			logger.warn("Could not create " + URL.class + " from \""
					+ urlString + "\"");
		} catch (IOException e) {
			// Log the full exception so the stack trace is not lost.
			logger.error("Failed to read generator mix report from "
					+ urlString, e);
		}
		return generatorMixes;
	}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.config.AbstractConfig;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Importance;
import org.apache.kafka.common.config.ConfigDef.Type;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.streams.processor.DefaultPartitionGrouper;
import org.apache.kafka.streams.processor.internals.KafkaStreamingPartitionAssignor;
import org.apache.kafka.streams.processor.internals.StreamThread;
import java.util.Map;
import static org.apache.kafka.common.config.ConfigDef.Range.atLeast;
public class StreamingConfig extends AbstractConfig {
private static final ConfigDef CONFIG;
/** <code>state.dir</code> */
public static final String STATE_DIR_CONFIG = "state.dir";
private static final String STATE_DIR_DOC = "Directory location for state store.";
/** <code>commit.interval.ms</code> */
public static final String COMMIT_INTERVAL_MS_CONFIG = "commit.interval.ms";
private static final String COMMIT_INTERVAL_MS_DOC = "The frequency with which to save the position of the processor.";
/** <code>poll.ms</code> */
public static final String POLL_MS_CONFIG = "poll.ms";
private static final String POLL_MS_DOC = "The amount of time in milliseconds to block waiting for input.";
/** <code>num.stream.threads</code> */
public static final String NUM_STREAM_THREADS_CONFIG = "num.stream.threads";
private static final String NUM_STREAM_THREADS_DOC = "The number of threads to execute stream processing.";
/** <code>num.stream.threads</code> */
public static final String NUM_STANDBY_REPLICAS_CONFIG = "num.standby.replicas";
private static final String NUM_STANDBY_REPLICAS_DOC = "The number of standby replicas for each task.";
/** <code>buffered.records.per.partition</code> */
public static final String BUFFERED_RECORDS_PER_PARTITION_CONFIG = "buffered.records.per.partition";
private static final String BUFFERED_RECORDS_PER_PARTITION_DOC = "The maximum number of records to buffer per partition.";
/** <code>state.cleanup.delay</code> */
public static final String STATE_CLEANUP_DELAY_MS_CONFIG = "state.cleanup.delay.ms";
private static final String STATE_CLEANUP_DELAY_MS_DOC = "The amount of time in milliseconds to wait before deleting state when a partition has migrated.";
/** <code>total.records.to.process</code> */
public static final String TOTAL_RECORDS_TO_PROCESS = "total.records.to.process";
private static final String TOTAL_RECORDS_TO_DOC = "Exit after processing this many records.";
/** <code>window.time.ms</code> */
public static final String WINDOW_TIME_MS_CONFIG = "window.time.ms";
private static final String WINDOW_TIME_MS_DOC = "Setting this to a non-negative value will cause the processor to get called "
+ "with this frequency even if there is no message.";
/** <code>timestamp.extractor</code> */
public static final String TIMESTAMP_EXTRACTOR_CLASS_CONFIG = "timestamp.extractor";
private static final String TIMESTAMP_EXTRACTOR_CLASS_DOC = "Timestamp extractor class that implements the <code>TimestampExtractor</code> interface.";
/** <code>partition.grouper</code> */
public static final String PARTITION_GROUPER_CLASS_CONFIG = "partition.grouper";
private static final String PARTITION_GROUPER_CLASS_DOC = "Partition grouper class that implements the <code>PartitionGrouper</code> interface.";
/** <code>client.id</code> */
public static final String CLIENT_ID_CONFIG = CommonClientConfigs.CLIENT_ID_CONFIG;
/** <code>key.serializer</code> */
public static final String KEY_SERIALIZER_CLASS_CONFIG = ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG;
/** <code>value.serializer</code> */
public static final String VALUE_SERIALIZER_CLASS_CONFIG = ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG;
/** <code>key.deserializer</code> */
public static final String KEY_DESERIALIZER_CLASS_CONFIG = ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG;
/** <code>value.deserializer</code> */
public static final String VALUE_DESERIALIZER_CLASS_CONFIG = ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG;
/** <code>metrics.sample.window.ms</code> */
public static final String METRICS_SAMPLE_WINDOW_MS_CONFIG = CommonClientConfigs.METRICS_SAMPLE_WINDOW_MS_CONFIG;
/** <code>metrics.num.samples</code> */
public static final String METRICS_NUM_SAMPLES_CONFIG = CommonClientConfigs.METRICS_NUM_SAMPLES_CONFIG;
/** <code>metric.reporters</code> */
public static final String METRIC_REPORTER_CLASSES_CONFIG = CommonClientConfigs.METRIC_REPORTER_CLASSES_CONFIG;
/**
* <code>bootstrap.servers</code>
*/
public static final String BOOTSTRAP_SERVERS_CONFIG = CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG;
private static final String SYSTEM_TEMP_DIRECTORY = System.getProperty("java.io.tmpdir");
// Builds the ConfigDef describing every supported streaming config key,
// its type, default value (where one exists), importance and doc string.
// define(...) calls that pass no default value declare REQUIRED configs.
static {
    CONFIG = new ConfigDef().define(CLIENT_ID_CONFIG,
                    Type.STRING,
                    "",
                    Importance.MEDIUM,
                    CommonClientConfigs.CLIENT_ID_DOC)
            .define(STATE_DIR_CONFIG,
                    Type.STRING,
                    SYSTEM_TEMP_DIRECTORY,
                    Importance.MEDIUM,
                    STATE_DIR_DOC)
            .define(COMMIT_INTERVAL_MS_CONFIG,
                    Type.LONG,
                    30000,
                    Importance.HIGH,
                    COMMIT_INTERVAL_MS_DOC)
            .define(POLL_MS_CONFIG,
                    Type.LONG,
                    100,
                    Importance.LOW,
                    POLL_MS_DOC)
            .define(NUM_STREAM_THREADS_CONFIG,
                    Type.INT,
                    1,
                    Importance.LOW,
                    NUM_STREAM_THREADS_DOC)
            .define(NUM_STANDBY_REPLICAS_CONFIG,
                    Type.INT,
                    0,
                    Importance.LOW,
                    NUM_STANDBY_REPLICAS_DOC)
            .define(BUFFERED_RECORDS_PER_PARTITION_CONFIG,
                    Type.INT,
                    1000,
                    Importance.LOW,
                    BUFFERED_RECORDS_PER_PARTITION_DOC)
            .define(STATE_CLEANUP_DELAY_MS_CONFIG,
                    Type.LONG,
                    60000,
                    Importance.LOW,
                    STATE_CLEANUP_DELAY_MS_DOC)
            // -1 sentinel: process an unbounded number of records
            .define(TOTAL_RECORDS_TO_PROCESS,
                    Type.LONG,
                    -1L,
                    Importance.LOW,
                    TOTAL_RECORDS_TO_DOC)
            // -1 sentinel: windowed punctuation disabled
            .define(WINDOW_TIME_MS_CONFIG,
                    Type.LONG,
                    -1L,
                    Importance.MEDIUM,
                    WINDOW_TIME_MS_DOC)
            // the serde, timestamp-extractor and bootstrap.servers entries below
            // are declared without defaults and are therefore mandatory
            .define(KEY_SERIALIZER_CLASS_CONFIG,
                    Type.CLASS,
                    Importance.HIGH,
                    ProducerConfig.KEY_SERIALIZER_CLASS_DOC)
            .define(VALUE_SERIALIZER_CLASS_CONFIG,
                    Type.CLASS,
                    Importance.HIGH,
                    ProducerConfig.VALUE_SERIALIZER_CLASS_DOC)
            .define(KEY_DESERIALIZER_CLASS_CONFIG,
                    Type.CLASS,
                    Importance.HIGH,
                    ConsumerConfig.KEY_DESERIALIZER_CLASS_DOC)
            .define(VALUE_DESERIALIZER_CLASS_CONFIG,
                    Type.CLASS,
                    Importance.HIGH,
                    ConsumerConfig.VALUE_DESERIALIZER_CLASS_DOC)
            .define(TIMESTAMP_EXTRACTOR_CLASS_CONFIG,
                    Type.CLASS,
                    Importance.HIGH,
                    TIMESTAMP_EXTRACTOR_CLASS_DOC)
            .define(PARTITION_GROUPER_CLASS_CONFIG,
                    Type.CLASS,
                    DefaultPartitionGrouper.class,
                    Importance.HIGH,
                    PARTITION_GROUPER_CLASS_DOC)
            .define(BOOTSTRAP_SERVERS_CONFIG,
                    Type.STRING,
                    Importance.HIGH,
                    // (sic) the misspelled constant name comes from upstream CommonClientConfigs
                    CommonClientConfigs.BOOSTRAP_SERVERS_DOC)
            .define(METRIC_REPORTER_CLASSES_CONFIG,
                    Type.LIST,
                    "",
                    Importance.LOW,
                    CommonClientConfigs.METRIC_REPORTER_CLASSES_DOC)
            .define(METRICS_SAMPLE_WINDOW_MS_CONFIG,
                    Type.LONG,
                    30000,
                    atLeast(0),
                    Importance.LOW,
                    CommonClientConfigs.METRICS_SAMPLE_WINDOW_MS_DOC)
            .define(METRICS_NUM_SAMPLES_CONFIG,
                    Type.INT,
                    2,
                    atLeast(1),
                    Importance.LOW,
                    CommonClientConfigs.METRICS_NUM_SAMPLES_DOC);
}
/**
 * Config keys that are passed internally between streaming components and are
 * not meant to be set by users; the double-underscore naming marks them as such.
 */
public static class InternalConfig {
    // used to hand the owning StreamThread instance to the partition assignor
    public static final String STREAM_THREAD_INSTANCE = "__stream.thread.instance__";
}
/**
 * Creates a streaming config from the supplied properties, validating them
 * against {@code CONFIG} (required keys, types, ranges) via the superclass.
 *
 * @param props user-supplied configuration key/value pairs
 */
public StreamingConfig(Map<?, ?> props) {
    super(CONFIG, props);
}
/**
 * Builds the config map for the main (group) consumer: the restore-consumer
 * settings plus the streaming partition assignor, the standby-replica count,
 * and a reference to the owning stream thread for the assignor to pick up.
 *
 * @param streamThread the thread that will own the consumer
 * @return a mutable map of consumer configuration properties
 */
public Map<String, Object> getConsumerConfigs(StreamThread streamThread) {
    Map<String, Object> consumerProps = getRestoreConsumerConfigs();
    // route partition assignment through the streaming-aware assignor
    consumerProps.put(ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY_CONFIG, KafkaStreamingPartitionAssignor.class.getName());
    // the assignor needs the thread instance and the standby replica count
    consumerProps.put(StreamingConfig.InternalConfig.STREAM_THREAD_INSTANCE, streamThread);
    consumerProps.put(StreamingConfig.NUM_STANDBY_REPLICAS_CONFIG, getInt(StreamingConfig.NUM_STANDBY_REPLICAS_CONFIG));
    return consumerProps;
}
/**
 * Builds the config map for the state-restore consumer. Identical to the base
 * consumer configuration except that no consumer group id is set, because a
 * restore consumer must not participate in group management.
 *
 * @return a mutable map of consumer configuration properties without a group id
 */
public Map<String, Object> getRestoreConsumerConfigs() {
    Map<String, Object> restoreProps = getBaseConsumerConfigs();
    // restore consumers read standalone; strip any configured group id
    restoreProps.remove(ConsumerConfig.GROUP_ID_CONFIG);
    return restoreProps;
}
/**
 * Common consumer configuration shared by the main and restore consumers:
 * the user-supplied originals with auto-commit forced off (commits are driven
 * by the streaming layer) and producer/extractor-only keys stripped out.
 *
 * @return a mutable map of base consumer configuration properties
 */
private Map<String, Object> getBaseConsumerConfigs() {
    Map<String, Object> baseProps = this.originals();
    // streaming manages offsets itself, so auto-commit must stay disabled
    baseProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
    // drop keys that are meaningless for a consumer
    baseProps.remove(StreamingConfig.TIMESTAMP_EXTRACTOR_CLASS_CONFIG);
    baseProps.remove(StreamingConfig.VALUE_SERIALIZER_CLASS_CONFIG);
    baseProps.remove(StreamingConfig.KEY_SERIALIZER_CLASS_CONFIG);
    return baseProps;
}
/**
 * Builds the config map for the producer: the user-supplied originals with a
 * default linger of 100ms for batching, and consumer/extractor-only keys
 * stripped out.
 *
 * @return a mutable map of producer configuration properties
 */
public Map<String, Object> getProducerConfigs() {
    Map<String, Object> producerProps = this.originals();
    // small linger to encourage batching of produced records
    producerProps.put(ProducerConfig.LINGER_MS_CONFIG, "100");
    // drop keys that are meaningless for a producer
    producerProps.remove(StreamingConfig.TIMESTAMP_EXTRACTOR_CLASS_CONFIG);
    producerProps.remove(StreamingConfig.VALUE_DESERIALIZER_CLASS_CONFIG);
    producerProps.remove(StreamingConfig.KEY_DESERIALIZER_CLASS_CONFIG);
    return producerProps;
}
// NOTE(review): these accessors return raw Serializer/Deserializer types;
// parameterizing them (e.g. Serializer<?>) would be cleaner but changes the
// public signature, so they are left as-is.

/** Instantiates and configures the user-supplied key serializer. */
public Serializer keySerializer() {
    return getConfiguredInstance(StreamingConfig.KEY_SERIALIZER_CLASS_CONFIG, Serializer.class);
}

/** Instantiates and configures the user-supplied value serializer. */
public Serializer valueSerializer() {
    return getConfiguredInstance(StreamingConfig.VALUE_SERIALIZER_CLASS_CONFIG, Serializer.class);
}

/** Instantiates and configures the user-supplied key deserializer. */
public Deserializer keyDeserializer() {
    return getConfiguredInstance(StreamingConfig.KEY_DESERIALIZER_CLASS_CONFIG, Deserializer.class);
}

/** Instantiates and configures the user-supplied value deserializer. */
public Deserializer valueDeserializer() {
    return getConfiguredInstance(StreamingConfig.VALUE_DESERIALIZER_CLASS_CONFIG, Deserializer.class);
}
/** Prints the full config reference as an HTML table (used to generate docs). */
public static void main(String[] args) {
    System.out.println(CONFIG.toHtmlTable());
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.bootstrap;
import org.elasticsearch.Version;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
/**
* Simple check for duplicate class files across the classpath.
* <p>
* This class checks for incompatibilities in the following ways:
* <ul>
* <li>Checks that class files are not duplicated across jars.</li>
* <li>Checks any {@code X-Compile-Target-JDK} value in the jar
* manifest is compatible with current JRE</li>
* <li>Checks any {@code X-Compile-Elasticsearch-Version} value in
* the jar manifest is compatible with the current ES</li>
* </ul>
*/
public class JarHell {
    /** no instantiation */
    private JarHell() {}

    /** Simple driver class, can be used eg. from builds. Returns non-zero on jar-hell */
    @SuppressForbidden(reason = "command line tool")
    public static void main(String[] args) throws Exception {
        System.out.println("checking for jar hell...");
        checkJarHell();
        System.out.println("no jar hell found");
    }

    /**
     * Checks the current classpath for duplicate classes
     * @throws IllegalStateException if jar hell was found
     */
    public static void checkJarHell() throws Exception {
        ClassLoader loader = JarHell.class.getClassLoader();
        ESLogger logger = Loggers.getLogger(JarHell.class);
        if (logger.isDebugEnabled()) {
            logger.debug("java.class.path: {}", System.getProperty("java.class.path"));
            logger.debug("sun.boot.class.path: {}", System.getProperty("sun.boot.class.path"));
            if (loader instanceof URLClassLoader) {
                logger.debug("classloader urls: {}", Arrays.toString(((URLClassLoader) loader).getURLs()));
            }
        }
        checkJarHell(parseClassPath());
    }

    /**
     * Parses the classpath into an array of URLs
     * @return array of URLs
     * @throws IllegalStateException if the classpath contains empty elements
     */
    public static URL[] parseClassPath() {
        return parseClassPath(System.getProperty("java.class.path"));
    }

    /**
     * Parses the classpath into a set of URLs. For testing.
     * @param classPath classpath to parse (typically the system property {@code java.class.path})
     * @return array of URLs
     * @throws IllegalStateException if the classpath contains empty elements
     */
    @SuppressForbidden(reason = "resolves against CWD because that is how classpaths work")
    static URL[] parseClassPath(String classPath) {
        String pathSeparator = System.getProperty("path.separator");
        String fileSeparator = System.getProperty("file.separator");
        String[] elements = classPath.split(pathSeparator);
        URL[] urlElements = new URL[elements.length];
        for (int i = 0; i < elements.length; i++) {
            String element = elements[i];
            // Technically empty classpath element behaves like CWD.
            // So below is the "correct" code, however in practice with ES, this is usually just a misconfiguration,
            // from old shell scripts left behind or something:
            //   if (element.isEmpty()) {
            //      element = System.getProperty("user.dir");
            //   }
            // Instead we just throw an exception, and keep it clean.
            if (element.isEmpty()) {
                throw new IllegalStateException("Classpath should not contain empty elements! (outdated shell script from a previous version?) classpath='" + classPath + "'");
            }
            // we should be able to just Paths.get() each element, but unfortunately this is not the
            // whole story on how classpath parsing works: if you want to know, start at sun.misc.Launcher,
            // be sure to stop before you tear out your eyes. we just handle the "alternative" filename
            // specification which java seems to allow, explicitly, right here...
            if (element.startsWith("/") && "\\".equals(fileSeparator)) {
                // "correct" the entry to become a normal entry
                // change to correct file separators
                element = element.replace("/", "\\");
                // if there is a drive letter, nuke the leading separator
                if (element.length() >= 3 && element.charAt(2) == ':') {
                    element = element.substring(1);
                }
            }
            // now just parse as ordinary file
            try {
                urlElements[i] = PathUtils.get(element).toUri().toURL();
            } catch (MalformedURLException e) {
                // should not happen, as we use the filesystem API
                throw new RuntimeException(e);
            }
        }
        return urlElements;
    }

    /**
     * Checks the set of URLs for duplicate classes
     * @param urls classpath elements (jars or class directories) to inspect
     * @throws IllegalStateException if jar hell was found
     */
    @SuppressForbidden(reason = "needs JarFile for speed, just reading entries")
    public static void checkJarHell(URL[] urls) throws Exception {
        ESLogger logger = Loggers.getLogger(JarHell.class);
        // we don't try to be sneaky and use deprecated/internal/not portable stuff
        // like sun.boot.class.path, and with jigsaw we don't yet have a way to get
        // a "list" at all. So just exclude any elements underneath the java home
        String javaHome = System.getProperty("java.home");
        logger.debug("java.home: {}", javaHome);
        final Map<String, Path> clazzes = new HashMap<>(32768);
        Set<Path> seenJars = new HashSet<>();
        for (final URL url : urls) {
            final Path path = PathUtils.get(url.toURI());
            // exclude system resources
            if (path.startsWith(javaHome)) {
                logger.debug("excluding system resource: {}", path);
                continue;
            }
            if (path.toString().endsWith(".jar")) {
                if (!seenJars.add(path)) {
                    logger.debug("excluding duplicate classpath element: {}", path);
                    continue; // we can't fail because of sheistiness with joda-time
                }
                logger.debug("examining jar: {}", path);
                try (JarFile file = new JarFile(path.toString())) {
                    Manifest manifest = file.getManifest();
                    if (manifest != null) {
                        checkManifest(manifest, path);
                    }
                    // inspect entries
                    Enumeration<JarEntry> elements = file.entries();
                    while (elements.hasMoreElements()) {
                        String entry = elements.nextElement().getName();
                        if (entry.endsWith(".class")) {
                            // for jar format, the separator is defined as /
                            entry = entry.replace('/', '.').substring(0, entry.length() - 6);
                            checkClass(clazzes, entry, path);
                        }
                    }
                }
            } else {
                logger.debug("examining directory: {}", path);
                // case for tests: where we have class files in the classpath
                final Path root = PathUtils.get(url.toURI());
                final String sep = root.getFileSystem().getSeparator();
                Files.walkFileTree(root, new SimpleFileVisitor<Path>() {
                    @Override
                    public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                        String entry = root.relativize(file).toString();
                        if (entry.endsWith(".class")) {
                            // normalize with the os separator
                            entry = entry.replace(sep, ".").substring(0, entry.length() - 6);
                            checkClass(clazzes, entry, path);
                        }
                        return super.visitFile(file, attrs);
                    }
                });
            }
        }
    }

    /** inspect manifest for sure incompatibilities */
    static void checkManifest(Manifest manifest, Path jar) {
        // give a nice error if jar requires a newer java version
        String targetVersion = manifest.getMainAttributes().getValue("X-Compile-Target-JDK");
        if (targetVersion != null) {
            checkVersionFormat(targetVersion);
            checkJavaVersion(jar.toString(), targetVersion);
        }
        // give a nice error if jar is compiled against different es version
        String systemESVersion = Version.CURRENT.toString();
        String targetESVersion = manifest.getMainAttributes().getValue("X-Compile-Elasticsearch-Version");
        if (targetESVersion != null && targetESVersion.equals(systemESVersion) == false) {
            throw new IllegalStateException(jar + " requires Elasticsearch " + targetESVersion
                    + ", your system: " + systemESVersion);
        }
    }

    /** Validates that {@code targetVersion} is a well-formed dotted-decimal version string. */
    public static void checkVersionFormat(String targetVersion) {
        if (!JavaVersion.isValid(targetVersion)) {
            throw new IllegalStateException(
                    String.format(
                            Locale.ROOT,
                            "version string must be a sequence of nonnegative decimal integers separated by \".\"'s and may have leading zeros but was %s",
                            targetVersion
                    )
            );
        }
    }

    /**
     * Checks that the java specification version {@code targetVersion}
     * required by {@code resource} is compatible with the current installation.
     */
    public static void checkJavaVersion(String resource, String targetVersion) {
        JavaVersion version = JavaVersion.parse(targetVersion);
        if (JavaVersion.current().compareTo(version) < 0) {
            throw new IllegalStateException(
                    String.format(
                            Locale.ROOT,
                            // fixed: the message previously contained a stray colon
                            // ("requires Java %s:,") producing garbled output
                            "%s requires Java %s, your system: %s",
                            resource,
                            targetVersion,
                            JavaVersion.current().toString()
                    )
            );
        }
    }

    /**
     * Records {@code clazz} as living in {@code jarpath} and fails if it was
     * already seen, modulo a list of known offenders that are whitelisted.
     */
    static void checkClass(Map<String, Path> clazzes, String clazz, Path jarpath) {
        Path previous = clazzes.put(clazz, jarpath);
        if (previous != null) {
            if (previous.equals(jarpath)) {
                if (clazz.startsWith("org.apache.xmlbeans")) {
                    return; // https://issues.apache.org/jira/browse/XMLBEANS-499
                }
                // throw a better exception in this ridiculous case.
                // unfortunately the zip file format allows this buggy possibility
                // UweSays: It can, but should be considered as bug :-)
                throw new IllegalStateException("jar hell!" + System.lineSeparator() +
                        "class: " + clazz + System.lineSeparator() +
                        "exists multiple times in jar: " + jarpath + " !!!!!!!!!");
            } else {
                if (clazz.startsWith("org.apache.log4j") || clazz.startsWith("org.slf4j.impl")) {
                    return; // go figure, jar hell for what should be System.out.println...
                }
                if (clazz.equals("org.joda.time.base.BaseDateTime")) {
                    return; // apparently this is intentional... clean this up
                }
                if (clazz.startsWith("org.apache.lucene.util.LuceneTestCase")) {
                    return; // for modified version of LuceneTestCase to ignore cassandra static variables leaks.
                }
                // workaround for cassandra thrift
                if (clazz.startsWith("org.apache.cassandra.thrift")) {
                    return; // Because org.apache.commons.collections.FastHashMap in commons-collections and commons-beanutils
                }
                // workaround for hadoop
                if (clazz.startsWith("org.apache.commons")) {
                    return; // Because org.apache.commons.collections.FastHashMap in commons-collections and commons-beanutils
                }
                if (clazz.startsWith("org.apache.jasper")) {
                    return; // Because of Hadoop
                }
                throw new IllegalStateException("jar hell!" + System.lineSeparator() +
                        "class: " + clazz + System.lineSeparator() +
                        "jar1: " + previous + System.lineSeparator() +
                        "jar2: " + jarpath);
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.tests.integration.xa;
import javax.management.MBeanServer;
import javax.management.MBeanServerFactory;
import javax.transaction.xa.XAResource;
import javax.transaction.xa.Xid;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.api.core.client.ClientConsumer;
import org.apache.activemq.artemis.api.core.client.ClientMessage;
import org.apache.activemq.artemis.api.core.client.ClientProducer;
import org.apache.activemq.artemis.api.core.client.ClientSession;
import org.apache.activemq.artemis.api.core.client.ClientSessionFactory;
import org.apache.activemq.artemis.api.core.client.ServerLocator;
import org.apache.activemq.artemis.api.core.management.QueueControl;
import org.apache.activemq.artemis.core.config.Configuration;
import org.apache.activemq.artemis.core.config.StoreConfiguration;
import org.apache.activemq.artemis.core.server.ActiveMQServer;
import org.apache.activemq.artemis.core.settings.impl.AddressSettings;
import org.apache.activemq.artemis.core.transaction.impl.XidImpl;
import org.apache.activemq.artemis.jms.client.ActiveMQBytesMessage;
import org.apache.activemq.artemis.jms.client.ActiveMQTextMessage;
import org.apache.activemq.artemis.tests.integration.IntegrationTestLogger;
import org.apache.activemq.artemis.tests.integration.management.ManagementControlHelper;
import org.apache.activemq.artemis.tests.util.ActiveMQTestBase;
import org.apache.activemq.artemis.utils.UUIDGenerator;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
public class BasicXaRecoveryTest extends ActiveMQTestBase {
private static IntegrationTestLogger log = IntegrationTestLogger.LOGGER;

// per-address settings applied to the server before each test
private final Map<String, AddressSettings> addressSettings = new HashMap<>();
private ActiveMQServer server;
private ClientSession clientSession;
private ClientProducer clientProducer;
private ClientConsumer clientConsumer;
private ClientSessionFactory sessionFactory;
private Configuration configuration;
// the queue every basic send/receive test produces to and consumes from
private final SimpleString atestq = new SimpleString("atestq");
private ServerLocator locator;
private MBeanServer mbeanServer;
// which persistence backend this parameterized run exercises (FILE or DATABASE)
protected StoreConfiguration.StoreType storeType;

/** @param storeType persistence backend to run this test instance against */
public BasicXaRecoveryTest(StoreConfiguration.StoreType storeType) {
    this.storeType = storeType;
}
/** Runs every test once against the file journal and once against the JDBC store. */
@Parameterized.Parameters(name = "storeType={0}")
public static Collection<Object[]> data() {
    return Arrays.asList(
            new Object[]{StoreConfiguration.StoreType.FILE},
            new Object[]{StoreConfiguration.StoreType.DATABASE});
}
/**
 * Starts a persistent broker (file journal or embedded Derby, depending on
 * {@link #storeType}) with JMX enabled, then creates the XA client session,
 * producer and consumer used by the tests.
 */
@Override
@Before
public void setUp() throws Exception {
    super.setUp();
    if (storeType == StoreConfiguration.StoreType.DATABASE) {
        // load the Derby embedded driver so the JDBC store can connect
        Class.forName("org.apache.derby.jdbc.EmbeddedDriver").newInstance();
    }
    addressSettings.clear();
    if (storeType == StoreConfiguration.StoreType.DATABASE) {
        configuration = createDefaultJDBCConfig(true).setJMXManagementEnabled(true);
    } else {
        configuration = createDefaultInVMConfig().setJMXManagementEnabled(true);
    }
    mbeanServer = MBeanServerFactory.createMBeanServer();
    // persistence enabled (true) so prepared transactions survive restarts
    server = createServer(true, configuration, -1, -1, addressSettings);
    server.setMBeanServer(mbeanServer);
    // start the server
    server.start();
    // then we create a client as normal
    createClients(true, false);
}
/**
 * Releases the MBean server and, for JDBC runs, drops the store tables so
 * the next parameterized run starts from a clean database.
 */
@Override
@After
public void tearDown() throws Exception {
    MBeanServerFactory.releaseMBeanServer(mbeanServer);
    super.tearDown();
    if (storeType == StoreConfiguration.StoreType.DATABASE) {
        destroyTables(Arrays.asList("BINDINGS", "LARGE_MESSAGE", "MESSAGE", "NODE_MANAGER_STORE"));
    }
}
// Each scenario below is exercised twice: with the clients simply recreated
// (false) and with a full server stop/restart (true), to prove that prepared
// XA transactions are recoverable in both cases. The shared logic lives in
// the boolean-parameter overloads further down.

@Test
public void testBasicSendWithCommit() throws Exception {
    testBasicSendWithCommit(false);
}

@Test
public void testBasicSendWithCommitWithServerStopped() throws Exception {
    testBasicSendWithCommit(true);
}

@Test
public void testBasicSendWithRollback() throws Exception {
    testBasicSendWithRollback(false);
}

@Test
public void testBasicSendWithRollbackWithServerStopped() throws Exception {
    testBasicSendWithRollback(true);
}

@Test
public void testMultipleBeforeSendWithCommit() throws Exception {
    testMultipleBeforeSendWithCommit(false);
}

@Test
public void testMultipleBeforeSendWithCommitWithServerStopped() throws Exception {
    testMultipleBeforeSendWithCommit(true);
}

@Test
public void testMultipleTxSendWithCommit() throws Exception {
    testMultipleTxSendWithCommit(false);
}

@Test
public void testMultipleTxSendWithCommitWithServerStopped() throws Exception {
    testMultipleTxSendWithCommit(true);
}

@Test
public void testMultipleTxSendWithRollback() throws Exception {
    testMultipleTxSendWithRollback(false);
}

@Test
public void testMultipleTxSendWithRollbackWithServerStopped() throws Exception {
    testMultipleTxSendWithRollback(true);
}

@Test
public void testMultipleTxSendWithCommitAndRollback() throws Exception {
    testMultipleTxSendWithCommitAndRollback(false);
}

@Test
public void testMultipleTxSendWithCommitAndRollbackWithServerStopped() throws Exception {
    testMultipleTxSendWithCommitAndRollback(true);
}

@Test
public void testMultipleTxSameXidSendWithCommit() throws Exception {
    testMultipleTxSameXidSendWithCommit(false);
}

@Test
public void testMultipleTxSameXidSendWithCommitWithServerStopped() throws Exception {
    testMultipleTxSameXidSendWithCommit(true);
}

@Test
public void testBasicReceiveWithCommit() throws Exception {
    testBasicReceiveWithCommit(false);
}

@Test
public void testBasicReceiveWithCommitWithServerStopped() throws Exception {
    testBasicReceiveWithCommit(true);
}

@Test
public void testBasicReceiveWithRollback() throws Exception {
    testBasicReceiveWithRollback(false);
}

@Test
public void testBasicReceiveWithRollbackWithServerStopped() throws Exception {
    testBasicReceiveWithRollback(true);
}

@Test
public void testMultipleTxReceiveWithCommit() throws Exception {
    testMultipleTxReceiveWithCommit(false);
}

@Test
public void testMultipleTxReceiveWithCommitWithServerStopped() throws Exception {
    testMultipleTxReceiveWithCommit(true);
}

@Test
public void testMultipleTxReceiveWithRollback() throws Exception {
    testMultipleTxReceiveWithRollback(false);
}

@Test
public void testMultipleTxReceiveWithRollbackWithServerStopped() throws Exception {
    testMultipleTxReceiveWithRollback(true);
}
// Paging tests are skipped for the DATABASE store: paging is only supported
// by the file journal in this test's configuration.

@Test
public void testPagingServerRestarted() throws Exception {
    if (storeType == StoreConfiguration.StoreType.DATABASE)
        return;
    verifyPaging(true);
}

@Test
public void testPaging() throws Exception {
    if (storeType == StoreConfiguration.StoreType.DATABASE)
        return;
    verifyPaging(false);
}
/**
 * Sends enough messages inside a prepared XA transaction to force the
 * "pagequeue" address into paging, restarts (or just reconnects), verifies
 * the prepared xid is reported by recovery, commits it, and finally drains
 * all 1000 messages to prove none were lost.
 *
 * @param restartServer true to stop/restart the broker before recovery,
 *                      false to only recreate the clients
 */
public void verifyPaging(final boolean restartServer) throws Exception {
    // redundant guard: both callers already skip DATABASE runs
    if (storeType == StoreConfiguration.StoreType.DATABASE)
        return;
    Xid xid = new XidImpl("xa1".getBytes(), 1, UUIDGenerator.getInstance().generateStringUUID().getBytes());
    SimpleString pageQueue = new SimpleString("pagequeue");
    // small max-size forces paging well before 1000 x 512-byte messages
    AddressSettings pageAddressSettings = new AddressSettings().setMaxSizeBytes(100 * 1024).setPageSizeBytes(10 * 1024);
    addressSettings.put(pageQueue.toString(), pageAddressSettings);
    addSettings();
    clientSession.createQueue(pageQueue, pageQueue, null, true);
    clientSession.start(xid, XAResource.TMNOFLAGS);
    ClientProducer pageProducer = clientSession.createProducer(pageQueue);
    for (int i = 0; i < 1000; i++) {
        ClientMessage m = createBytesMessage(new byte[512], true);
        pageProducer.send(m);
    }
    pageProducer.close();
    clientSession.end(xid, XAResource.TMSUCCESS);
    // leave the transaction in the prepared state across the restart
    clientSession.prepare(xid);
    BasicXaRecoveryTest.log.info("*** stopping and restarting");
    if (restartServer) {
        stopAndRestartServer();
    } else {
        recreateClients();
    }
    // recovery scan must report exactly our prepared xid
    // NOTE(review): assertEquals args are (actual, expected) here — swapped
    // relative to JUnit convention; harmless but failure messages mislead.
    Xid[] xids = clientSession.recover(XAResource.TMSTARTRSCAN);
    Assert.assertEquals(xids.length, 1);
    Assert.assertEquals(xids[0].getFormatId(), xid.getFormatId());
    ActiveMQTestBase.assertEqualsByteArrays(xids[0].getBranchQualifier(), xid.getBranchQualifier());
    ActiveMQTestBase.assertEqualsByteArrays(xids[0].getGlobalTransactionId(), xid.getGlobalTransactionId());
    clientSession.commit(xid, false);
    clientSession.close();
    clientSession = sessionFactory.createSession(false, false, false);
    clientSession.start();
    ClientConsumer pageConsumer = clientSession.createConsumer(pageQueue);
    for (int i = 0; i < 1000; i++) {
        ClientMessage m = pageConsumer.receive(10000);
        Assert.assertNotNull(m);
        m.acknowledge();
        // commit per message so depaging progresses as we drain
        clientSession.commit();
    }
    Assert.assertNull(pageConsumer.receiveImmediate());
}
// Rollback-while-paging variants; likewise file-journal only.

@Test
public void testRollbackPaging() throws Exception {
    if (storeType == StoreConfiguration.StoreType.DATABASE)
        return;
    testRollbackPaging(false);
}

@Test
public void testRollbackPagingServerRestarted() throws Exception {
    if (storeType == StoreConfiguration.StoreType.DATABASE)
        return;
    testRollbackPaging(true);
}
/**
 * Same setup as {@link #verifyPaging(boolean)} but rolls the prepared
 * transaction back after recovery and asserts the queue is empty.
 *
 * @param restartServer true to stop/restart the broker before recovery,
 *                      false to only recreate the clients
 */
public void testRollbackPaging(final boolean restartServer) throws Exception {
    Xid xid = new XidImpl("xa1".getBytes(), 1, UUIDGenerator.getInstance().generateStringUUID().getBytes());
    SimpleString pageQueue = new SimpleString("pagequeue");
    AddressSettings pageAddressSettings = new AddressSettings().setMaxSizeBytes(100 * 1024).setPageSizeBytes(10 * 1024);
    addressSettings.put(pageQueue.toString(), pageAddressSettings);
    addSettings();
    clientSession.createQueue(pageQueue, pageQueue, null, true);
    clientSession.start(xid, XAResource.TMNOFLAGS);
    ClientProducer pageProducer = clientSession.createProducer(pageQueue);
    // NOTE(review): unlike verifyPaging, the producer is never closed here;
    // it is released implicitly when the session is torn down.
    for (int i = 0; i < 1000; i++) {
        ClientMessage m = createBytesMessage(new byte[512], true);
        pageProducer.send(m);
    }
    clientSession.end(xid, XAResource.TMSUCCESS);
    clientSession.prepare(xid);
    if (restartServer) {
        stopAndRestartServer();
    } else {
        recreateClients();
    }
    Xid[] xids = clientSession.recover(XAResource.TMSTARTRSCAN);
    Assert.assertEquals(1, xids.length);
    Assert.assertEquals(xids[0].getFormatId(), xid.getFormatId());
    ActiveMQTestBase.assertEqualsByteArrays(xids[0].getBranchQualifier(), xid.getBranchQualifier());
    ActiveMQTestBase.assertEqualsByteArrays(xids[0].getGlobalTransactionId(), xid.getGlobalTransactionId());
    // rolling back discards every paged message in the transaction
    clientSession.rollback(xid);
    clientSession.start();
    ClientConsumer pageConsumer = clientSession.createConsumer(pageQueue);
    Assert.assertNull(pageConsumer.receiveImmediate());
    // Management message (from createQueue) will not be taken into account again as it is nonPersistent
}
/** Exercises the non-persistent scenario for both outcomes, commit and rollback. */
@Test
public void testNonPersistent() throws Exception {
    testNonPersistent(true);
    testNonPersistent(false);
}

/**
 * Prepares an XA transaction containing four non-persistent messages,
 * restarts the server, and verifies the prepared xid still shows up in the
 * recovery scan (the transaction record is persistent even though the
 * messages are not), then completes it with commit or rollback.
 *
 * @param commit true to commit the recovered transaction, false to roll back
 */
public void testNonPersistent(final boolean commit) throws Exception {
    Xid xid = new XidImpl("xa1".getBytes(), 1, UUIDGenerator.getInstance().generateStringUUID().getBytes());
    // second argument false => non-durable messages
    ClientMessage m1 = createTextMessage("m1", false);
    ClientMessage m2 = createTextMessage("m2", false);
    ClientMessage m3 = createTextMessage("m3", false);
    ClientMessage m4 = createTextMessage("m4", false);
    clientSession.start(xid, XAResource.TMNOFLAGS);
    clientProducer.send(m1);
    clientProducer.send(m2);
    clientProducer.send(m3);
    clientProducer.send(m4);
    clientSession.end(xid, XAResource.TMSUCCESS);
    clientSession.prepare(xid);
    stopAndRestartServer();
    Xid[] xids = clientSession.recover(XAResource.TMSTARTRSCAN);
    Assert.assertEquals(xids.length, 1);
    Assert.assertEquals(xids[0].getFormatId(), xid.getFormatId());
    ActiveMQTestBase.assertEqualsByteArrays(xids[0].getBranchQualifier(), xid.getBranchQualifier());
    ActiveMQTestBase.assertEqualsByteArrays(xids[0].getGlobalTransactionId(), xid.getGlobalTransactionId());
    // TMENDRSCAN terminates the scan; no further xids expected
    xids = clientSession.recover(XAResource.TMENDRSCAN);
    Assert.assertEquals(xids.length, 0);
    if (commit) {
        clientSession.commit(xid, false);
    } else {
        clientSession.rollback(xid);
    }
    // once completed, the xid must no longer appear in recovery
    xids = clientSession.recover(XAResource.TMSTARTRSCAN);
    Assert.assertEquals(xids.length, 0);
}
/**
 * Prepares ten distinct XA transactions (committing only the third), restarts
 * the server, and checks that exactly the nine still-pending xids are
 * reported by the recovery scan.
 */
@Test
public void testNonPersistentMultipleIDs() throws Exception {
    for (int i = 0; i < 10; i++) {
        Xid xid = new XidImpl("xa1".getBytes(), 1, UUIDGenerator.getInstance().generateStringUUID().getBytes());
        ClientMessage m1 = createTextMessage("m1", false);
        ClientMessage m2 = createTextMessage("m2", false);
        ClientMessage m3 = createTextMessage("m3", false);
        ClientMessage m4 = createTextMessage("m4", false);
        clientSession.start(xid, XAResource.TMNOFLAGS);
        clientProducer.send(m1);
        clientProducer.send(m2);
        clientProducer.send(m3);
        clientProducer.send(m4);
        clientSession.end(xid, XAResource.TMSUCCESS);
        clientSession.prepare(xid);
        if (i == 2) {
            // commit one of the ten so only nine remain in-doubt
            clientSession.commit(xid, false);
        }
        recreateClients();
    }
    stopAndRestartServer();
    Xid[] xids = clientSession.recover(XAResource.TMSTARTRSCAN);
    Assert.assertEquals(9, xids.length);
}
/**
 * Sends four persistent messages in a prepared XA transaction, survives a
 * restart (or reconnect), recovers and commits the xid, and verifies all
 * four messages arrive in order.
 *
 * @param stopServer true to stop/restart the broker before recovery,
 *                   false to only recreate the clients
 */
public void testBasicSendWithCommit(final boolean stopServer) throws Exception {
    Xid xid = new XidImpl("xa1".getBytes(), 1, UUIDGenerator.getInstance().generateStringUUID().getBytes());
    ClientMessage m1 = createTextMessage("m1");
    ClientMessage m2 = createTextMessage("m2");
    ClientMessage m3 = createTextMessage("m3");
    ClientMessage m4 = createTextMessage("m4");
    clientSession.start(xid, XAResource.TMNOFLAGS);
    clientProducer.send(m1);
    clientProducer.send(m2);
    clientProducer.send(m3);
    clientProducer.send(m4);
    clientSession.end(xid, XAResource.TMSUCCESS);
    clientSession.prepare(xid);
    if (stopServer) {
        stopAndRestartServer();
    } else {
        recreateClients();
    }
    // NOTE(review): assertEquals args are (actual, expected) throughout — swapped
    // relative to JUnit convention; harmless but failure messages mislead.
    Xid[] xids = clientSession.recover(XAResource.TMSTARTRSCAN);
    Assert.assertEquals(xids.length, 1);
    Assert.assertEquals(xids[0].getFormatId(), xid.getFormatId());
    ActiveMQTestBase.assertEqualsByteArrays(xids[0].getBranchQualifier(), xid.getBranchQualifier());
    ActiveMQTestBase.assertEqualsByteArrays(xids[0].getGlobalTransactionId(), xid.getGlobalTransactionId());
    xids = clientSession.recover(XAResource.TMENDRSCAN);
    Assert.assertEquals(xids.length, 0);
    clientSession.commit(xid, false);
    clientSession.start();
    // all four messages must be delivered, in send order
    ClientMessage m = clientConsumer.receive(1000);
    Assert.assertNotNull(m);
    Assert.assertEquals(m.getBodyBuffer().readString(), "m1");
    m = clientConsumer.receive(1000);
    Assert.assertNotNull(m);
    Assert.assertEquals(m.getBodyBuffer().readString(), "m2");
    m = clientConsumer.receive(1000);
    Assert.assertNotNull(m);
    Assert.assertEquals(m.getBodyBuffer().readString(), "m3");
    m = clientConsumer.receive(1000);
    Assert.assertNotNull(m);
    Assert.assertEquals(m.getBodyBuffer().readString(), "m4");
}
/**
 * Sends four messages in a prepared XA transaction, survives a restart (or
 * reconnect), recovers and ROLLS BACK the xid, and verifies the queue
 * delivers nothing.
 *
 * @param stopServer true to stop/restart the broker before recovery,
 *                   false to only recreate the clients
 */
public void testBasicSendWithRollback(final boolean stopServer) throws Exception {
    Xid xid = new XidImpl("xa1".getBytes(), 1, UUIDGenerator.getInstance().generateStringUUID().getBytes());
    ClientMessage m1 = createTextMessage("m1");
    ClientMessage m2 = createTextMessage("m2");
    ClientMessage m3 = createTextMessage("m3");
    ClientMessage m4 = createTextMessage("m4");
    clientSession.start(xid, XAResource.TMNOFLAGS);
    clientProducer.send(m1);
    clientProducer.send(m2);
    clientProducer.send(m3);
    clientProducer.send(m4);
    clientSession.end(xid, XAResource.TMSUCCESS);
    clientSession.prepare(xid);
    BasicXaRecoveryTest.log.info("shutting down server");
    if (stopServer) {
        stopAndRestartServer();
    } else {
        recreateClients();
    }
    BasicXaRecoveryTest.log.info("restarted");
    Xid[] xids = clientSession.recover(XAResource.TMSTARTRSCAN);
    Assert.assertEquals(xids.length, 1);
    Assert.assertEquals(xids[0].getFormatId(), xid.getFormatId());
    ActiveMQTestBase.assertEqualsByteArrays(xids[0].getBranchQualifier(), xid.getBranchQualifier());
    ActiveMQTestBase.assertEqualsByteArrays(xids[0].getGlobalTransactionId(), xid.getGlobalTransactionId());
    xids = clientSession.recover(XAResource.TMENDRSCAN);
    Assert.assertEquals(xids.length, 0);
    clientSession.rollback(xid);
    clientSession.start();
    // rolled back => nothing to consume
    ClientMessage m = clientConsumer.receiveImmediate();
    Assert.assertNull(m);
}
/**
 * Sends m1..m4 from a non-XA transacted session that is closed WITHOUT
 * committing (so those sends are rolled back and never reach the queue),
 * then sends m5..m8 in a prepared XA branch. After a restart/reconnect the
 * branch is recovered and committed; only m5..m8 must be delivered.
 *
 * @param stopServer {@code true} to fully stop/restart the broker,
 *                   {@code false} to only recreate the client session.
 */
public void testMultipleBeforeSendWithCommit(final boolean stopServer) throws Exception {
   Xid xid = new XidImpl("xa1".getBytes(), 1, UUIDGenerator.getInstance().generateStringUUID().getBytes());
   // Non-XA session with autoCommitSends == false: closing it without a
   // commit discards the pending sends.
   ClientSession clientSession2 = sessionFactory.createSession(false, false, true);
   ClientProducer clientProducer2 = clientSession2.createProducer(atestq);
   for (String text : new String[]{"m1", "m2", "m3", "m4"}) {
      clientProducer2.send(createTextMessage(text));
   }
   clientSession2.close();
   clientSession.start(xid, XAResource.TMNOFLAGS);
   for (String text : new String[]{"m5", "m6", "m7", "m8"}) {
      clientProducer.send(createTextMessage(text));
   }
   clientSession.end(xid, XAResource.TMSUCCESS);
   clientSession.prepare(xid);
   if (stopServer) {
      stopAndRestartServer();
   } else {
      recreateClients();
   }
   Xid[] xids = clientSession.recover(XAResource.TMSTARTRSCAN);
   Assert.assertEquals(1, xids.length);
   Assert.assertEquals(xid.getFormatId(), xids[0].getFormatId());
   ActiveMQTestBase.assertEqualsByteArrays(xid.getBranchQualifier(), xids[0].getBranchQualifier());
   ActiveMQTestBase.assertEqualsByteArrays(xid.getGlobalTransactionId(), xids[0].getGlobalTransactionId());
   xids = clientSession.recover(XAResource.TMENDRSCAN);
   Assert.assertEquals(0, xids.length);
   clientSession.commit(xid, false);
   clientSession.start();
   // Only the committed XA sends are visible.
   for (String text : new String[]{"m5", "m6", "m7", "m8"}) {
      ClientMessage m = clientConsumer.receive(1000);
      Assert.assertNotNull(m);
      Assert.assertEquals(text, m.getBodyBuffer().readString());
   }
}
/**
 * Prepares two independent XA branches (xid2 carrying m1..m4, xid carrying
 * m5..m8), restarts/reconnects, verifies both branches are recoverable,
 * commits both and consumes all eight messages. xid is committed first, so
 * its messages (m5..m8) are delivered ahead of xid2's (m1..m4).
 *
 * @param stopServer {@code true} to fully stop/restart the broker,
 *                   {@code false} to only recreate the client session.
 */
public void testMultipleTxSendWithCommit(final boolean stopServer) throws Exception {
   Xid xid = new XidImpl("xa1".getBytes(), 1, UUIDGenerator.getInstance().generateStringUUID().getBytes());
   Xid xid2 = new XidImpl("xa2".getBytes(), 1, UUIDGenerator.getInstance().generateStringUUID().getBytes());
   ClientSession clientSession2 = sessionFactory.createSession(true, false, true);
   ClientProducer clientProducer2 = clientSession2.createProducer(atestq);
   clientSession2.start(xid2, XAResource.TMNOFLAGS);
   for (String text : new String[]{"m1", "m2", "m3", "m4"}) {
      clientProducer2.send(createTextMessage(text));
   }
   clientSession2.end(xid2, XAResource.TMSUCCESS);
   clientSession2.prepare(xid2);
   clientSession2.close();
   clientSession.start(xid, XAResource.TMNOFLAGS);
   for (String text : new String[]{"m5", "m6", "m7", "m8"}) {
      clientProducer.send(createTextMessage(text));
   }
   clientSession.end(xid, XAResource.TMSUCCESS);
   clientSession.prepare(xid);
   if (stopServer) {
      stopAndRestartServer();
   } else {
      recreateClients();
   }
   Xid[] xids = clientSession.recover(XAResource.TMSTARTRSCAN);
   Assert.assertEquals(2, xids.length);
   assertEqualXids(xids, xid, xid2);
   xids = clientSession.recover(XAResource.TMENDRSCAN);
   Assert.assertEquals(0, xids.length);
   clientSession.commit(xid, false);
   clientSession.commit(xid2, false);
   clientSession.start();
   // xid committed first, so m5..m8 precede m1..m4.
   for (String text : new String[]{"m5", "m6", "m7", "m8", "m1", "m2", "m3", "m4"}) {
      ClientMessage m = clientConsumer.receive(1000);
      Assert.assertNotNull(m);
      Assert.assertEquals(text, m.getBodyBuffer().readString());
   }
}
/**
 * Prepares two XA branches (xid2 with m1..m4, xid with m5..m8), then after
 * a restart/reconnect rolls both back and verifies nothing was delivered.
 *
 * @param stopServer {@code true} to fully stop/restart the broker,
 *                   {@code false} to only recreate the client session.
 */
public void testMultipleTxSendWithRollback(final boolean stopServer) throws Exception {
   Xid xid = new XidImpl("xa1".getBytes(), 1, UUIDGenerator.getInstance().generateStringUUID().getBytes());
   Xid xid2 = new XidImpl("xa2".getBytes(), 1, UUIDGenerator.getInstance().generateStringUUID().getBytes());
   ClientSession clientSession2 = sessionFactory.createSession(true, false, true);
   ClientProducer clientProducer2 = clientSession2.createProducer(atestq);
   clientSession2.start(xid2, XAResource.TMNOFLAGS);
   for (String text : new String[]{"m1", "m2", "m3", "m4"}) {
      clientProducer2.send(createTextMessage(text));
   }
   clientSession2.end(xid2, XAResource.TMSUCCESS);
   clientSession2.prepare(xid2);
   clientSession2.close();
   clientSession.start(xid, XAResource.TMNOFLAGS);
   for (String text : new String[]{"m5", "m6", "m7", "m8"}) {
      clientProducer.send(createTextMessage(text));
   }
   clientSession.end(xid, XAResource.TMSUCCESS);
   clientSession.prepare(xid);
   if (stopServer) {
      stopAndRestartServer();
   } else {
      recreateClients();
   }
   // Both prepared branches must be recoverable exactly once.
   Xid[] xids = clientSession.recover(XAResource.TMSTARTRSCAN);
   Assert.assertEquals(2, xids.length);
   assertEqualXids(xids, xid, xid2);
   xids = clientSession.recover(XAResource.TMENDRSCAN);
   Assert.assertEquals(0, xids.length);
   clientSession.rollback(xid);
   clientSession.rollback(xid2);
   clientSession.start();
   // Rolled-back sends must never reach the queue.
   ClientMessage m = clientConsumer.receiveImmediate();
   Assert.assertNull(m);
}
/**
 * Prepares two XA branches (xid2 with m1..m4, xid with m5..m8), then after
 * a restart/reconnect rolls back xid and commits xid2: only m1..m4 must be
 * delivered, followed by an empty queue.
 *
 * @param stopServer {@code true} to fully stop/restart the broker,
 *                   {@code false} to only recreate the client session.
 */
public void testMultipleTxSendWithCommitAndRollback(final boolean stopServer) throws Exception {
   Xid xid = new XidImpl("xa1".getBytes(), 1, UUIDGenerator.getInstance().generateStringUUID().getBytes());
   Xid xid2 = new XidImpl("xa2".getBytes(), 1, UUIDGenerator.getInstance().generateStringUUID().getBytes());
   ClientSession clientSession2 = sessionFactory.createSession(true, false, true);
   ClientProducer clientProducer2 = clientSession2.createProducer(atestq);
   clientSession2.start(xid2, XAResource.TMNOFLAGS);
   for (String text : new String[]{"m1", "m2", "m3", "m4"}) {
      clientProducer2.send(createTextMessage(text));
   }
   clientSession2.end(xid2, XAResource.TMSUCCESS);
   clientSession2.prepare(xid2);
   clientSession2.close();
   clientSession.start(xid, XAResource.TMNOFLAGS);
   for (String text : new String[]{"m5", "m6", "m7", "m8"}) {
      clientProducer.send(createTextMessage(text));
   }
   clientSession.end(xid, XAResource.TMSUCCESS);
   clientSession.prepare(xid);
   if (stopServer) {
      stopAndRestartServer();
   } else {
      recreateClients();
   }
   Xid[] xids = clientSession.recover(XAResource.TMSTARTRSCAN);
   Assert.assertEquals(2, xids.length);
   assertEqualXids(xids, xid, xid2);
   xids = clientSession.recover(XAResource.TMENDRSCAN);
   Assert.assertEquals(0, xids.length);
   clientSession.rollback(xid);
   clientSession.commit(xid2, false);
   clientSession.start();
   // Only xid2's messages survive; xid's sends were rolled back.
   for (String text : new String[]{"m1", "m2", "m3", "m4"}) {
      ClientMessage m = clientConsumer.receive(1000);
      Assert.assertNotNull(m);
      Assert.assertEquals(text, m.getBodyBuffer().readString());
   }
   ClientMessage m = clientConsumer.receiveImmediate();
   Assert.assertNull(m);
}
/**
 * Two sessions work on the SAME xid: the second session sends m1..m4 and
 * ends its association, then the main session joins (TMJOIN), sends m5..m8
 * and prepares the single combined branch. After a restart/reconnect the
 * one xid is recovered and committed, delivering all eight messages.
 *
 * @param stopServer {@code true} to fully stop/restart the broker,
 *                   {@code false} to only recreate the client session.
 */
public void testMultipleTxSameXidSendWithCommit(final boolean stopServer) throws Exception {
   Xid xid = new XidImpl("xa1".getBytes(), 1, UUIDGenerator.getInstance().generateStringUUID().getBytes());
   ClientSession clientSession2 = sessionFactory.createSession(true, false, true);
   ClientProducer clientProducer2 = clientSession2.createProducer(atestq);
   clientSession2.start(xid, XAResource.TMNOFLAGS);
   for (String text : new String[]{"m1", "m2", "m3", "m4"}) {
      clientProducer2.send(createTextMessage(text));
   }
   clientSession2.end(xid, XAResource.TMSUCCESS);
   clientSession2.close();
   // Join the existing transaction branch rather than starting a new one.
   clientSession.start(xid, XAResource.TMJOIN);
   for (String text : new String[]{"m5", "m6", "m7", "m8"}) {
      clientProducer.send(createTextMessage(text));
   }
   clientSession.end(xid, XAResource.TMSUCCESS);
   clientSession.prepare(xid);
   if (stopServer) {
      stopAndRestartServer();
   } else {
      recreateClients();
   }
   Xid[] xids = clientSession.recover(XAResource.TMSTARTRSCAN);
   Assert.assertEquals(1, xids.length);
   Assert.assertEquals(xid.getFormatId(), xids[0].getFormatId());
   ActiveMQTestBase.assertEqualsByteArrays(xid.getBranchQualifier(), xids[0].getBranchQualifier());
   ActiveMQTestBase.assertEqualsByteArrays(xid.getGlobalTransactionId(), xids[0].getGlobalTransactionId());
   xids = clientSession.recover(XAResource.TMENDRSCAN);
   Assert.assertEquals(0, xids.length);
   clientSession.commit(xid, false);
   clientSession.start();
   for (String text : new String[]{"m1", "m2", "m3", "m4", "m5", "m6", "m7", "m8"}) {
      ClientMessage m = clientConsumer.receive(1000);
      Assert.assertNotNull(m);
      Assert.assertEquals(text, m.getBodyBuffer().readString());
   }
}
/**
 * Consumes four pre-sent messages inside an XA branch, prepares it,
 * restarts/reconnects, recovers and commits the branch, then verifies the
 * queue is empty and nothing remains in delivery (the acks were made
 * durable by the commit).
 *
 * @param stopServer {@code true} to fully stop/restart the broker,
 *                   {@code false} to only recreate the client session.
 */
public void testBasicReceiveWithCommit(final boolean stopServer) throws Exception {
   Xid xid = new XidImpl("xa1".getBytes(), 1, UUIDGenerator.getInstance().generateStringUUID().getBytes());
   // Seed the queue from a plain auto-committing session.
   ClientSession clientSession2 = sessionFactory.createSession(false, true, true);
   ClientProducer clientProducer2 = clientSession2.createProducer(atestq);
   for (String text : new String[]{"m1", "m2", "m3", "m4"}) {
      clientProducer2.send(createTextMessage(text));
   }
   clientSession2.close();
   clientSession.start(xid, XAResource.TMNOFLAGS);
   clientSession.start();
   for (String text : new String[]{"m1", "m2", "m3", "m4"}) {
      ClientMessage m = clientConsumer.receive(1000);
      // Null-check BEFORE dereferencing so a missing message fails the
      // assertion instead of throwing a NullPointerException.
      Assert.assertNotNull(m);
      m.acknowledge();
      Assert.assertEquals(text, m.getBodyBuffer().readString());
   }
   clientSession.end(xid, XAResource.TMSUCCESS);
   Assert.assertEquals("Expected XA_OK", XAResource.XA_OK, clientSession.prepare(xid));
   if (stopServer) {
      stopAndRestartServer();
   } else {
      recreateClients();
   }
   Xid[] xids = clientSession.recover(XAResource.TMSTARTRSCAN);
   Assert.assertEquals(1, xids.length);
   Assert.assertEquals(xid.getFormatId(), xids[0].getFormatId());
   ActiveMQTestBase.assertEqualsByteArrays(xid.getBranchQualifier(), xids[0].getBranchQualifier());
   ActiveMQTestBase.assertEqualsByteArrays(xid.getGlobalTransactionId(), xids[0].getGlobalTransactionId());
   xids = clientSession.recover(XAResource.TMENDRSCAN);
   Assert.assertEquals(0, xids.length);
   clientSession.commit(xid, false);
   clientSession.start();
   ClientMessage m = clientConsumer.receiveImmediate();
   Assert.assertNull(m);
   // check deliveringCount is zero — the committed acks removed everything.
   checkQueueDeliveryCount(atestq, 0);
}
/**
 * Asserts via the management API that {@code thequeue} currently has
 * exactly {@code expectedCount} messages in delivery.
 */
private void checkQueueDeliveryCount(SimpleString thequeue, int expectedCount) throws Exception {
   final QueueControl control = ManagementControlHelper.createQueueControl(thequeue, thequeue, mbeanServer);
   assertEquals(expectedCount, control.getDeliveringCount());
}
/**
 * Consumes four messages inside an XA branch, prepares it, restarts or
 * reconnects, rolls the branch back and verifies all four messages are
 * redelivered (the acks were discarded by the rollback).
 *
 * @param stopServer {@code true} to fully stop/restart the broker,
 *                   {@code false} to only recreate the client session.
 */
public void testBasicReceiveWithRollback(final boolean stopServer) throws Exception {
   Xid xid = new XidImpl("xa1".getBytes(), 1, UUIDGenerator.getInstance().generateStringUUID().getBytes());
   // Seed the queue from a plain auto-committing session.
   ClientSession clientSession2 = sessionFactory.createSession(false, true, true);
   ClientProducer clientProducer2 = clientSession2.createProducer(atestq);
   for (String text : new String[]{"m1", "m2", "m3", "m4"}) {
      clientProducer2.send(createTextMessage(text));
   }
   clientSession2.close();
   clientSession.start(xid, XAResource.TMNOFLAGS);
   clientSession.start();
   for (String text : new String[]{"m1", "m2", "m3", "m4"}) {
      ClientMessage m = clientConsumer.receive(1000);
      // Null-check BEFORE dereferencing so a missing message fails the
      // assertion instead of throwing a NullPointerException.
      Assert.assertNotNull(m);
      m.acknowledge();
      Assert.assertEquals(text, m.getBodyBuffer().readString());
   }
   clientSession.end(xid, XAResource.TMSUCCESS);
   clientSession.prepare(xid);
   BasicXaRecoveryTest.log.info("stopping and restarting");
   if (stopServer) {
      stopAndRestartServer();
   } else {
      recreateClients();
   }
   BasicXaRecoveryTest.log.info("Restarted");
   Xid[] xids = clientSession.recover(XAResource.TMSTARTRSCAN);
   Assert.assertEquals(1, xids.length);
   Assert.assertEquals(xid.getFormatId(), xids[0].getFormatId());
   ActiveMQTestBase.assertEqualsByteArrays(xid.getBranchQualifier(), xids[0].getBranchQualifier());
   ActiveMQTestBase.assertEqualsByteArrays(xid.getGlobalTransactionId(), xids[0].getGlobalTransactionId());
   xids = clientSession.recover(XAResource.TMENDRSCAN);
   Assert.assertEquals(0, xids.length);
   clientSession.rollback(xid);
   clientSession.start();
   // The rollback cancelled the acks, so all four messages come back.
   for (String text : new String[]{"m1", "m2", "m3", "m4"}) {
      ClientMessage m = clientConsumer.receive(1000);
      Assert.assertNotNull(m);
      Assert.assertEquals(text, m.getBodyBuffer().readString());
   }
}
/**
 * Consumes m5..m8 from a second queue under xid2 (prepared) and m1..m4
 * from the main queue under xid (prepared). After a restart/reconnect both
 * branches are recovered; committing xid must leave the main queue empty.
 *
 * @param stopServer {@code true} to fully stop/restart the broker,
 *                   {@code false} to only recreate the client session.
 */
public void testMultipleTxReceiveWithCommit(final boolean stopServer) throws Exception {
   Xid xid = new XidImpl("xa1".getBytes(), 1, UUIDGenerator.getInstance().generateStringUUID().getBytes());
   Xid xid2 = new XidImpl("xa2".getBytes(), 1, UUIDGenerator.getInstance().generateStringUUID().getBytes());
   // Seed both queues from an auto-committing session.
   ClientSession clientSession2 = sessionFactory.createSession(false, true, true);
   ClientProducer clientProducer2 = clientSession2.createProducer(atestq);
   SimpleString anewtestq = new SimpleString("anewtestq");
   clientSession.createQueue(anewtestq, anewtestq, null, true);
   ClientProducer clientProducer3 = clientSession2.createProducer(anewtestq);
   for (String text : new String[]{"m1", "m2", "m3", "m4"}) {
      clientProducer2.send(createTextMessage(text));
   }
   for (String text : new String[]{"m5", "m6", "m7", "m8"}) {
      clientProducer3.send(createTextMessage(text));
   }
   clientSession2.close();
   clientSession2 = sessionFactory.createSession(true, false, false);
   ClientConsumer clientConsumer2 = clientSession2.createConsumer(anewtestq);
   clientSession2.start(xid2, XAResource.TMNOFLAGS);
   clientSession2.start();
   for (String text : new String[]{"m5", "m6", "m7", "m8"}) {
      ClientMessage m = clientConsumer2.receive(1000);
      // Null-check BEFORE dereferencing so a missing message fails the
      // assertion instead of throwing a NullPointerException.
      Assert.assertNotNull(m);
      m.acknowledge();
      Assert.assertEquals(text, m.getBodyBuffer().readString());
   }
   clientSession2.end(xid2, XAResource.TMSUCCESS);
   clientSession2.prepare(xid2);
   clientSession2.close();
   clientSession2 = null;
   clientSession.start(xid, XAResource.TMNOFLAGS);
   clientSession.start();
   for (String text : new String[]{"m1", "m2", "m3", "m4"}) {
      ClientMessage m = clientConsumer.receive(1000);
      Assert.assertNotNull(m);
      m.acknowledge();
      Assert.assertEquals(text, m.getBodyBuffer().readString());
   }
   clientSession.end(xid, XAResource.TMSUCCESS);
   clientSession.prepare(xid);
   if (stopServer) {
      stopAndRestartServer();
   } else {
      recreateClients();
   }
   Xid[] xids = clientSession.recover(XAResource.TMSTARTRSCAN);
   assertEqualXids(xids, xid, xid2);
   xids = clientSession.recover(XAResource.TMENDRSCAN);
   Assert.assertEquals(0, xids.length);
   clientSession.commit(xid, false);
   clientSession.start();
   // The committed acks removed m1..m4 permanently.
   ClientMessage m = clientConsumer.receiveImmediate();
   Assert.assertNull(m);
}
/**
 * Consumes m5..m8 from a second queue under xid2 (prepared) and m1..m4
 * from the main queue under xid (prepared). After a restart/reconnect both
 * branches are recovered; rolling back xid must redeliver m1..m4 on the
 * main queue (xid2 is intentionally left prepared).
 *
 * @param stopServer {@code true} to fully stop/restart the broker,
 *                   {@code false} to only recreate the client session.
 */
public void testMultipleTxReceiveWithRollback(final boolean stopServer) throws Exception {
   Xid xid = new XidImpl("xa1".getBytes(), 1, UUIDGenerator.getInstance().generateStringUUID().getBytes());
   Xid xid2 = new XidImpl("xa2".getBytes(), 1, UUIDGenerator.getInstance().generateStringUUID().getBytes());
   // Seed both queues from an auto-committing session.
   ClientSession clientSession2 = sessionFactory.createSession(false, true, true);
   ClientProducer clientProducer2 = clientSession2.createProducer(atestq);
   SimpleString anewtestq = new SimpleString("anewtestq");
   clientSession.createQueue(anewtestq, anewtestq, null, true);
   ClientProducer clientProducer3 = clientSession2.createProducer(anewtestq);
   for (String text : new String[]{"m1", "m2", "m3", "m4"}) {
      clientProducer2.send(createTextMessage(text));
   }
   for (String text : new String[]{"m5", "m6", "m7", "m8"}) {
      clientProducer3.send(createTextMessage(text));
   }
   clientSession2.close();
   clientSession2 = sessionFactory.createSession(true, false, false);
   ClientConsumer clientConsumer2 = clientSession2.createConsumer(anewtestq);
   clientSession2.start(xid2, XAResource.TMNOFLAGS);
   clientSession2.start();
   for (String text : new String[]{"m5", "m6", "m7", "m8"}) {
      ClientMessage m = clientConsumer2.receive(1000);
      // Null-check BEFORE dereferencing so a missing message fails the
      // assertion instead of throwing a NullPointerException.
      Assert.assertNotNull(m);
      m.acknowledge();
      Assert.assertEquals(text, m.getBodyBuffer().readString());
   }
   clientSession2.end(xid2, XAResource.TMSUCCESS);
   clientSession2.prepare(xid2);
   clientSession2.close();
   clientSession2 = null;
   clientSession.start(xid, XAResource.TMNOFLAGS);
   clientSession.start();
   for (String text : new String[]{"m1", "m2", "m3", "m4"}) {
      ClientMessage m = clientConsumer.receive(1000);
      Assert.assertNotNull(m);
      m.acknowledge();
      Assert.assertEquals(text, m.getBodyBuffer().readString());
   }
   clientSession.end(xid, XAResource.TMSUCCESS);
   clientSession.prepare(xid);
   if (stopServer) {
      stopAndRestartServer();
   } else {
      recreateClients();
   }
   Xid[] xids = clientSession.recover(XAResource.TMSTARTRSCAN);
   assertEqualXids(xids, xid, xid2);
   xids = clientSession.recover(XAResource.TMENDRSCAN);
   Assert.assertEquals(0, xids.length);
   clientSession.rollback(xid);
   clientSession.start();
   // The rollback cancelled the acks, so m1..m4 are redelivered.
   for (String text : new String[]{"m1", "m2", "m3", "m4"}) {
      ClientMessage m = clientConsumer.receive(1000);
      Assert.assertNotNull(m);
      m.acknowledge();
      Assert.assertEquals(text, m.getBodyBuffer().readString());
   }
}
/**
 * Simulates a full broker crash/restart cycle: closes the client session,
 * stops the server, recreates it with persistence enabled so the journal
 * is reloaded, restarts it, and rebuilds the client session, producer and
 * consumer. Prepared XA transactions must survive this round trip.
 */
protected void stopAndRestartServer() throws Exception {
   // now stop and start the server
   clientSession.close();
   clientSession = null;
   server.stop();
   server = null;
   // realXA == true: the journal is persistent and replayed on start.
   server = createServer(true, configuration, -1, -1, addressSettings);
   server.setMBeanServer(mbeanServer);
   server.start();
   createClients();
}
/**
 * Registers every configured address-setting match on the current server
 * instance.
 */
private void addSettings() {
   addressSettings.forEach((match, settings) -> server.getAddressSettingsRepository().addMatch(match, settings));
}
/**
 * Drops and recreates the client-side session, producer and consumer
 * without touching the server — the lighter alternative to
 * {@link #stopAndRestartServer()} used when {@code stopServer} is false.
 */
protected void recreateClients() throws Exception {
   clientSession.close();
   clientSession = null;
   createClients();
}
/**
 * Creates a durable text message with the given body on the main session.
 *
 * @param s the text body
 */
private ClientMessage createTextMessage(final String s) {
   return createTextMessage(s, true);
}
/**
 * Builds a text-typed {@link ClientMessage} on the main session carrying
 * the given body string.
 *
 * @param s       the text body
 * @param durable whether the message survives a broker restart
 */
private ClientMessage createTextMessage(final String s, final boolean durable) {
   final ClientMessage msg = clientSession.createMessage(ActiveMQTextMessage.TYPE, durable, 0, System.currentTimeMillis(), (byte) 1);
   msg.getBodyBuffer().writeString(s);
   return msg;
}
/**
 * Builds a bytes-typed {@link ClientMessage} on the main session carrying
 * the given raw payload.
 *
 * @param b       the raw payload
 * @param durable whether the message survives a broker restart
 */
private ClientMessage createBytesMessage(final byte[] b, final boolean durable) {
   final ClientMessage msg = clientSession.createMessage(ActiveMQBytesMessage.TYPE, durable, 0, System.currentTimeMillis(), (byte) 1);
   msg.getBodyBuffer().writeBytes(b);
   return msg;
}
/**
 * Creates the default clients: an XA session (queue assumed to already
 * exist) whose acknowledgements are auto-committed.
 */
private void createClients() throws Exception {
   createClients(false, true);
}
/**
 * Builds the locator, session factory, XA client session, producer and
 * consumer used by the tests.
 *
 * @param createQueue whether to (re)create the durable test queue
 * @param commitACKs  the session's auto-commit-acks flag
 */
private void createClients(final boolean createQueue, final boolean commitACKs) throws Exception {
   locator = createInVMNonHALocator();
   sessionFactory = createSessionFactory(locator);
   // XA session; sends are never auto-committed.
   clientSession = sessionFactory.createSession(true, false, commitACKs);
   if (createQueue) {
      clientSession.createQueue(atestq, atestq, null, true);
   }
   clientProducer = clientSession.createProducer(atestq);
   clientConsumer = clientSession.createConsumer(atestq);
}
/**
 * Asserts that {@code xids} contains exactly the given original xids.
 * Entries are matched by branch qualifier; for each match the format id
 * and global transaction id must also agree.
 *
 * @param xids     the xids reported by {@link ClientSession#recover}
 * @param origXids the xids the test created
 */
private void assertEqualXids(final Xid[] xids, final Xid... origXids) {
   // JUnit convention: expected value first, actual second.
   Assert.assertEquals(origXids.length, xids.length);
   for (Xid xid : xids) {
      boolean found = false;
      for (Xid origXid : origXids) {
         if (Arrays.equals(origXid.getBranchQualifier(), xid.getBranchQualifier())) {
            found = true;
            Assert.assertEquals(origXid.getFormatId(), xid.getFormatId());
            ActiveMQTestBase.assertEqualsByteArrays(origXid.getGlobalTransactionId(), xid.getGlobalTransactionId());
            break;
         }
      }
      if (!found) {
         Assert.fail("correct xid not found: " + xid);
      }
   }
}
}
| |
package de.felixbruns.jotify.api.crypto;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.security.InvalidAlgorithmParameterException;
import java.security.InvalidKeyException;
import java.security.KeyFactory;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.NoSuchAlgorithmException;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.KeySpec;
import java.util.Arrays;
import javax.crypto.KeyAgreement;
import javax.crypto.interfaces.DHKey;
import javax.crypto.interfaces.DHPrivateKey;
import javax.crypto.interfaces.DHPublicKey;
import javax.crypto.spec.DHParameterSpec;
import javax.crypto.spec.DHPrivateKeySpec;
import javax.crypto.spec.DHPublicKeySpec;
/**
* Class providing convenience methods for generating Diffie-Hellman
* key pairs and computing shared keys.
*
* @author Felix Bruns <felixbruns@web.de>
*/
public class DH {
/**
* {@link KeyPairGenerator} object for creating new key pairs.
*/
private static KeyPairGenerator keyPairGenerator;
/**
* {@link KeyAgreement} object for computing shared keys.
*/
private static KeyAgreement keyAgreement;
/**
* {@link KeyFactory} object for creating keys from bytes.
*/
private static KeyFactory keyFactory;
/**
* {@link DH} instance for creating new {@link DHKeyPair} objects.
*/
private static DH instance;
/**
* Generator to use for key generation.
*/
private static BigInteger generator = new BigInteger("2");
/**
* Prime number to use for key generation.
* Well-known Group 1, 768-bit prime.
*/
private static BigInteger prime = bytesToBigInteger(new byte[]{
(byte)0xff, (byte)0xff, (byte)0xff, (byte)0xff, (byte)0xff, (byte)0xff, (byte)0xff, (byte)0xff,
(byte)0xc9, (byte)0x0f, (byte)0xda, (byte)0xa2, (byte)0x21, (byte)0x68, (byte)0xc2, (byte)0x34,
(byte)0xc4, (byte)0xc6, (byte)0x62, (byte)0x8b, (byte)0x80, (byte)0xdc, (byte)0x1c, (byte)0xd1,
(byte)0x29, (byte)0x02, (byte)0x4e, (byte)0x08, (byte)0x8a, (byte)0x67, (byte)0xcc, (byte)0x74,
(byte)0x02, (byte)0x0b, (byte)0xbe, (byte)0xa6, (byte)0x3b, (byte)0x13, (byte)0x9b, (byte)0x22,
(byte)0x51, (byte)0x4a, (byte)0x08, (byte)0x79, (byte)0x8e, (byte)0x34, (byte)0x04, (byte)0xdd,
(byte)0xef, (byte)0x95, (byte)0x19, (byte)0xb3, (byte)0xcd, (byte)0x3a, (byte)0x43, (byte)0x1b,
(byte)0x30, (byte)0x2b, (byte)0x0a, (byte)0x6d, (byte)0xf2, (byte)0x5f, (byte)0x14, (byte)0x37,
(byte)0x4f, (byte)0xe1, (byte)0x35, (byte)0x6d, (byte)0x6d, (byte)0x51, (byte)0xc2, (byte)0x45,
(byte)0xe4, (byte)0x85, (byte)0xb5, (byte)0x76, (byte)0x62, (byte)0x5e, (byte)0x7e, (byte)0xc6,
(byte)0xf4, (byte)0x4c, (byte)0x42, (byte)0xe9, (byte)0xa6, (byte)0x3a, (byte)0x36, (byte)0x20,
(byte)0xff, (byte)0xff, (byte)0xff, (byte)0xff, (byte)0xff, (byte)0xff, (byte)0xff, (byte)0xff
});
/**
 * Statically instantiate needed objects and create a class instance.
 *
 * The "DH" (Diffie-Hellman) algorithm is required to be available in every
 * conforming JRE, so the catch branch should be unreachable in practice;
 * it is mapped to an unchecked exception because callers cannot recover
 * from a missing algorithm anyway.
 */
static{
	try{
		keyPairGenerator = KeyPairGenerator.getInstance("DH");
		keyAgreement = KeyAgreement.getInstance("DH");
		keyFactory = KeyFactory.getInstance("DH");
	}
	catch(NoSuchAlgorithmException e){
		throw new RuntimeException(e);
	}
	/* Create DH instance for creating new DHKeyPair objects
	 * (DHKeyPair is a non-static inner class, so it needs an
	 * enclosing instance). */
	instance = new DH();
}
/**
 * Generate a Diffie-Hellman key pair of the given size using the
 * class-wide prime and generator.
 *
 * @param keysize The key size in bits.
 *
 * @return A {@link DHKeyPair} holding Diffie-Hellman private and public keys.
 */
public static DHKeyPair generateKeyPair(int keysize){
	/* Fail fast if static initialization did not run. */
	if(keyPairGenerator == null){
		throw new RuntimeException("KeyPairGenerator not instantiated!");
	}
	/* Parameterize the generator with the shared prime and generator
	 * plus the requested size in bits. */
	DHParameterSpec spec = new DHParameterSpec(prime, generator, keysize);
	try{
		keyPairGenerator.initialize(spec);
	}
	catch(InvalidAlgorithmParameterException e){
		throw new RuntimeException(e);
	}
	/* Generate the pair and wrap it (DHKeyPair is an inner class, hence
	 * the qualified "instance.new" instantiation). */
	KeyPair pair = keyPairGenerator.generateKeyPair();
	return instance.new DHKeyPair(pair);
}
/**
 * Compute a Diffie-Hellman shared key from a private and a public key.
 *
 * @param privateKey A {@link DHPrivateKey} object.
 * @param publicKey A {@link DHPublicKey} object.
 *
 * @return The shared key as a byte array.
 */
public static byte[] computeSharedKey(DHPrivateKey privateKey, DHPublicKey publicKey){
	/* Fail fast if static initialization did not run. */
	if(keyAgreement == null){
		throw new RuntimeException("KeyAgreement not instantiated!");
	}
	try{
		/* Seed the agreement with our private key, feed in the peer's
		 * public key as the final phase, then derive the secret. */
		keyAgreement.init(privateKey);
		keyAgreement.doPhase(publicKey, true);
		return keyAgreement.generateSecret();
	}
	catch(InvalidKeyException e){
		throw new RuntimeException(e);
	}
}
/**
 * Convert a byte array to a non-negative {@link BigInteger}.
 *
 * The bytes are interpreted as an unsigned big-endian magnitude. The
 * sign-magnitude {@link BigInteger#BigInteger(int, byte[])} constructor
 * does this directly, so no manual zero-byte padding is needed.
 *
 * @param bytes The byte array to convert.
 *
 * @return A {@link BigInteger} object.
 */
public static BigInteger bytesToBigInteger(byte[] bytes){
	/* signum 1 treats the array as a positive magnitude; an empty or
	 * all-zero array yields BigInteger.ZERO, matching the old
	 * pad-with-0x00 behaviour. */
	return new BigInteger(1, bytes);
}
/**
 * Convert a {@link DHKey} into its raw big-endian byte representation.
 * Public keys contribute their Y value, private keys their X value. A
 * leading two's-complement sign byte of 0x00 is stripped when the length
 * is not a multiple of 8 bytes.
 *
 * @param key The {@link DHKey} to convert.
 *
 * @return A byte array representation of the key or {@code null}.
 */
public static byte[] keyToBytes(DHKey key){
	final BigInteger value;
	/* Pick the appropriate component for the key type. */
	if(key instanceof DHPublicKey){
		value = ((DHPublicKey)key).getY();
	}
	else if(key instanceof DHPrivateKey){
		value = ((DHPrivateKey)key).getX();
	}
	else{
		/* Unknown key type: signal failure with null. */
		return null;
	}
	byte[] bytes = value.toByteArray();
	/* Drop the sign byte when the length is not 8-byte aligned. */
	if(bytes[0] == 0x00 && bytes.length % 8 != 0){
		bytes = Arrays.copyOfRange(bytes, 1, bytes.length);
	}
	return bytes;
}
/**
 * Create a {@link DHPublicKey} from a byte array.
 *
 * @param parameterSpec The {@link DHParameterSpec} supplying P and G.
 * @param bytes The key bytes (big-endian Y value).
 *
 * @return A {@link DHPublicKey} object.
 */
public static DHPublicKey bytesToPublicKey(DHParameterSpec parameterSpec, byte[] bytes){
	/* Assemble Y (the public value) together with P and G. */
	KeySpec spec = new DHPublicKeySpec(
		bytesToBigInteger(bytes),
		parameterSpec.getP(),
		parameterSpec.getG()
	);
	try{
		return (DHPublicKey)keyFactory.generatePublic(spec);
	}
	catch(InvalidKeySpecException e){
		throw new RuntimeException(e);
	}
}
/**
* Create a {@link DHPrivateKey} from a byte array.
*
* @param parameterSpec The {@link DHParameterSpec} to use.
* @param bytes The key bytes.
*
* @return A {@link DHPrivateKey} object.
*/
public static DHPrivateKey bytesToPrivateKey(DHParameterSpec parameterSpec, byte[] bytes){
/* Set X (private key), P and G values. */
KeySpec keySpec = new DHPrivateKeySpec(
bytesToBigInteger(bytes),
parameterSpec.getP(),
parameterSpec.getG()
);
/* Generate private key from key spec */
try{
return (DHPrivateKey)keyFactory.generatePrivate(keySpec);
}
catch(InvalidKeySpecException e){
throw new RuntimeException(e);
}
}
/**
* A class holding Diffie-Hellman private and public keys.
*
* @author Felix
*
* @category Crypto
*/
public class DHKeyPair {
/**
* A {@link DHPrivateKey}.
*/
private DHPrivateKey privateKey;
/**
* A {@link DHPublicKey}.
*/
private DHPublicKey publicKey;
/**
* Create a new {@link DHKeyPair} using a private and public key.
*
* @param privateKey The private key.
* @param publicKey The public key.
*/
public DHKeyPair(DHPrivateKey privateKey, DHPublicKey publicKey){
this.privateKey = privateKey;
this.publicKey = publicKey;
}
/**
* Create a new {@link DHKeyPair} using a {@link KeyPair}.
*
* @param keyPair The {@link KeyPair} object.
*/
public DHKeyPair(KeyPair keyPair){
this((DHPrivateKey)keyPair.getPrivate(), (DHPublicKey)keyPair.getPublic());
}
/**
* Get the private key.
*
* @return A {@link DHPrivateKey} object.
*/
public DHPrivateKey getPrivateKey(){
return this.privateKey;
}
/**
* Get the private key as a byte array.
*
* @return A byte array representation of the private key.
*/
public byte[] getPrivateKeyBytes(){
return keyToBytes(this.privateKey);
}
/**
* Get the public key.
*
* @return A {@link DHPublicKey} object.
*/
public DHPublicKey getPublicKey(){
return this.publicKey;
}
/**
* Get the public key as a byte array.
*
* @return A byte array representation of the public key.
*/
public byte[] getPublicKeyBytes(){
return keyToBytes(this.publicKey);
}
}
}
| |
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package org.apache.drill.exec.physical.impl.window;
import java.util.Properties;
import org.apache.drill.BaseTestQuery;
import org.apache.drill.DrillTestWrapper;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.exceptions.UserRemoteException;
import org.apache.drill.common.util.TestTools;
import org.apache.drill.exec.ExecConstants;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import org.apache.drill.exec.proto.UserBitShared.DrillPBError.ErrorType;
import org.junit.BeforeClass;
import org.junit.Test;
public class TestWindowFrame extends BaseTestQuery {

  private static final String TEST_RES_PATH = TestTools.getWorkingPath() + "/src/test/resources";

  @BeforeClass
  public static void setupMSortBatchSize() {
    // make sure memory sorter outputs 20 rows per batch
    final Properties props = cloneDefaultTestConfigProperties();
    props.put(ExecConstants.EXTERNAL_SORT_MSORT_MAX_BATCHSIZE, Integer.toString(20));
    updateTestCluster(1, DrillConfig.create(props));
  }

  /** Builds a COUNT/SUM window query (q1.sql) over the given table, optionally partitioned. */
  private DrillTestWrapper buildWindowQuery(final String tableName, final boolean withPartitionBy, final int numBatches)
      throws Exception {
    return testBuilder()
      .sqlQuery(String.format(getFile("window/q1.sql"), TEST_RES_PATH, tableName, withPartitionBy ? "(partition by position_id)":"()"))
      .ordered()
      .csvBaselineFile("window/" + tableName + (withPartitionBy ? ".pby" : "") + ".tsv")
      .baselineColumns("count", "sum")
      .expectsNumBatches(numBatches)
      .build();
  }

  /** Builds a ranking window query (q2.sql) with ORDER BY, optionally partitioned. */
  private DrillTestWrapper buildWindowWithOrderByQuery(final String tableName, final boolean withPartitionBy,
                                                       final int numBatches) throws Exception {
    return testBuilder()
      .sqlQuery(String.format(getFile("window/q2.sql"), TEST_RES_PATH, tableName, withPartitionBy ? "(partition by position_id order by sub)" : "(order by sub)"))
      .ordered()
      .csvBaselineFile("window/" + tableName + (withPartitionBy ? ".pby" : "") + ".oby.tsv")
      .baselineColumns("count", "sum", "row_number", "rank", "dense_rank", "cume_dist", "percent_rank")
      .expectsNumBatches(numBatches)
      .build();
  }

  private void runTest(final String tableName, final boolean withPartitionBy, final boolean withOrderBy, final int numBatches) throws Exception {
    DrillTestWrapper testWrapper = withOrderBy ?
      buildWindowWithOrderByQuery(tableName, withPartitionBy, numBatches) : buildWindowQuery(tableName, withPartitionBy, numBatches);
    testWrapper.run();
  }

  /** Runs the table through all four PARTITION BY / ORDER BY combinations. */
  private void runTest(final String tableName, final int numBatches) throws Exception {
    // we do expect an "extra" empty batch
    runTest(tableName, true, true, numBatches + 1);
    runTest(tableName, true, false, numBatches + 1);
    runTest(tableName, false, true, numBatches + 1);
    runTest(tableName, false, false, numBatches + 1);
  }

  /**
   * Single batch with a single partition (position_id column)
   */
  @Test
  public void testB1P1() throws Exception {
    runTest("b1.p1", 1);
  }

  /**
   * Single batch with 2 partitions (position_id column)
   */
  @Test
  public void testB1P2() throws Exception {
    runTest("b1.p2", 1);
  }

  @Test
  public void testMultipleFramers() throws Exception {
    final String window = " OVER(PARTITION BY position_id ORDER by sub)";
    test("SELECT COUNT(*)"+window+", SUM(salary)"+window+", ROW_NUMBER()"+window+", RANK()"+window+" " +
      "FROM dfs_test.`"+TEST_RES_PATH+"/window/b1.p1`"
    );
  }

  /**
   * 2 batches with 2 partitions (position_id column), each batch contains a different partition
   */
  @Test
  public void testB2P2() throws Exception {
    runTest("b2.p2", 2);
  }

  /**
   * 2 batches with 4 partitions, one partition has rows in both batches
   */
  @Test
  public void testB2P4() throws Exception {
    runTest("b2.p4", 2);
  }

  /**
   * 3 batches with 2 partitions, one partition has rows in all 3 batches
   */
  @Test
  public void testB3P2() throws Exception {
    runTest("b3.p2", 3);
  }

  /**
   * 4 batches with 4 partitions. After processing 1st batch, when innerNext() is called again, framer can process
   * current batch without the need to call next(incoming).
   */
  @Test
  public void testB4P4() throws Exception {
    runTest("b4.p4", 4);
  }

  @Test // DRILL-1862
  public void testEmptyPartitionBy() throws Exception {
    test("SELECT employee_id, position_id, salary, SUM(salary) OVER(ORDER BY position_id) FROM cp.`employee.json` LIMIT 10");
  }

  @Test // DRILL-3172
  public void testEmptyOverClause() throws Exception {
    test("SELECT employee_id, position_id, salary, SUM(salary) OVER() FROM cp.`employee.json` LIMIT 10");
  }

  @Test // DRILL-3218
  public void testMaxVarChar() throws Exception {
    test(getFile("window/q3218.sql"), TEST_RES_PATH);
  }

  @Test // DRILL-3220
  public void testCountConst() throws Exception {
    test(getFile("window/q3220.sql"), TEST_RES_PATH);
  }

  @Test // DRILL-3604
  public void testFix3604() throws Exception {
    // make sure the query doesn't fail
    test(getFile("window/3604.sql"), TEST_RES_PATH);
  }

  @Test // DRILL-3605
  public void testFix3605() throws Exception {
    testBuilder()
      .sqlQuery(getFile("window/3605.sql"), TEST_RES_PATH)
      .ordered()
      .csvBaselineFile("window/3605.tsv")
      .baselineColumns("col2", "lead_col2")
      .build()
      .run();
  }

  @Test // DRILL-3606
  public void testFix3606() throws Exception {
    testBuilder()
      .sqlQuery(getFile("window/3606.sql"), TEST_RES_PATH)
      .ordered()
      .csvBaselineFile("window/3606.tsv")
      .baselineColumns("col2", "lead_col2")
      .build()
      .run();
  }

  @Test
  public void testLead() throws Exception {
    testBuilder()
      .sqlQuery(getFile("window/lead.oby.sql"), TEST_RES_PATH)
      .ordered()
      .csvBaselineFile("window/b4.p4.lead.oby.tsv")
      .baselineColumns("lead")
      .build()
      .run();
  }

  @Test
  public void testLagWithPby() throws Exception {
    testBuilder()
      .sqlQuery(getFile("window/lag.pby.oby.sql"), TEST_RES_PATH)
      .ordered()
      .csvBaselineFile("window/b4.p4.lag.pby.oby.tsv")
      .baselineColumns("lag")
      .build()
      .run();
  }

  @Test
  public void testLag() throws Exception {
    testBuilder()
      .sqlQuery(getFile("window/lag.oby.sql"), TEST_RES_PATH)
      .ordered()
      .csvBaselineFile("window/b4.p4.lag.oby.tsv")
      .baselineColumns("lag")
      .build()
      .run();
  }

  @Test
  public void testLeadWithPby() throws Exception {
    testBuilder()
      .sqlQuery(getFile("window/lead.pby.oby.sql"), TEST_RES_PATH)
      .ordered()
      .csvBaselineFile("window/b4.p4.lead.pby.oby.tsv")
      .baselineColumns("lead")
      .build()
      .run();
  }

  @Test
  public void testFirstValue() throws Exception {
    testBuilder()
      .sqlQuery(getFile("window/fval.pby.sql"), TEST_RES_PATH)
      .ordered()
      .csvBaselineFile("window/b4.p4.fval.pby.tsv")
      .baselineColumns("first_value")
      .build()
      .run();
  }

  @Test
  public void testLastValue() throws Exception {
    testBuilder()
      .sqlQuery(getFile("window/lval.pby.oby.sql"), TEST_RES_PATH)
      .ordered()
      .csvBaselineFile("window/b4.p4.lval.pby.oby.tsv")
      .baselineColumns("last_value")
      .build()
      .run();
  }

  @Test
  public void testFirstValueAllTypes() throws Exception {
    // make sure all types are handled properly
    test(getFile("window/fval.alltypes.sql"), TEST_RES_PATH);
  }

  @Test
  public void testLastValueAllTypes() throws Exception {
    // make sure all types are handled properly
    // BUG FIX: previously ran fval.alltypes.sql (copy-paste from
    // testFirstValueAllTypes), so LAST_VALUE was never exercised here.
    test(getFile("window/lval.alltypes.sql"), TEST_RES_PATH);
  }

  @Test
  public void testNtile() throws Exception {
    testBuilder()
      .sqlQuery(getFile("window/ntile.sql"), TEST_RES_PATH)
      .ordered()
      .csvBaselineFile("window/b2.p4.ntile.tsv")
      .baselineColumns("ntile")
      .build()
      .run();
  }

  @Test
  public void test3648Fix() throws Exception {
    testBuilder()
      .sqlQuery(getFile("window/3648.sql"), TEST_RES_PATH)
      .ordered()
      .csvBaselineFile("window/3648.tsv")
      .baselineColumns("ntile")
      .build()
      .run();
  }

  @Test
  public void test3654Fix() throws Exception {
    test("SELECT FIRST_VALUE(col8) OVER(PARTITION BY col7 ORDER BY col8) FROM dfs_test.`%s/window/3648.parquet`", TEST_RES_PATH);
  }

  @Test
  public void test3643Fix() throws Exception {
    try {
      test("SELECT NTILE(0) OVER(PARTITION BY col7 ORDER BY col8) FROM dfs_test.`%s/window/3648.parquet`", TEST_RES_PATH);
      fail("Query should have failed");
    } catch (UserRemoteException e) {
      assertEquals(ErrorType.FUNCTION, e.getErrorType());
    }
  }

  @Test
  public void test3668Fix() throws Exception {
    testBuilder()
      .sqlQuery(getFile("window/3668.sql"), TEST_RES_PATH)
      .ordered()
      .baselineColumns("cnt").baselineValues(2L)
      .build()
      .run();
  }

  @Test
  public void testLeadParams() throws Exception {
    // make sure we only support default arguments for LEAD/LAG functions
    final String query = "SELECT %s OVER(PARTITION BY col7 ORDER BY col8) FROM dfs_test.`%s/window/3648.parquet`";
    test(query, "LEAD(col8, 1)", TEST_RES_PATH);
    test(query, "LAG(col8, 1)", TEST_RES_PATH);
    try {
      test(query, "LEAD(col8, 2)", TEST_RES_PATH);
      fail("query should fail");
    } catch (UserRemoteException e) {
      assertEquals(ErrorType.UNSUPPORTED_OPERATION, e.getErrorType());
    }
    try {
      test(query, "LAG(col8, 2)", TEST_RES_PATH);
      fail("query should fail");
    } catch (UserRemoteException e) {
      assertEquals(ErrorType.UNSUPPORTED_OPERATION, e.getErrorType());
    }
  }

  @Test
  public void testPartitionNtile() {
    Partition partition = new Partition();
    partition.updateLength(12, false);
    // NTILE(5) over 12 rows: 12 = 5*2 + 2, so the first two tiles get an
    // extra row (3 rows each) and the remaining three tiles get 2 rows each.
    final int[] expectedTiles = {1, 1, 1, 2, 2, 2, 3, 3, 4, 4, 5, 5};
    for (int row = 0; row < expectedTiles.length; row++) {
      assertEquals(expectedTiles[row], partition.ntile(5));
      if (row < expectedTiles.length - 1) {
        partition.rowAggregated();
      }
    }
  }
}
| |
/*
* Copyright (C) 2012 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.testing;
import static com.google.common.base.Predicates.and;
import static com.google.common.base.Predicates.not;
import static com.google.common.testing.AbstractPackageSanityTests.Chopper.suffix;
import com.google.common.annotations.Beta;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import com.google.common.base.Predicate;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;
import com.google.common.reflect.ClassPath;
import com.google.common.testing.NullPointerTester.Visibility;
import junit.framework.AssertionFailedError;
import junit.framework.TestCase;
import org.junit.Test;
import java.io.IOException;
import java.io.Serializable;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.TreeMap;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Automatically runs sanity checks against top level classes in the same package of the test that
* extends {@code AbstractPackageSanityTests}. Currently sanity checks include {@link
* NullPointerTester}, {@link EqualsTester} and {@link SerializableTester}. For example: <pre>
* public class PackageSanityTests extends AbstractPackageSanityTests {}
* </pre>
*
* <p>Note that only top-level classes with either a non-private constructor or a non-private static
* factory method to construct instances can have their instance methods checked. For example: <pre>
* public class Address {
* private final String city;
* private final String state;
* private final String zipcode;
*
* public Address(String city, String state, String zipcode) {...}
*
* {@literal @Override} public boolean equals(Object obj) {...}
* {@literal @Override} public int hashCode() {...}
* ...
* }
* </pre>
* <p>No cascading checks are performed against the return values of methods unless the method is a
* static factory method. Neither are semantics of mutation methods such as {@code
* someList.add(obj)} checked. For more detailed discussion of supported and unsupported cases, see
* {@link #testEquals}, {@link #testNulls} and {@link #testSerializable}.
*
* <p>For testing against the returned instances from a static factory class, such as <pre>
* interface Book {...}
* public class Books {
* public static Book hardcover(String title) {...}
* public static Book paperback(String title) {...}
* }
* </pre>
* <p>please use {@link ClassSanityTester#forAllPublicStaticMethods}.
*
* <p>This class incurs IO because it scans the classpath and reads classpath resources.
*
* @author Ben Yu
* @since 14.0
*/
@Beta
// TODO: Switch to JUnit 4 and use @Parameterized and @BeforeClass
public abstract class AbstractPackageSanityTests extends TestCase {
  /* The names of the expected method that tests null checks. */
  private static final ImmutableList<String> NULL_TEST_METHOD_NAMES = ImmutableList.of(
      "testNulls", "testNull",
      "testNullPointers", "testNullPointer",
      "testNullPointerExceptions", "testNullPointerException");
  /* The names of the expected method that tests serializable. */
  private static final ImmutableList<String> SERIALIZABLE_TEST_METHOD_NAMES = ImmutableList.of(
      "testSerializable", "testSerialization",
      "testEqualsAndSerializable", "testEqualsAndSerialization");
  /* The names of the expected method that tests equals. */
  private static final ImmutableList<String> EQUALS_TEST_METHOD_NAMES = ImmutableList.of(
      "testEquals", "testEqualsAndHashCode",
      "testEqualsAndSerializable", "testEqualsAndSerialization",
      "testEquality");
  // Recognized test-class name suffixes: "FooTest", "FooTests", "FooTestCase"
  // and "FooTestSuite" are all treated as the explicit test class for "Foo".
  private static final Chopper TEST_SUFFIX =
      suffix("Test")
      .or(suffix("Tests"))
      .or(suffix("TestCase"))
      .or(suffix("TestSuite"));
  private final Logger logger = Logger.getLogger(getClass().getName());
  private final ClassSanityTester tester = new ClassSanityTester();
  // Minimum member visibility that gets tested; see publicApiOnly().
  private Visibility visibility = Visibility.PACKAGE;
  // Accepts classes whose modifiers meet the configured visibility; callers
  // can further restrict it via ignoreClasses().
  private Predicate<Class<?>> classFilter = new Predicate<Class<?>>() {
    @Override public boolean apply(Class<?> cls) {
      return visibility.isVisible(cls.getModifiers());
    }
  };
  /**
   * Restricts the sanity tests for public API only. By default, package-private API are also
   * covered.
   */
  protected final void publicApiOnly() {
    visibility = Visibility.PUBLIC;
  }
  /**
   * Tests all top-level {@link Serializable} classes in the package. For a serializable Class
   * {@code C}:
   * <ul>
   * <li>If {@code C} explicitly implements {@link Object#equals}, the deserialized instance will be
   *     checked to be equal to the instance before serialization.
   * <li>If {@code C} doesn't explicitly implement {@code equals} but instead inherits it from a
   *     superclass, no equality check is done on the deserialized instance because it's not clear
   *     whether the author intended for the class to be a value type.
   * <li>If a constructor or factory method takes a parameter whose type is interface, a dynamic
   *     proxy will be passed to the method. It's possible that the method body expects an instance
   *     method of the passed-in proxy to be of a certain value yet the proxy isn't aware of the
   *     assumption, in which case the equality check before and after serialization will fail.
   * <li>If the constructor or factory method takes a parameter that {@link
   *     AbstractPackageSanityTests} doesn't know how to construct, the test will fail.
   * <li>If there is no visible constructor or visible static factory method declared by {@code C},
   *     {@code C} is skipped for serialization test, even if it implements {@link Serializable}.
   * <li>Serialization test is not performed on method return values unless the method is a visible
   *     static factory method whose return type is {@code C} or {@code C}'s subtype.
   * </ul>
   *
   * <p>In all cases, if {@code C} needs custom logic for testing serialization, you can add an
   * explicit {@code testSerializable()} test in the corresponding {@code CTest} class, and {@code
   * C} will be excluded from automated serialization test performed by this method.
   */
  @Test
  public void testSerializable() throws Exception {
    // TODO: when we use @BeforeClass, we can pay the cost of class path scanning only once.
    for (Class<?> classToTest
        : findClassesToTest(loadClassesInPackage(), SERIALIZABLE_TEST_METHOD_NAMES)) {
      if (Serializable.class.isAssignableFrom(classToTest)) {
        try {
          Object instance = tester.instantiate(classToTest);
          if (instance != null) {
            // Only check round-trip equality when the class explicitly
            // declares equals(); otherwise just verify it reserializes.
            if (isEqualsDefined(classToTest)) {
              SerializableTester.reserializeAndAssert(instance);
            } else {
              SerializableTester.reserialize(instance);
            }
          }
        } catch (Throwable e) {
          throw sanityError(classToTest, SERIALIZABLE_TEST_METHOD_NAMES, "serializable test", e);
        }
      }
    }
  }
  /**
   * Performs {@link NullPointerTester} checks for all top-level classes in the package. For a class
   * {@code C}
   * <ul>
   * <li>All visible static methods are checked such that passing null for any parameter that's not
   *     annotated with {@link javax.annotation.Nullable} should throw {@link NullPointerException}.
   * <li>If there is any visible constructor or visible static factory method declared by the class,
   *     all visible instance methods will be checked too using the instance created by invoking the
   *     constructor or static factory method.
   * <li>If the constructor or factory method used to construct instance takes a parameter that
   *     {@link AbstractPackageSanityTests} doesn't know how to construct, the test will fail.
   * <li>If there is no visible constructor or visible static factory method declared by {@code C},
   *     instance methods are skipped for nulls test.
   * <li>Nulls test is not performed on method return values unless the method is a visible static
   *     factory method whose return type is {@code C} or {@code C}'s subtype.
   * </ul>
   *
   * <p>In all cases, if {@code C} needs custom logic for testing nulls, you can add an explicit
   * {@code testNulls()} test in the corresponding {@code CTest} class, and {@code C} will be
   * excluded from the automated null tests performed by this method.
   */
  @Test
  public void testNulls() throws Exception {
    for (Class<?> classToTest
        : findClassesToTest(loadClassesInPackage(), NULL_TEST_METHOD_NAMES)) {
      try {
        tester.doTestNulls(classToTest, visibility);
      } catch (Throwable e) {
        throw sanityError(classToTest, NULL_TEST_METHOD_NAMES, "nulls test", e);
      }
    }
  }
  /**
   * Tests {@code equals()} and {@code hashCode()} implementations for every top-level class in the
   * package, that explicitly implements {@link Object#equals}. For a class {@code C}:
   * <ul>
   * <li>The visible constructor or visible static factory method with the most parameters is used
   *     to construct the sample instances. In case of tie, the candidate constructors or factories
   *     are tried one after another until one can be used to construct sample instances.
   * <li>For the constructor or static factory method used to construct instances, it's checked that
   *     when equal parameters are passed, the result instance should also be equal; and vice versa.
   * <li>Inequality check is not performed against state mutation methods such as {@link List#add},
   *     or functional update methods such as {@link com.google.common.base.Joiner#skipNulls}.
   * <li>If the constructor or factory method used to construct instance takes a parameter that
   *     {@link AbstractPackageSanityTests} doesn't know how to construct, the test will fail.
   * <li>If there is no visible constructor or visible static factory method declared by {@code C},
   *     {@code C} is skipped for equality test.
   * <li>Equality test is not performed on method return values unless the method is a visible
   *     static factory method whose return type is {@code C} or {@code C}'s subtype.
   * </ul>
   *
   * <p>In all cases, if {@code C} needs custom logic for testing {@code equals()}, you can add an
   * explicit {@code testEquals()} test in the corresponding {@code CTest} class, and {@code C} will
   * be excluded from the automated {@code equals} test performed by this method.
   */
  @Test
  public void testEquals() throws Exception {
    for (Class<?> classToTest
        : findClassesToTest(loadClassesInPackage(), EQUALS_TEST_METHOD_NAMES)) {
      // Enums get equals/hashCode from Enum itself; nothing to verify there.
      if (!classToTest.isEnum() && isEqualsDefined(classToTest)) {
        try {
          tester.doTestEquals(classToTest);
        } catch (Throwable e) {
          throw sanityError(classToTest, EQUALS_TEST_METHOD_NAMES, "equals test", e);
        }
      }
    }
  }
  /**
   * Sets the default value for {@code type}, when dummy value for a parameter of the same type
   * needs to be created in order to invoke a method or constructor. The default value isn't used in
   * testing {@link Object#equals} because more than one sample instances are needed for testing
   * inequality.
   */
  protected final <T> void setDefault(Class<T> type, T value) {
    tester.setDefault(type, value);
  }
  /** Specifies that classes that satisfy the given predicate aren't tested for sanity. */
  protected final void ignoreClasses(Predicate<? super Class<?>> condition) {
    this.classFilter = and(this.classFilter, not(condition));
  }
  // Wraps any failure from an automated check into an assertion error that
  // tells the user how to opt the class out via an explicit test method.
  private static AssertionFailedError sanityError(
      Class<?> cls, List<String> explicitTestNames, String description, Throwable e) {
    String message = String.format(
        "Error in automated %s of %s\n"
            + "If the class is better tested explicitly, you can add %s() to %sTest",
        description, cls, explicitTestNames.get(0), cls.getName());
    AssertionFailedError error = new AssertionFailedError(message);
    error.initCause(e);
    return error;
  }
  /**
   * Finds the classes not ending with a test suffix and not covered by an explicit test
   * whose name is {@code explicitTestName}.
   */
  @VisibleForTesting List<Class<?>> findClassesToTest(
      Iterable<? extends Class<?>> classes, Iterable<String> explicitTestNames) {
    // "a.b.Foo" -> a.b.Foo.class
    TreeMap<String, Class<?>> classMap = Maps.newTreeMap();
    for (Class<?> cls : classes) {
      classMap.put(cls.getName(), cls);
    }
    // Foo.class -> [FooTest.class, FooTests.class, FooTestSuite.class, ...]
    Multimap<Class<?>, Class<?>> testClasses = HashMultimap.create();
    LinkedHashSet<Class<?>> candidateClasses = Sets.newLinkedHashSet();
    for (Class<?> cls : classes) {
      // A class whose name ends with a test suffix is a test of the chopped
      // name; anything else is a candidate for automated sanity checks.
      Optional<String> testedClassName = TEST_SUFFIX.chop(cls.getName());
      if (testedClassName.isPresent()) {
        Class<?> testedClass = classMap.get(testedClassName.get());
        if (testedClass != null) {
          testClasses.put(testedClass, cls);
        }
      } else {
        candidateClasses.add(cls);
      }
    }
    List<Class<?>> result = Lists.newArrayList();
    NEXT_CANDIDATE: for (Class<?> candidate : Iterables.filter(candidateClasses, classFilter)) {
      for (Class<?> testClass : testClasses.get(candidate)) {
        if (hasTest(testClass, explicitTestNames)) {
          // covered by explicit test
          continue NEXT_CANDIDATE;
        }
      }
      result.add(candidate);
    }
    return result;
  }
  // Loads all top-level, non-interface classes in this test's own package.
  private List<Class<?>> loadClassesInPackage() throws IOException {
    List<Class<?>> classes = Lists.newArrayList();
    String packageName = getClass().getPackage().getName();
    for (ClassPath.ClassInfo classInfo
        : ClassPath.from(getClass().getClassLoader()).getTopLevelClasses(packageName)) {
      Class<?> cls;
      try {
        cls = classInfo.load();
      } catch (NoClassDefFoundError e) {
        // In case there were linking problems, this is probably not a class we care to test anyway.
        logger.log(Level.SEVERE, "Cannot load class " + classInfo + ", skipping...", e);
        continue;
      }
      if (!cls.isInterface()) {
        classes.add(cls);
      }
    }
    return classes;
  }
  // A test class "covers" a candidate if it declares any of the expected
  // no-arg test methods (e.g. testNulls / testEquals / testSerializable).
  private static boolean hasTest(Class<?> testClass, Iterable<String> testNames) {
    for (String testName : testNames) {
      try {
        testClass.getMethod(testName);
        return true;
      } catch (NoSuchMethodException e) {
        continue;
      }
    }
    return false;
  }
  // True only when the class itself declares equals(Object); a synthetic
  // (compiler-generated bridge) method does not count as an explicit override.
  private static boolean isEqualsDefined(Class<?> cls) {
    try {
      return !cls.getDeclaredMethod("equals", Object.class).isSynthetic();
    } catch (NoSuchMethodException e) {
      return false;
    }
  }
  /** Chops a recognized suffix off a class name; instances compose via {@link #or}. */
  static abstract class Chopper {
    final Chopper or(final Chopper you) {
      final Chopper i = this;
      return new Chopper() {
        @Override Optional<String> chop(String str) {
          return i.chop(str).or(you.chop(str));
        }
      };
    }
    abstract Optional<String> chop(String str);
    static Chopper suffix(final String suffix) {
      return new Chopper() {
        @Override Optional<String> chop(String str) {
          if (str.endsWith(suffix)) {
            return Optional.of(str.substring(0, str.length() - suffix.length()));
          } else {
            return Optional.absent();
          }
        }
      };
    }
  }
}
| |
package metricapp.service.controller;
import java.io.IOException;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import javax.annotation.Nonnull;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import metricapp.dto.metric.MetricCrudDTO;
import metricapp.dto.metric.MetricDTO;
import metricapp.entity.Entity;
import metricapp.entity.State;
import metricapp.entity.external.PointerBus;
import metricapp.entity.metric.Metric;
import metricapp.exception.BadInputException;
import metricapp.exception.BusException;
import metricapp.exception.DBException;
import metricapp.exception.IllegalStateTransitionException;
import metricapp.exception.NotFoundException;
import metricapp.service.spec.ModelMapperFactoryInterface;
import metricapp.service.spec.controller.MetricCRUDInterface;
import metricapp.service.spec.repository.BusApprovedElementInterface;
import metricapp.service.spec.repository.MetricRepository;
import metricapp.utility.stateTransitionUtils.AbstractStateTransitionFactory;
@Service
public class MetricCRUDController implements MetricCRUDInterface {
    // Persistence gateway for Metric documents.
    @Autowired
    private MetricRepository metricRepository;
    // Factory supplying the ModelMapper used for entity <-> DTO conversion.
    @Autowired
    private ModelMapperFactoryInterface modelMapperFactory;
    // Bus-backed repository serving the last approved version of elements.
    @Autowired
    private BusApprovedElementInterface busApprovedElementRepository;
@Override
public MetricCrudDTO getMetricById(String id) throws BadInputException, NotFoundException {
if (id == null) {
throw new BadInputException("Metric id cannot be null");
}
Metric metric = metricRepository.findMetricById(id);
if (metric == null) {
throw new NotFoundException("Metric with id " + id + "is not available");
}
MetricCrudDTO dto = new MetricCrudDTO();
dto.setRequest("Metric, id=" + id);
dto.addMetricToList(modelMapperFactory.getLooseModelMapper().map(metric, MetricDTO.class));
return dto;
}
@Override
public MetricCrudDTO getMetricByIdAndVersion(String id, String version)
throws BadInputException, NotFoundException {
if (id == null || version == null) {
throw new BadInputException("Metric id,version cannot be null");
}
Metric metric = metricRepository.findMetricByIdAndVersion(id, version);
if (metric == null) {
throw new NotFoundException("Metric with id " + id + " and version " + version + "is not available");
}
MetricCrudDTO dto = new MetricCrudDTO();
dto.setRequest("Metric, id=" + id + ";version=" + version);
dto.addMetricToList(modelMapperFactory.getLooseModelMapper().map(metric, MetricDTO.class));
return dto;
}
@Override
public MetricDTO getMetricByIdLastApprovedVersion(String id) throws BadInputException, NotFoundException, BusException, IOException {
if (id == null) {
throw new BadInputException("Metric id cannot be null");
}
Metric last =busApprovedElementRepository.getLastApprovedElement(id, Metric.class, Entity.Metric);
return modelMapperFactory.getLooseModelMapper().map(last, MetricDTO.class);
}
@Override
public MetricCrudDTO getMetricCrudDTOByIdLastApprovedVersion(String id) throws BadInputException, NotFoundException, BusException, IOException {
MetricCrudDTO dto= new MetricCrudDTO();
dto.addMetricToList(getMetricByIdLastApprovedVersion(id));
return dto;
}
@Override
public MetricCrudDTO getAll() throws NotFoundException{
ArrayList<Metric> metrics = metricRepository.findAll();
if (metrics.size() == 0) {
throw new NotFoundException("no Metrics");
}
MetricCrudDTO dto = new MetricCrudDTO();
dto.setRequest("All Metrics available on DB" );
Iterator<Metric> metricP = metrics.iterator();
while (metricP.hasNext()) {
dto.addMetricToList(modelMapperFactory.getLooseModelMapper().map(metricP.next(), MetricDTO.class));
}
return dto;
}
@Override
public MetricCrudDTO getAllApproved() throws BadInputException, BusException, IOException{
MetricCrudDTO dto= new MetricCrudDTO();
ArrayList<Metric> metrics = busApprovedElementRepository.getAllApprovedMetrics();
dto.setRequest("All approved metrics");
Iterator<Metric> metricP = metrics.iterator();
while (metricP.hasNext()) {
dto.addMetricToList(modelMapperFactory.getLooseModelMapper().map(metricP.next(), MetricDTO.class));
}
return dto;
}
@Override
public MetricCrudDTO getMetricOfUser(String userId) throws NotFoundException, BadInputException {
if (userId == null) {
throw new BadInputException("Metric userId cannot be null");
}
ArrayList<Metric> metrics = metricRepository.findMetricByMetricatorId(userId);
if (metrics.size() == 0) {
throw new NotFoundException("User " + userId + " has no Metrics");
}
MetricCrudDTO dto = new MetricCrudDTO();
dto.setRequest("Metric of " + userId);
Iterator<Metric> metricP = metrics.iterator();
while (metricP.hasNext()) {
dto.addMetricToList(modelMapperFactory.getLooseModelMapper().map(metricP.next(), MetricDTO.class));
}
return dto;
}
@Override
/**
* This function returns an array of metrics, it accepts a list of pointer bus. Every Metric is grabbed from bus.
*/
public ArrayList<MetricDTO> getMetricsByPointerBusList(List<PointerBus> list){
ArrayList<MetricDTO> metricsDTO = new ArrayList<MetricDTO>();
Iterator<PointerBus> itM = list.iterator();
while(itM.hasNext()){
try {
metricsDTO.add(this.getMetricByIdLastApprovedVersion(itM.next().getInstance()));
} catch (Exception e) {
}
}
return metricsDTO;
}
@Override
public MetricCrudDTO createMetric(@Nonnull MetricDTO dto) throws BadInputException {
if (dto.getMetadata().getCreatorId() == null) {
throw new BadInputException("Bad Input");
}
if (dto.getMetadata().getId() != null) {
throw new BadInputException("New Metrics cannot have ID");
}
if (dto.getMetadata().getState() != State.Created) {
throw new BadInputException("New Metrics must be in state of CREATED");
}
dto.getMetadata().setCreationDate(LocalDate.now());
dto.getMetadata().setLastVersionDate(LocalDate.now());
Metric newMetric = modelMapperFactory.getLooseModelMapper().map(dto, Metric.class);
newMetric.setCreationDate(LocalDate.now());
newMetric.setLastVersionDate(LocalDate.now());
newMetric.setEntityType(Entity.Metric);
newMetric.setVersion("0");
MetricCrudDTO dtoCrud = new MetricCrudDTO();
dtoCrud.setRequest("create Metric");
dtoCrud.addMetricToList(
modelMapperFactory.getLooseModelMapper().map(metricRepository.save(newMetric), MetricDTO.class));
return dtoCrud;
}
/**
* This method updates a Metric. This is useful when a Metricator modifies the Metric's fields.
* ModelMapper is used to retrieve the old Metric instance.
* @param dto MetricDTO
* @return MetricCrudDTO
*/
@Override
public MetricCrudDTO updateMetric(MetricDTO dto)
throws BadInputException, IllegalStateTransitionException, NotFoundException, DBException {
if (dto == null) {
throw new BadInputException("Bad Input");
}
if (dto.getMetadata().getId() == null) {
throw new BadInputException("Metrics cannot have null ID");
}
/**
*
* Note that an Update will be executed IFF dto contains version number
* equals to version on MongoDB
*
**/
String id = dto.getMetadata().getId();
Metric newMetric = modelMapperFactory.getLooseModelMapper().map(dto, Metric.class);
Metric oldMetric = metricRepository.findMetricById(id);
stateTransition(oldMetric, newMetric);
MetricCrudDTO dtoCrud = new MetricCrudDTO();
dtoCrud.setRequest("update Metric id" + id);
if (oldMetric == null) {
throw new NotFoundException();
}
try {
dtoCrud.addMetricToList(
modelMapperFactory.getLooseModelMapper().map(metricRepository.save(newMetric), MetricDTO.class));
} catch (Exception e) {
throw new DBException("Error in saving, tipically your version is not the last");
}
return dtoCrud;
}
/**
* This method just change state to a Metric. This is useful when a project manager
* just needs to approve/reject a Metric without change the Metric's fields.
* ModelMapper is used just to clone the Metric instance, so the set is made manually.
* @param dto, in which must be id, releaseNote, state
* @return MetricCrudDTO
*/
@Override
public MetricCrudDTO changeStateMetric(MetricDTO dto)
throws BadInputException, IllegalStateTransitionException, NotFoundException, DBException {
if (dto == null) {
throw new BadInputException("Bad Input");
}
if (dto.getMetadata().getId() == null) {
throw new BadInputException("Metrics cannot have null ID");
}
Metric oldMetric = new Metric();
Metric newMetric = metricRepository.findMetricById(dto.getMetadata().getId());//modelMapperFactory.getLooseModelMapper().map(oldMetric, Metric.class);
oldMetric.setState(newMetric.getState());
newMetric.setState(dto.getMetadata().getState());
newMetric.setReleaseNote(dto.getMetadata().getReleaseNote());
oldMetric.setVersion(newMetric.getVersion());
stateTransition(oldMetric, newMetric);
MetricCrudDTO dtoCrud = new MetricCrudDTO();
dtoCrud.setRequest("update Metric id" + dto.getMetadata().getId());
try {
dtoCrud.addMetricToList(
modelMapperFactory.getLooseModelMapper().map(metricRepository.save(newMetric), MetricDTO.class));
} catch (Exception e) {
throw new DBException("Error in saving, tipically your version is not the last");
}
return dtoCrud;
}
    /**
     * Deletes the Metric with the given id. Deletion is only allowed once the
     * metric has been moved to the Suspended state.
     *
     * @param id the metric id; must not be null
     * @throws BadInputException if id is null
     * @throws IllegalStateTransitionException if the metric is not Suspended
     */
    @Override
    public void deleteMetricById(String id) throws BadInputException, IllegalStateTransitionException {
        if (id == null) {
            throw new BadInputException("Bad Input");
        }
        // NOTE(review): findMetricById may return null for an unknown id, which would
        // make the getState() call below throw a NullPointerException — confirm ids
        // are validated upstream or add an existence check.
        if (!metricRepository.findMetricById(id).getState().equals(State.Suspended)) {
            throw new IllegalStateTransitionException("A metric must be Suspended before delete");
        }
        metricRepository.delete(id);
    }
private void stateTransition(Metric oldMetric, Metric newMetric)
throws IllegalStateTransitionException, NotFoundException {
System.out.println(oldMetric.getState() + "->" + newMetric.getState());
newMetric.setLastVersionDate(LocalDate.now());
if (oldMetric.getState().equals(newMetric.getState())) {
return;
}
try {
AbstractStateTransitionFactory.getFactory(Entity.Metric).transition(oldMetric, newMetric).execute();
} catch (Exception e) {
throw new IllegalStateTransitionException(e);
}
}
    /**
     * Counts the metrics of the given user that are in the given state.
     *
     * @param state the name of a {@code State} enum constant; must not be null
     * @param userId the metricator whose metrics are counted
     * @return the number of matching metrics
     * @throws BadInputException if state is null
     */
    @Override
    public long countMetricByState(String state, String userId) throws BadInputException, NotFoundException{
        if (state == null) {
            throw new BadInputException("State cannot be null");
        }
        // NOTE(review): State.valueOf throws an unchecked IllegalArgumentException
        // for an unknown state name — confirm callers pass validated values.
        return metricRepository.countByStateAndMetricatorId(State.valueOf(state), userId);
    }
@Override
public MetricCrudDTO getMetricByStateAndMetricatorId(String state, String userId) throws NotFoundException, BadInputException {
if (state == null) {
throw new BadInputException("Metric state cannot be null");
}
ArrayList<Metric> metrics = metricRepository.findByStateAndMetricatorId(State.valueOf(state),userId);
if (metrics.size() == 0) {
throw new NotFoundException("State " + state + " has no Metrics");
}
MetricCrudDTO dto = new MetricCrudDTO();
dto.setRequest("Metric of " + userId);
Iterator<Metric> metricIter = metrics.iterator();
while (metricIter.hasNext()) {
dto.addMetricToList(modelMapperFactory.getStandardModelMapper().map(metricIter.next(), MetricDTO.class));
}
return dto;
}
@Override
public MetricCrudDTO getMetricByState(String state) throws NotFoundException, BadInputException {
if (state == null) {
throw new BadInputException("Metric state cannot be null");
}
ArrayList<Metric> metrics = metricRepository.findByState(State.valueOf(state));
if (metrics.size() == 0) {
throw new NotFoundException("State " + state + " has no Metrics");
}
MetricCrudDTO dto = new MetricCrudDTO();
Iterator<Metric> metricIter = metrics.iterator();
while (metricIter.hasNext()) {
dto.addMetricToList(modelMapperFactory.getStandardModelMapper().map(metricIter.next(), MetricDTO.class));
}
return dto;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.bean;
import java.lang.annotation.Annotation;
import java.lang.reflect.AccessibleObject;
import java.lang.reflect.AnnotatedElement;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletionStage;
import org.apache.camel.AsyncCallback;
import org.apache.camel.AsyncProcessor;
import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.ExchangePattern;
import org.apache.camel.Expression;
import org.apache.camel.ExpressionEvaluationException;
import org.apache.camel.ExtendedCamelContext;
import org.apache.camel.Message;
import org.apache.camel.NoTypeConversionAvailableException;
import org.apache.camel.Pattern;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.RuntimeExchangeException;
import org.apache.camel.StreamCache;
import org.apache.camel.support.DefaultMessage;
import org.apache.camel.support.ExchangeHelper;
import org.apache.camel.support.ExpressionAdapter;
import org.apache.camel.support.ObjectHelper;
import org.apache.camel.support.service.ServiceHelper;
import org.apache.camel.util.StringHelper;
import org.apache.camel.util.StringQuoteHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.camel.util.ObjectHelper.asList;
import static org.apache.camel.util.ObjectHelper.asString;
/**
* Information about a method to be used for invocation.
*/
public class MethodInfo {
private static final Logger LOG = LoggerFactory.getLogger(MethodInfo.class);
private CamelContext camelContext;
private Class<?> type;
private Method method;
private final List<ParameterInfo> parameters;
private final List<ParameterInfo> bodyParameters;
private final boolean hasCustomAnnotation;
private final boolean hasHandlerAnnotation;
private Expression parametersExpression;
private ExchangePattern pattern = ExchangePattern.InOut;
private AsyncProcessor recipientList;
private AsyncProcessor routingSlip;
private AsyncProcessor dynamicRouter;
    /**
     * Adapter to invoke the method which has been annotated with the @DynamicRouter
     */
    private final class DynamicRouterExpression extends ExpressionAdapter {
        // the bean instance whose annotated method is invoked on each evaluation
        private final Object pojo;
        private DynamicRouterExpression(Object pojo) {
            this.pojo = pojo;
        }
        @Override
        public Object evaluate(Exchange exchange) {
            // evaluate arguments on each invocation as the parameters can have changed/updated since last invocation
            final Object[] arguments = parametersExpression.evaluate(exchange, Object[].class);
            try {
                return invoke(method, pojo, arguments, exchange);
            } catch (Exception e) {
                // rethrow as unchecked so the expression contract is honored
                throw RuntimeCamelException.wrapRuntimeCamelException(e);
            }
        }
        @Override
        public String toString() {
            return "DynamicRouter[invoking: " + method + " on bean: " + pojo + "]";
        }
    }
    /**
     * Creates the method information, pre-computing the parameter binding
     * expression and — when the method is annotated with @RoutingSlip,
     * @DynamicRouter or @RecipientList — the corresponding EIP processor,
     * which is registered as a service so its lifecycle is managed.
     */
    public MethodInfo(CamelContext camelContext, Class<?> type, Method method, List<ParameterInfo> parameters, List<ParameterInfo> bodyParameters,
                      boolean hasCustomAnnotation, boolean hasHandlerAnnotation) {
        this.camelContext = camelContext;
        this.type = type;
        this.method = method;
        this.parameters = parameters;
        this.bodyParameters = bodyParameters;
        this.hasCustomAnnotation = hasCustomAnnotation;
        this.hasHandlerAnnotation = hasHandlerAnnotation;
        this.parametersExpression = createParametersExpression();
        // collect annotations from interfaces/super classes first so the subclass declaration wins
        Map<Class<?>, Annotation> collectedMethodAnnotation = collectMethodAnnotations(type, method);
        // a one-way (@Pattern style) annotation overrides the default InOut MEP
        Pattern oneway = findOneWayAnnotation(method);
        if (oneway != null) {
            pattern = oneway.value();
        }
        org.apache.camel.RoutingSlip routingSlipAnnotation =
            (org.apache.camel.RoutingSlip)collectedMethodAnnotation.get(org.apache.camel.RoutingSlip.class);
        if (routingSlipAnnotation != null && matchContext(routingSlipAnnotation.context())) {
            routingSlip = camelContext.adapt(ExtendedCamelContext.class).getAnnotationBasedProcessorFactory().createRoutingSlip(camelContext, routingSlipAnnotation);
            // add created routingSlip as a service so we have its lifecycle managed
            try {
                camelContext.addService(routingSlip);
            } catch (Exception e) {
                throw RuntimeCamelException.wrapRuntimeCamelException(e);
            }
        }
        org.apache.camel.DynamicRouter dynamicRouterAnnotation =
            (org.apache.camel.DynamicRouter)collectedMethodAnnotation.get(org.apache.camel.DynamicRouter.class);
        if (dynamicRouterAnnotation != null
                && matchContext(dynamicRouterAnnotation.context())) {
            dynamicRouter = camelContext.adapt(ExtendedCamelContext.class).getAnnotationBasedProcessorFactory().createDynamicRouter(camelContext, dynamicRouterAnnotation);
            // add created dynamicRouter as a service so we have its lifecycle managed
            try {
                camelContext.addService(dynamicRouter);
            } catch (Exception e) {
                throw RuntimeCamelException.wrapRuntimeCamelException(e);
            }
        }
        org.apache.camel.RecipientList recipientListAnnotation =
            (org.apache.camel.RecipientList)collectedMethodAnnotation.get(org.apache.camel.RecipientList.class);
        if (recipientListAnnotation != null
                && matchContext(recipientListAnnotation.context())) {
            recipientList = camelContext.adapt(ExtendedCamelContext.class).getAnnotationBasedProcessorFactory().createRecipientList(camelContext, recipientListAnnotation);
            // add created recipientList as a service so we have its lifecycle managed
            try {
                camelContext.addService(recipientList);
            } catch (Exception e) {
                throw RuntimeCamelException.wrapRuntimeCamelException(e);
            }
        }
    }
private Map<Class<?>, Annotation> collectMethodAnnotations(Class<?> c, Method method) {
Map<Class<?>, Annotation> annotations = new HashMap<>();
collectMethodAnnotations(c, method, annotations);
return annotations;
}
    /**
     * Recursively collects the annotations of the given method as declared on the
     * given class, its interfaces and super classes. Interfaces and super classes
     * are visited FIRST so that annotations declared closer to the concrete class
     * overwrite inherited ones in the map.
     */
    private void collectMethodAnnotations(Class<?> c, Method method, Map<Class<?>, Annotation> annotations) {
        for (Class<?> i : c.getInterfaces()) {
            collectMethodAnnotations(i, method, annotations);
        }
        if (!c.isInterface() && c.getSuperclass() != null) {
            collectMethodAnnotations(c.getSuperclass(), method, annotations);
        }
        // make sure the sub class can override the definition
        try {
            Annotation[] ma = c.getDeclaredMethod(method.getName(), method.getParameterTypes()).getAnnotations();
            for (Annotation a : ma) {
                annotations.put(a.annotationType(), a);
            }
        } catch (SecurityException | NoSuchMethodException e) {
            // do nothing here: the method simply is not declared on this type
        }
    }
/**
* Does the given context match this camel context
*/
private boolean matchContext(String context) {
if (org.apache.camel.util.ObjectHelper.isNotEmpty(context)) {
if (!camelContext.getName().equals(context)) {
return false;
}
}
return true;
}
public String toString() {
return method.toString();
}
    /**
     * Creates an invocation of this method on the given bean for the given
     * exchange. Arguments are bound eagerly (once, here) when the method has
     * parameters; the returned MethodInvocation performs the actual call and
     * drives any @DynamicRouter/@RecipientList/@RoutingSlip EIP, honoring the
     * async callback contract (returns true when completed synchronously).
     */
    public MethodInvocation createMethodInvocation(final Object pojo, boolean hasParameters, final Exchange exchange) {
        final Object[] arguments;
        if (hasParameters) {
            arguments = parametersExpression.evaluate(exchange, Object[].class);
        } else {
            arguments = null;
        }
        return new MethodInvocation() {
            public Method getMethod() {
                return method;
            }
            public Object[] getArguments() {
                return arguments;
            }
            public boolean proceed(AsyncCallback callback) {
                Object body = exchange.getIn().getBody();
                if (body instanceof StreamCache) {
                    // ensure the stream cache is reset before calling the method
                    ((StreamCache) body).reset();
                }
                try {
                    return doProceed(callback);
                } catch (InvocationTargetException e) {
                    // unwrap so the caller sees the bean's own exception
                    exchange.setException(e.getTargetException());
                    callback.done(true);
                    return true;
                } catch (Throwable e) {
                    exchange.setException(e);
                    callback.done(true);
                    return true;
                }
            }
            private boolean doProceed(AsyncCallback callback) throws Exception {
                // dynamic router should be invoked beforehand
                if (dynamicRouter != null) {
                    if (!ServiceHelper.isStarted(dynamicRouter)) {
                        ServiceHelper.startService(dynamicRouter);
                    }
                    // TODO: Maybe use a new constant than EVALUATE_EXPRESSION_RESULT
                    // use a expression which invokes the method to be used by dynamic router
                    Expression expression = new DynamicRouterExpression(pojo);
                    exchange.setProperty(Exchange.EVALUATE_EXPRESSION_RESULT, expression);
                    return dynamicRouter.process(exchange, callback);
                }
                // invoke pojo
                if (LOG.isTraceEnabled()) {
                    LOG.trace(">>>> invoking: {} on bean: {} with arguments: {} for exchange: {}", method, pojo, asString(arguments), exchange);
                }
                Object result = invoke(method, pojo, arguments, exchange);
                // the method may be a closure or chained method returning a callable which should be called
                if (result instanceof Callable) {
                    LOG.trace("Method returned Callback which will be called: {}", result);
                    Object callableResult = ((Callable) result).call();
                    if (callableResult != null) {
                        result = callableResult;
                    } else {
                        // if callable returned null we should not change the body
                        result = Void.TYPE;
                    }
                }
                if (recipientList != null) {
                    // ensure its started
                    if (!ServiceHelper.isStarted(recipientList)) {
                        ServiceHelper.startService(recipientList);
                    }
                    exchange.setProperty(Exchange.EVALUATE_EXPRESSION_RESULT, result);
                    return recipientList.process(exchange, callback);
                }
                if (routingSlip != null) {
                    if (!ServiceHelper.isStarted(routingSlip)) {
                        ServiceHelper.startService(routingSlip);
                    }
                    exchange.setProperty(Exchange.EVALUATE_EXPRESSION_RESULT, result);
                    return routingSlip.process(exchange, callback);
                }
                //If it's Java 8 async result
                if (CompletionStage.class.isAssignableFrom(getMethod().getReturnType())) {
                    CompletionStage<?> completionStage = (CompletionStage<?>) result;
                    completionStage
                            .whenComplete((resultObject, e) -> {
                                if (e != null) {
                                    exchange.setException(e);
                                } else if (resultObject != null) {
                                    fillResult(exchange, resultObject);
                                }
                                // completed asynchronously on the stage's thread
                                callback.done(false);
                            });
                    return false;
                }
                // if the method returns something then set the value returned on the Exchange
                if (!getMethod().getReturnType().equals(Void.TYPE) && result != Void.TYPE) {
                    fillResult(exchange, result);
                }
                // we did not use any of the eips, but just invoked the bean
                // so notify the callback we are done synchronously
                callback.done(true);
                return true;
            }
            public Object getThis() {
                return pojo;
            }
            public AccessibleObject getStaticPart() {
                return method;
            }
        };
    }
    /**
     * Stores the bean invocation result on the exchange, on the OUT message when
     * the MEP is out-capable (headers are propagated from IN), otherwise on IN.
     * Specialized message implementations are replaced by a plain DefaultMessage
     * so they are not dragged along the route.
     */
    private void fillResult(Exchange exchange, Object result) {
        LOG.trace("Setting bean invocation result : {}", result);
        // the bean component forces OUT if the MEP is OUT capable
        boolean out = ExchangeHelper.isOutCapable(exchange) || exchange.hasOut();
        Message old;
        if (out) {
            old = exchange.getOut();
            // propagate headers
            exchange.getOut().getHeaders().putAll(exchange.getIn().getHeaders());
        } else {
            old = exchange.getIn();
        }
        // create a new message container so we do not drag specialized message objects along
        // but that is only needed if the old message is a specialized message
        boolean copyNeeded = !(old.getClass().equals(DefaultMessage.class));
        if (copyNeeded) {
            Message msg = new DefaultMessage(exchange.getContext());
            msg.copyFromWithNewBody(old, result);
            // replace message on exchange
            ExchangeHelper.replaceMessage(exchange, msg, false);
        } else {
            // no copy needed so set replace value directly
            old.setBody(result);
        }
    }
    /** @return the bean class this method belongs to */
    public Class<?> getType() {
        return type;
    }
    /** @return the underlying reflection method */
    public Method getMethod() {
        return method;
    }
    /**
     * Returns the {@link org.apache.camel.ExchangePattern} that should be used when invoking this method. This value
     * defaults to {@link org.apache.camel.ExchangePattern#InOut} unless some {@link org.apache.camel.Pattern} annotation is used
     * to override the message exchange pattern.
     *
     * @return the exchange pattern to use for invoking this method.
     */
    public ExchangePattern getPattern() {
        return pattern;
    }
    /** @return the pre-computed expression that binds all method parameters */
    public Expression getParametersExpression() {
        return parametersExpression;
    }
    /** @return the parameters bound to the message body */
    public List<ParameterInfo> getBodyParameters() {
        return bodyParameters;
    }
    /** @return the declared type of the first body parameter, or null when there is none */
    public Class<?> getBodyParameterType() {
        if (bodyParameters.isEmpty()) {
            return null;
        }
        ParameterInfo parameterInfo = bodyParameters.get(0);
        return parameterInfo.getType();
    }
    /** @return true if the given body type is assignable to this method's body parameter */
    public boolean bodyParameterMatches(Class<?> bodyType) {
        Class<?> actualType = getBodyParameterType();
        return actualType != null && org.apache.camel.util.ObjectHelper.isAssignableFrom(bodyType, actualType);
    }
    /** @return all parameter descriptors of this method */
    public List<ParameterInfo> getParameters() {
        return parameters;
    }
    /** @return true if at least one parameter is bound to the message body */
    public boolean hasBodyParameter() {
        return !bodyParameters.isEmpty();
    }
    /** @return true if the method was marked as carrying a custom binding annotation */
    public boolean hasCustomAnnotation() {
        return hasCustomAnnotation;
    }
    /** @return true if the method was marked as carrying the handler annotation */
    public boolean hasHandlerAnnotation() {
        return hasHandlerAnnotation;
    }
    /** @return true if the method declares any parameters */
    public boolean hasParameters() {
        return !parameters.isEmpty();
    }
    /** @return true if the method's return type is void */
    public boolean isReturnTypeVoid() {
        return method.getReturnType().getName().equals("void");
    }
    /** @return true if the method is static */
    public boolean isStaticMethod() {
        return Modifier.isStatic(method.getModifiers());
    }
    /**
     * Returns true if this method is covariant with the specified method
     * (this method may above or below the specified method in the class hierarchy)
     */
    public boolean isCovariantWith(MethodInfo method) {
        // same name, return types assignable in either direction, and identical
        // parameter lists (deepEquals compares the Class[] element-wise)
        return
            method.getMethod().getName().equals(this.getMethod().getName())
            && (method.getMethod().getReturnType().isAssignableFrom(this.getMethod().getReturnType())
            || this.getMethod().getReturnType().isAssignableFrom(method.getMethod().getReturnType()))
            && Arrays.deepEquals(method.getMethod().getParameterTypes(), this.getMethod().getParameterTypes());
    }
    /**
     * Invokes the given method on the pojo with the given arguments, translating
     * reflection access/argument failures into RuntimeExchangeException so the
     * exchange context is preserved. InvocationTargetException (the bean's own
     * exception) is propagated to the caller for unwrapping.
     */
    protected Object invoke(Method mth, Object pojo, Object[] arguments, Exchange exchange) throws InvocationTargetException {
        try {
            return ObjectHelper.invokeMethodSafe(mth, pojo, arguments);
        } catch (IllegalAccessException e) {
            throw new RuntimeExchangeException("IllegalAccessException occurred invoking method: " + mth + " using arguments: " + asList(arguments), exchange, e);
        } catch (IllegalArgumentException e) {
            throw new RuntimeExchangeException("IllegalArgumentException occurred invoking method: " + mth + " using arguments: " + asList(arguments), exchange, e);
        }
    }
protected Expression[] createParameterExpressions() {
final int size = parameters.size();
LOG.trace("Creating parameters expression for {} parameters", size);
final Expression[] expressions = new Expression[size];
for (int i = 0; i < size; i++) {
Expression parameterExpression = parameters.get(i).getExpression();
expressions[i] = parameterExpression;
LOG.trace("Parameter #{} has expression: {}", i, parameterExpression);
}
return expressions;
}
    /**
     * Creates the composite expression that evaluates all parameter bindings and
     * yields the Object[] of arguments for an invocation.
     */
    protected Expression createParametersExpression() {
        return new ParameterExpression(createParameterExpressions());
    }
    /**
     * Finds the oneway annotation in priority order; look for method level annotations first, then the class level annotations,
     * then super class annotations then interface annotations
     *
     * @param method the method on which to search
     * @return the first matching annotation or none if it is not available
     */
    protected Pattern findOneWayAnnotation(Method method) {
        Pattern answer = getPatternAnnotation(method);
        if (answer == null) {
            Class<?> type = method.getDeclaringClass();
            // create the search order of types to scan
            List<Class<?>> typesToSearch = new ArrayList<>();
            addTypeAndSuperTypes(type, typesToSearch);
            Class<?>[] interfaces = type.getInterfaces();
            for (Class<?> anInterface : interfaces) {
                addTypeAndSuperTypes(anInterface, typesToSearch);
            }
            // now let's scan for a type which the current declared class overloads
            // (method-level declarations win over class-level ones)
            answer = findOneWayAnnotationOnMethod(typesToSearch, method);
            if (answer == null) {
                answer = findOneWayAnnotation(typesToSearch);
            }
        }
        return answer;
    }
    /**
     * Returns the pattern annotation on the given annotated element; either as a direct annotation or
     * on an annotation which is also annotated
     *
     * @param annotatedElement the element to look for the annotation
     * @return the first matching annotation or null if none could be found
     */
    protected Pattern getPatternAnnotation(AnnotatedElement annotatedElement) {
        // depth 2 = the element itself plus one level of meta-annotations
        return getPatternAnnotation(annotatedElement, 2);
    }
/**
* Returns the pattern annotation on the given annotated element; either as a direct annotation or
* on an annotation which is also annotated
*
* @param annotatedElement the element to look for the annotation
* @param depth the current depth
* @return the first matching annotation or null if none could be found
*/
protected Pattern getPatternAnnotation(AnnotatedElement annotatedElement, int depth) {
Pattern answer = annotatedElement.getAnnotation(Pattern.class);
int nextDepth = depth - 1;
if (nextDepth > 0) {
// look at all the annotations to see if any of those are annotated
Annotation[] annotations = annotatedElement.getAnnotations();
for (Annotation annotation : annotations) {
Class<? extends Annotation> annotationType = annotation.annotationType();
if (annotation instanceof Pattern || annotationType.equals(annotatedElement)) {
continue;
} else {
Pattern another = getPatternAnnotation(annotationType, nextDepth);
if (pattern != null) {
if (answer == null) {
answer = another;
} else {
LOG.warn("Duplicate pattern annotation: {} found on annotation: {} which will be ignored", another, annotation);
}
}
}
}
}
return answer;
}
/**
* Adds the current class and all of its base classes (apart from {@link Object} to the given list
*/
protected void addTypeAndSuperTypes(Class<?> type, List<Class<?>> result) {
for (Class<?> t = type; t != null && t != Object.class; t = t.getSuperclass()) {
result.add(t);
}
}
    /**
     * Finds the first annotation on the base methods defined in the list of classes
     */
    protected Pattern findOneWayAnnotationOnMethod(List<Class<?>> classes, Method method) {
        for (Class<?> type : classes) {
            try {
                Method definedMethod = type.getMethod(method.getName(), method.getParameterTypes());
                Pattern answer = getPatternAnnotation(definedMethod);
                if (answer != null) {
                    return answer;
                }
            } catch (NoSuchMethodException e) {
                // ignore: the type simply does not declare this method
            }
        }
        return null;
    }
/**
* Finds the first annotation on the given list of classes
*/
protected Pattern findOneWayAnnotation(List<Class<?>> classes) {
for (Class<?> type : classes) {
Pattern answer = getPatternAnnotation(type);
if (answer != null) {
return answer;
}
}
return null;
}
protected boolean hasExceptionParameter() {
for (ParameterInfo parameter : parameters) {
if (Exception.class.isAssignableFrom(parameter.getType())) {
return true;
}
}
return false;
}
/**
* Expression to evaluate the bean parameter parameters and provide the correct values when the method is invoked.
*/
private final class ParameterExpression implements Expression {
private final Expression[] expressions;
ParameterExpression(Expression[] expressions) {
this.expressions = expressions;
}
@SuppressWarnings("unchecked")
public <T> T evaluate(Exchange exchange, Class<T> type) {
Object body = exchange.getIn().getBody();
// if there was an explicit method name to invoke, then we should support using
// any provided parameter values in the method name
String methodName = exchange.getIn().getHeader(Exchange.BEAN_METHOD_NAME, String.class);
// the parameter values is between the parenthesis
String methodParameters = StringHelper.betweenOuterPair(methodName, '(', ')');
// use an iterator to walk the parameter values
Iterator<?> it = null;
if (methodParameters != null) {
// split the parameters safely separated by comma, but beware that we can have
// quoted parameters which contains comma as well, so do a safe quote split
String[] parameters = StringQuoteHelper.splitSafeQuote(methodParameters, ',', true);
it = ObjectHelper.createIterator(parameters, ",", true);
}
// remove headers as they should not be propagated
// we need to do this before the expressions gets evaluated as it may contain
// a @Bean expression which would by mistake read these headers. So the headers
// must be removed at this point of time
if (methodName != null) {
exchange.getIn().removeHeader(Exchange.BEAN_METHOD_NAME);
}
Object[] answer = evaluateParameterExpressions(exchange, body, it);
return (T) answer;
}
/**
* Evaluates all the parameter expressions
*/
private Object[] evaluateParameterExpressions(Exchange exchange, Object body, Iterator<?> it) {
Object[] answer = new Object[expressions.length];
for (int i = 0; i < expressions.length; i++) {
if (body instanceof StreamCache) {
// need to reset stream cache for each expression as you may access the message body in multiple parameters
((StreamCache) body).reset();
}
// grab the parameter value for the given index
Object parameterValue = it != null && it.hasNext() ? it.next() : null;
// and the expected parameter type
Class<?> parameterType = parameters.get(i).getType();
// the value for the parameter to use
Object value = null;
// prefer to use parameter value if given, as they override any bean parameter binding
// we should skip * as its a type placeholder to indicate any type
if (parameterValue != null && !parameterValue.equals("*")) {
// evaluate the parameter value binding
value = evaluateParameterValue(exchange, i, parameterValue, parameterType);
}
// use bean parameter binding, if still no value
Expression expression = expressions[i];
if (value == null && expression != null) {
value = evaluateParameterBinding(exchange, expression, i, parameterType);
}
// remember the value to use
if (value != Void.TYPE) {
answer[i] = value;
}
}
return answer;
}
/**
* Evaluate using parameter values where the values can be provided in the method name syntax.
* <p/>
* This methods returns accordingly:
* <ul>
* <li><tt>null</tt> - if not a parameter value</li>
* <li><tt>Void.TYPE</tt> - if an explicit null, forcing Camel to pass in <tt>null</tt> for that given parameter</li>
* <li>a non <tt>null</tt> value - if the parameter was a parameter value, and to be used</li>
* </ul>
*
* @since 2.9
*/
private Object evaluateParameterValue(Exchange exchange, int index, Object parameterValue, Class<?> parameterType) {
Object answer = null;
// convert the parameter value to a String
String exp = exchange.getContext().getTypeConverter().convertTo(String.class, exchange, parameterValue);
if (exp != null) {
// check if its a valid parameter value
boolean valid = BeanHelper.isValidParameterValue(exp);
if (!valid) {
// it may be a parameter type instead, and if so, then we should return null,
// as this method is only for evaluating parameter values
Boolean isClass = BeanHelper.isAssignableToExpectedType(exchange.getContext().getClassResolver(), exp, parameterType);
// the method will return a non null value if exp is a class
if (isClass != null) {
return null;
}
}
// use simple language to evaluate the expression, as it may use the simple language to refer to message body, headers etc.
Expression expression = null;
try {
expression = exchange.getContext().resolveLanguage("simple").createExpression(exp);
parameterValue = expression.evaluate(exchange, Object.class);
// use "null" to indicate the expression returned a null value which is a valid response we need to honor
if (parameterValue == null) {
parameterValue = "null";
}
} catch (Exception e) {
throw new ExpressionEvaluationException(expression, "Cannot create/evaluate simple expression: " + exp
+ " to be bound to parameter at index: " + index + " on method: " + getMethod(), exchange, e);
}
// special for explicit null parameter values (as end users can explicit indicate they want null as parameter)
// see method javadoc for details
if ("null".equals(parameterValue)) {
return Void.TYPE;
}
// the parameter value may match the expected type, then we use it as-is
if (parameterType.isAssignableFrom(parameterValue.getClass())) {
valid = true;
} else {
// the parameter value was not already valid, but since the simple language have evaluated the expression
// which may change the parameterValue, so we have to check it again to see if its now valid
exp = exchange.getContext().getTypeConverter().tryConvertTo(String.class, parameterValue);
// String values from the simple language is always valid
if (!valid) {
// re validate if the parameter was not valid the first time (String values should be accepted)
valid = parameterValue instanceof String || BeanHelper.isValidParameterValue(exp);
}
}
if (valid) {
// we need to unquote String parameters, as the enclosing quotes is there to denote a parameter value
if (parameterValue instanceof String) {
parameterValue = StringHelper.removeLeadingAndEndingQuotes((String) parameterValue);
}
if (parameterValue != null) {
try {
// its a valid parameter value, so convert it to the expected type of the parameter
answer = exchange.getContext().getTypeConverter().mandatoryConvertTo(parameterType, exchange, parameterValue);
if (LOG.isTraceEnabled()) {
LOG.trace("Parameter #{} evaluated as: {} type: {}", index, answer, org.apache.camel.util.ObjectHelper.type(answer));
}
} catch (Exception e) {
if (LOG.isDebugEnabled()) {
LOG.debug("Cannot convert from type: {} to type: {} for parameter #{}", org.apache.camel.util.ObjectHelper.type(parameterValue), parameterType, index);
}
throw new ParameterBindingException(e, method, index, parameterType, parameterValue);
}
}
}
}
return answer;
}
/**
 * Evaluates the pre-computed expression bound to a method parameter (classic
 * parameter binding) and converts the outcome to the declared parameter type.
 *
 * @param exchange      the current exchange, used both for evaluation and for type conversion
 * @param expression    the pre-computed expression for this parameter
 * @param index         the position of the parameter on the method (used for logging/errors)
 * @param parameterType the declared type of the parameter
 * @return the converted value, or {@code null} when the expression evaluated to {@code null}
 * @throws ParameterBindingException when the evaluated value cannot be converted to the parameter type
 */
private Object evaluateParameterBinding(Exchange exchange, Expression expression, int index, Class<?> parameterType) {
    // evaluate as Object first so we can tell "no value" apart from a value needing conversion
    Object evaluated = expression.evaluate(exchange, Object.class);
    if (evaluated == null) {
        LOG.trace("Parameter #{} evaluated as null", index);
        return null;
    }
    try {
        // skip type conversion entirely when the value already has the expected type
        Object bound = parameterType.isInstance(evaluated)
                ? evaluated
                : exchange.getContext().getTypeConverter().mandatoryConvertTo(parameterType, evaluated);
        if (LOG.isTraceEnabled()) {
            LOG.trace("Parameter #{} evaluated as: {} type: {}", index, bound, org.apache.camel.util.ObjectHelper.type(bound));
        }
        return bound;
    } catch (NoTypeConversionAvailableException e) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Cannot convert from type: {} to type: {} for parameter #{}", org.apache.camel.util.ObjectHelper.type(evaluated), parameterType, index);
        }
        throw new ParameterBindingException(e, method, index, parameterType, evaluated);
    }
}
@Override
public String toString() {
    // render the bound parameter expressions as a list for easier debugging
    return "ParametersExpression: ".concat(Arrays.asList(expressions).toString());
}
}
}
| |
/*
* Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership. Crate licenses
* this file to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial agreement.
*/
package io.crate.planner.consumer;
import io.crate.analyze.relations.AnalyzedRelation;
import io.crate.operation.operator.AndOperator;
import io.crate.planner.symbol.*;
import javax.annotation.Nullable;
import java.util.*;
/**
 * Splits a query {@link Symbol} tree into the part that references only a single
 * {@link AnalyzedRelation} (and can therefore be pushed down to it) and the remaining
 * part that also touches other relations.
 *
 * <p>Splitting is done in two passes: {@link RelationFieldCounter} first annotates every
 * symbol with how many fields beneath it belong to the target relation vs. other
 * relations, then {@link InternalSplitter} uses those counts to extract the pushable
 * parts, replacing them with {@code true} literals in the remaining query.
 */
public class QuerySplitter {

    // both visitors are stateless; all mutable state lives in their context objects
    private final static InternalSplitter INTERNAL_SPLITTER = new InternalSplitter();
    private final static RelationFieldCounter RELATION_FIELD_COUNTER = new RelationFieldCounter();

    /** Result holder for {@link #splitForRelation(AnalyzedRelation, Symbol)}. */
    public static class SplitQueries {
        private Symbol relationQuery;
        private Symbol remainingQuery;

        private SplitQueries() {}

        /**
         *
         * @return the query for the given relation. Null if no part could be split-off
         */
        @Nullable
        public Symbol relationQuery() {
            return relationQuery;
        }

        // the original query with every split-off part replaced by a `true` literal
        public Symbol remainingQuery() {
            return remainingQuery;
        }
    }

    /**
     * splits the given query into two queries. One that can be executed by the given relation and another one which
     * contains all the remaining parts that can't be executed.
     */
    public static SplitQueries splitForRelation(AnalyzedRelation analyzedRelation, Symbol query) {
        // pass 1: count per-symbol how many fields belong to analyzedRelation vs. others
        RelationFieldCounterCtx relationFieldCounterCtx = new RelationFieldCounterCtx(analyzedRelation);
        RELATION_FIELD_COUNTER.process(query, relationFieldCounterCtx);

        // pass 2: extract the parts with no foreign fields; the result is the remaining query
        InternalSplitterCtx internalSplitterCtx = new InternalSplitterCtx(analyzedRelation, relationFieldCounterCtx.countMap);
        Symbol remainingQuery = INTERNAL_SPLITTER.process(query, internalSplitterCtx);

        SplitQueries splitQueries = new SplitQueries();
        splitQueries.remainingQuery = remainingQuery;
        if (!internalSplitterCtx.relationQueryParts.isEmpty()) {
            // re-join the extracted parts with AND into a single relation query
            splitQueries.relationQuery = joinQueryParts(internalSplitterCtx.relationQueryParts);
        }
        return splitQueries;
    }

    /**
     * Runs only the counting pass and returns the {@link RelationCount} recorded for the
     * given (top-level) symbol, or {@code null} if none was recorded for it.
     */
    public static RelationCount getRelationCount(AnalyzedRelation analyzedRelation, Symbol symbol) {
        RelationFieldCounterCtx relationFieldCounterCtx = new RelationFieldCounterCtx(analyzedRelation);
        RELATION_FIELD_COUNTER.process(symbol, relationFieldCounterCtx);
        return relationFieldCounterCtx.countMap.get(symbol);
    }

    /**
     * Combines the given query parts into a single symbol by AND-ing them together.
     * More than two parts are joined as a right-nested chain of binary AND functions.
     */
    private static Symbol joinQueryParts(List<Symbol> queryParts) {
        if (queryParts.size() == 1) {
            return queryParts.get(0);
        }
        if (queryParts.size() == 2) {
            return new Function(AndOperator.INFO, queryParts);
        }
        // AND(first, AND(rest...)) — recursion shrinks the list by one each step
        return new Function(AndOperator.INFO,
                Arrays.asList(
                        queryParts.get(0),
                        joinQueryParts(queryParts.subList(1, queryParts.size()))));
    }

    /** Mutable traversal state for {@link RelationFieldCounter}. */
    public static class RelationFieldCounterCtx {
        AnalyzedRelation analyzedRelation;
        // tracks the current function nesting; empty means the visited symbol is top-level
        Stack<Symbol> parents = new Stack<>();
        // identity-keyed: the same symbol instance may occur in several places in a tree
        IdentityHashMap<Symbol, RelationCount> countMap = new IdentityHashMap<>();

        public RelationFieldCounterCtx(AnalyzedRelation analyzedRelation) {
            this.analyzedRelation = analyzedRelation;
        }
    }

    /** Tally of fields under a symbol: on the target relation vs. on any other relation. */
    public static class RelationCount {
        int numThis = 0;
        int numOther = 0;

        private RelationCount() {
        }

        private RelationCount(int numThis, int numOther) {
            this.numThis = numThis;
            this.numOther = numOther;
        }
    }

    /**
     * A Visitor that will generate a map with information on how many symbols which reference other relations
     * are beneath a symbol.
     *
     * E.g.
     * where ( t1.x = 1 or t1.x = 2) and (t2.x = 3 or (t2.x = 4 or t2.x = 5))
     *
     *
     * AND 1
     * OR 2 OR 3
     * t1.x = 1 t1.x = 2 t2.x = 3 OR 4
     * t2.x = 4 t2.x = 5
     *
     * For t1:
     * AND 1
     * numThis: 2
     * numOther: 3
     *
     * OR 2:
     * numThis: 2
     * numOther: 0
     *
     * and so on..
     */
    private static class RelationFieldCounter extends SymbolVisitor<RelationFieldCounterCtx, RelationCount> {

        @Override
        public RelationCount visitFunction(Function function, RelationFieldCounterCtx context) {
            // a function's count is the sum of its arguments' counts
            RelationCount relationCount = new RelationCount();
            context.parents.push(function);
            for (Symbol argument : function.arguments()) {
                RelationCount childCounts = process(argument, context);
                relationCount.numOther += childCounts.numOther;
                relationCount.numThis += childCounts.numThis;
            }
            context.parents.pop();
            // every function is recorded, regardless of nesting depth
            context.countMap.put(function, relationCount);
            return relationCount;
        }

        @Override
        public RelationCount visitField(Field field, RelationFieldCounterCtx context) {
            RelationCount relationCount;
            if (field.relation() == context.analyzedRelation) {
                relationCount = new RelationCount(1, 0);
            } else {
                relationCount = new RelationCount(0, 1);
            }
            // leaves are only recorded when top-level (e.g. "where t1.bool_field");
            // nested leaves are accounted for via their parent function's sum
            if (context.parents.isEmpty()) {
                context.countMap.put(field, relationCount);
            }
            return relationCount;
        }

        @Override
        public RelationCount visitMatchPredicate(MatchPredicate matchPredicate, RelationFieldCounterCtx context) {
            int numThis = 0;
            int numOther = 0;
            for (Field field : matchPredicate.identBoostMap().keySet()) {
                if (field.relation() == context.analyzedRelation) {
                    numThis++;
                } else {
                    numOther++;
                }
            }
            // a MATCH predicate cannot be split, so mixing relations inside it is an error
            if (numOther > 0 && numThis > 0) {
                throw new IllegalArgumentException("Must not use columns from more than 1 relation inside the MATCH predicate");
            }
            RelationCount relationCount = new RelationCount(numThis, numOther);
            if (context.parents.isEmpty()) {
                context.countMap.put(matchPredicate, relationCount);
            }
            return relationCount;
        }

        @Override
        protected RelationCount visitSymbol(Symbol symbol, RelationFieldCounterCtx context) {
            // fallback for symbols without fields (literals etc.): counts are zero
            RelationCount relationCount = new RelationCount(0, 0);
            if (context.parents.isEmpty()) {
                context.countMap.put(symbol, relationCount);
            }
            return relationCount;
        }
    }

    /** Mutable traversal state for {@link InternalSplitter}. */
    private static class InternalSplitterCtx {
        // counts produced by the RelationFieldCounter pass (identity-keyed)
        final IdentityHashMap<Symbol, RelationCount> countMap;
        final AnalyzedRelation relation;

        // the extracted parts that reference only `relation`; AND-ed together by the caller
        List<Symbol> relationQueryParts = new ArrayList<>();
        // true while descending into an AND argument, so visitSymbol does not re-extract leaves
        boolean insideFunction = false;

        public InternalSplitterCtx(AnalyzedRelation relation, IdentityHashMap<Symbol, RelationCount> countMap) {
            this.relation = relation;
            this.countMap = countMap;
        }
    }

    /**
     * Uses the Information generated by the RelationFieldCounter to split the query.
     */
    private static class InternalSplitter extends SymbolVisitor<InternalSplitterCtx, Symbol> {

        @Override
        public Symbol visitFunction(Function function, InternalSplitterCtx context) {
            RelationCount relationCount = context.countMap.get(function);
            if (relationCount.numOther == 0) {
                // the whole subtree only touches the target relation: extract it
                // and replace it with `true` in the remaining query
                context.relationQueryParts.add(function);
                return Literal.newLiteral(true);
            } else {
                if (AndOperator.NAME.equals(function.info().ident().name())) {
                    // AND is the only function whose arguments can be split independently
                    List<Symbol> newArgs = new ArrayList<>(function.arguments().size());
                    for (Symbol argument : function.arguments()) {
                        if (!argument.symbolType().isValueSymbol()) {
                            RelationCount argumentCount = context.countMap.get(argument);
                            assert argumentCount != null : "relationCount for argument must be available";
                            if (argumentCount.numOther == 0) {
                                context.relationQueryParts.add(argument);
                                argument = Literal.newLiteral(true);
                            } else {
                                context.insideFunction = true;
                                argument = process(argument, context);
                                context.insideFunction = false;
                            }
                        }
                        newArgs.add(argument);
                    }
                    return new Function(function.info(), newArgs);
                } else {
                    // non-AND functions mixing relations cannot be split; keep as-is
                    return function;
                }
            }
        }

        @Override
        protected Symbol visitSymbol(Symbol symbol, InternalSplitterCtx context) {
            // for cases like: "where t1.bool_field" or "where match (t1.name...)"
            if (!context.insideFunction) {
                RelationCount relationCount = context.countMap.get(symbol);
                assert relationCount != null : "relation count must be available";
                if (relationCount.numOther == 0) {
                    context.relationQueryParts.add(symbol);
                    return Literal.newLiteral(true);
                }
            }
            return symbol;
        }
    }
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.rest.service.api.history;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpStatus;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.flowable.engine.runtime.ProcessInstance;
import org.flowable.engine.task.Task;
import org.flowable.engine.test.Deployment;
import org.flowable.rest.service.BaseSpringRestTestCase;
import org.flowable.rest.service.api.RestUrls;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.util.ISO8601DateFormat;
/**
* Test for REST-operation related to the historic task instance query resource.
*
* @author Tijs Rademakers
*/
public class HistoricTaskInstanceQueryResourceTest extends BaseSpringRestTestCase {

    // ISO8601DateFormat is used to serialize the query date parameters in the same
    // format the REST API expects
    protected ISO8601DateFormat dateFormat = new ISO8601DateFormat();

    /**
     * Test querying historic task instance. POST query/historic-task-instances
     */
    @Deployment
    public void testQueryTaskInstances() throws Exception {
        HashMap<String, Object> processVariables = new HashMap<String, Object>();
        processVariables.put("stringVar", "Azerty");
        processVariables.put("intVar", 67890);
        processVariables.put("booleanVar", false);

        // process 1: complete the first task (fixed completion time), leave the second open
        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess", "myBusinessKey", processVariables);
        processEngineConfiguration.getClock().setCurrentTime(new GregorianCalendar(2013, 0, 1).getTime());
        Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        Task finishedTaskProcess1 = task;
        taskService.complete(task.getId());
        processEngineConfiguration.getClock().setCurrentTime(null);

        // second (open) task of process 1 gets a local variable, owner and due date
        task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        taskService.setVariableLocal(task.getId(), "local", "test");
        taskService.setOwner(task.getId(), "test");
        taskService.setDueDate(task.getId(), new GregorianCalendar(2013, 0, 1).getTime());

        // process 2: one open task, no business key
        ProcessInstance processInstance2 = runtimeService.startProcessInstanceByKey("oneTaskProcess", processVariables);
        Task task2 = taskService.createTaskQuery().processInstanceId(processInstance2.getId()).singleResult();

        String url = RestUrls.createRelativeResourceUrl(RestUrls.URL_HISTORIC_TASK_INSTANCE_QUERY);

        // Process variables
        ObjectNode requestNode = objectMapper.createObjectNode();
        ArrayNode variableArray = objectMapper.createArrayNode();
        ObjectNode variableNode = objectMapper.createObjectNode();
        variableArray.add(variableNode);
        requestNode.set("processVariables", variableArray);

        variableNode.put("name", "stringVar");
        variableNode.put("value", "Azerty");
        variableNode.put("operation", "equals");
        assertResultsPresentInPostDataResponse(url, requestNode, 3, task.getId(), task2.getId());

        variableNode.put("name", "intVar");
        variableNode.put("value", 67890);
        variableNode.put("operation", "equals");
        assertResultsPresentInPostDataResponse(url, requestNode, 3, task.getId(), task2.getId());

        variableNode.put("name", "intVar");
        variableNode.put("value", 67891);
        variableNode.put("operation", "lessThan");
        assertResultsPresentInPostDataResponse(url, requestNode, 3, task.getId(), task2.getId());

        // lessThan is strict, so the exact value must not match
        variableNode.put("name", "intVar");
        variableNode.put("value", 67890);
        variableNode.put("operation", "lessThan");
        assertResultsPresentInPostDataResponse(url, requestNode);

        variableNode.put("name", "intVar");
        variableNode.put("value", 67890);
        variableNode.put("operation", "lessThanOrEquals");
        assertResultsPresentInPostDataResponse(url, requestNode, 3, task.getId(), task2.getId());

        variableNode.put("name", "intVar");
        variableNode.put("value", 67889);
        variableNode.put("operation", "greaterThan");
        assertResultsPresentInPostDataResponse(url, requestNode, 3, task.getId(), task2.getId());

        // greaterThan is strict, so the exact value must not match
        variableNode.put("name", "intVar");
        variableNode.put("value", 67890);
        variableNode.put("operation", "greaterThan");
        assertResultsPresentInPostDataResponse(url, requestNode);

        variableNode.put("name", "intVar");
        variableNode.put("value", 67890);
        variableNode.put("operation", "greaterThanOrEquals");
        assertResultsPresentInPostDataResponse(url, requestNode, 3, task.getId(), task2.getId());

        variableNode.put("name", "stringVar");
        variableNode.put("value", "Azer%");
        variableNode.put("operation", "like");
        assertResultsPresentInPostDataResponse(url, requestNode, 3, task.getId(), task2.getId());

        // "local" is a task-local variable, so a processVariables query must not find it
        variableNode.put("name", "local");
        variableNode.put("value", "test");
        variableNode.put("operation", "equals");
        assertResultsPresentInPostDataResponse(url, requestNode, 0);

        // Task variables
        requestNode = objectMapper.createObjectNode();
        variableArray = objectMapper.createArrayNode();
        variableNode = objectMapper.createObjectNode();
        variableArray.add(variableNode);
        requestNode.set("taskVariables", variableArray);
        variableNode.put("name", "local");
        variableNode.put("value", "test");
        variableNode.put("operation", "equals");
        assertResultsPresentInPostDataResponse(url, requestNode, 1, task.getId());

        // Empty query returns all three historic tasks
        requestNode = objectMapper.createObjectNode();
        assertResultsPresentInPostDataResponse(url, requestNode, 3, task.getId(), task2.getId());

        requestNode = objectMapper.createObjectNode();
        requestNode.put("processInstanceId", processInstance.getId());
        assertResultsPresentInPostDataResponse(url, requestNode, 2, task.getId());

        requestNode = objectMapper.createObjectNode();
        requestNode.put("processInstanceId", processInstance2.getId());
        assertResultsPresentInPostDataResponse(url, requestNode, 1, task2.getId());

        requestNode = objectMapper.createObjectNode();
        requestNode.put("taskAssignee", "kermit");
        assertResultsPresentInPostDataResponse(url, requestNode, 2, task2.getId());

        requestNode = objectMapper.createObjectNode();
        requestNode.put("taskAssigneeLike", "%mit");
        assertResultsPresentInPostDataResponse(url, requestNode, 2, task2.getId());

        requestNode = objectMapper.createObjectNode();
        requestNode.put("taskAssignee", "fozzie");
        assertResultsPresentInPostDataResponse(url, requestNode, 1, task.getId());

        requestNode = objectMapper.createObjectNode();
        requestNode.put("taskOwner", "test");
        assertResultsPresentInPostDataResponse(url, requestNode, 1, task.getId());

        requestNode = objectMapper.createObjectNode();
        requestNode.put("taskOwnerLike", "t%");
        assertResultsPresentInPostDataResponse(url, requestNode, 1, task.getId());

        requestNode = objectMapper.createObjectNode();
        requestNode.put("taskInvolvedUser", "test");
        assertResultsPresentInPostDataResponse(url, requestNode, 1, task.getId());

        // Due date range checks around the 2013-01-01 due date set above
        requestNode = objectMapper.createObjectNode();
        requestNode.put("dueDateAfter", dateFormat.format(new GregorianCalendar(2010, 0, 1).getTime()));
        assertResultsPresentInPostDataResponse(url, requestNode, 1, task.getId());

        requestNode = objectMapper.createObjectNode();
        requestNode.put("dueDateAfter", dateFormat.format(new GregorianCalendar(2013, 4, 1).getTime()));
        assertResultsPresentInPostDataResponse(url, requestNode, 0);

        requestNode = objectMapper.createObjectNode();
        requestNode.put("dueDateBefore", dateFormat.format(new GregorianCalendar(2010, 0, 1).getTime()));
        assertResultsPresentInPostDataResponse(url, requestNode, 0);

        requestNode = objectMapper.createObjectNode();
        requestNode.put("dueDateBefore", dateFormat.format(new GregorianCalendar(2013, 4, 1).getTime()));
        assertResultsPresentInPostDataResponse(url, requestNode, 1, task.getId());

        // Completion time range checks around the 2013-01-01 completion of finishedTaskProcess1
        requestNode = objectMapper.createObjectNode();
        requestNode.put("taskCompletedAfter", dateFormat.format(new GregorianCalendar(2010, 0, 1).getTime()));
        assertResultsPresentInPostDataResponse(url, requestNode, 1, finishedTaskProcess1.getId());

        requestNode = objectMapper.createObjectNode();
        requestNode.put("taskCompletedAfter", dateFormat.format(new GregorianCalendar(2013, 4, 1).getTime()));
        assertResultsPresentInPostDataResponse(url, requestNode, 0);

        requestNode = objectMapper.createObjectNode();
        requestNode.put("taskCompletedBefore", dateFormat.format(new GregorianCalendar(2010, 0, 1).getTime()));
        assertResultsPresentInPostDataResponse(url, requestNode, 0);

        requestNode = objectMapper.createObjectNode();
        requestNode.put("taskCompletedAfter", dateFormat.format(new GregorianCalendar(2010, 3, 1).getTime()));
        assertResultsPresentInPostDataResponse(url, requestNode, 1, finishedTaskProcess1.getId());

        // Only process 1 was started with a business key
        requestNode = objectMapper.createObjectNode();
        requestNode.put("processBusinessKey", "myBusinessKey");
        assertResultsPresentInPostDataResponse(url, requestNode, 2, task.getId(), finishedTaskProcess1.getId());

        requestNode = objectMapper.createObjectNode();
        requestNode.put("processBusinessKeyLike", "myBusiness%");
        assertResultsPresentInPostDataResponse(url, requestNode, 2, task.getId(), finishedTaskProcess1.getId());

        requestNode = objectMapper.createObjectNode();
        requestNode.put("processDefinitionKey", "someTaskProcess");
        assertResultsPresentInPostDataResponse(url, requestNode, 0);

        requestNode = objectMapper.createObjectNode();
        requestNode.put("processDefinitionKey", "oneTaskProcess");
        assertResultsPresentInPostDataResponse(url, requestNode, task.getId(), finishedTaskProcess1.getId(), task2.getId());

        requestNode = objectMapper.createObjectNode();
        requestNode.put("processDefinitionKeyLike", "oneTask%");
        assertResultsPresentInPostDataResponse(url, requestNode, task.getId(), finishedTaskProcess1.getId(), task2.getId());

        requestNode = objectMapper.createObjectNode();
        requestNode.put("processDefinitionKeyLike", "some%");
        assertResultsPresentInPostDataResponse(url, requestNode);

        requestNode = objectMapper.createObjectNode();
        requestNode.put("taskDefinitionKey", "processTask");
        assertResultsPresentInPostDataResponse(url, requestNode, finishedTaskProcess1.getId(), task2.getId());
    }

    /**
     * Posts the given query body to the given URL, asserts the expected result count and
     * that each expected task id is present in the returned "data" array.
     *
     * @param url                     relative REST url to POST the query to
     * @param body                    the JSON query body
     * @param numberOfResultsExpected exact number of entries expected in the "data" array
     * @param expectedTaskIds         task ids that must all appear in the result
     * @throws IOException if the response body cannot be read or parsed
     */
    protected void assertResultsPresentInPostDataResponse(String url, ObjectNode body, int numberOfResultsExpected, String... expectedTaskIds) throws JsonProcessingException, IOException {
        // Do the actual call
        HttpPost httpPost = new HttpPost(SERVER_URL_PREFIX + url);
        httpPost.setEntity(new StringEntity(body.toString()));
        CloseableHttpResponse response = executeRequest(httpPost, HttpStatus.SC_OK);

        // Fix: close the response even when parsing the body throws, otherwise the
        // underlying connection leaks on a malformed response
        JsonNode dataNode;
        try {
            dataNode = objectMapper.readTree(response.getEntity().getContent()).get("data");
        } finally {
            closeResponse(response);
        }
        assertEquals(numberOfResultsExpected, dataNode.size());

        // Check presence of ID's
        if (expectedTaskIds != null) {
            List<String> toBeFound = new ArrayList<String>(Arrays.asList(expectedTaskIds));
            Iterator<JsonNode> it = dataNode.iterator();
            while (it.hasNext()) {
                String id = it.next().get("id").textValue();
                toBeFound.remove(id);
            }
            assertTrue("Not all entries have been found in result, missing: " + StringUtils.join(toBeFound, ", "), toBeFound.isEmpty());
        }
    }
}
| |
/*
* (C) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hp.ov.sdk.dto.servers.serverprofiletemplate;
import java.io.Serializable;
import java.util.List;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import com.google.gson.annotations.Since;
import com.google.gson.annotations.Until;
import com.hp.ov.sdk.dto.servers.BootTarget;
import com.hp.ov.sdk.dto.servers.BootVolumeSource;
import com.hp.ov.sdk.dto.servers.ChapLevel;
import com.hp.ov.sdk.dto.servers.InitiatorNameSource;
/**
 * DTO describing the boot configuration of a connection within a server profile
 * template (iSCSI / boot-target settings).
 *
 * <p>Fields annotated with {@code @Since(300)} are only serialized for API version
 * 300 and later; {@code @Until(299)} fields only for versions up to 299.
 * {@code equals}, {@code hashCode} and {@code toString} are reflection-based, so
 * every field participates in them.
 */
public class ConnectionBootTemplate implements Serializable {

    private static final long serialVersionUID = 1L;

    @Since(300)
    private BootVolumeSource bootVolumeSource;
    @Since(300)
    private ChapLevel chapLevel;
    @Since(300)
    private String firstBootTargetIp;
    @Since(300)
    private String firstBootTargetPort;
    @Since(300)
    private String initiatorGateway;
    @Since(300)
    private InitiatorNameSource initiatorNameSource;
    @Since(300)
    private String initiatorSubnetMask;
    @Since(300)
    private Integer initiatorVlanId;
    private String priority;
    @Since(300)
    private String secondBootTargetIp;
    @Since(300)
    private String secondBootTargetPort;
    // replaced by explicit boot targets in API 300+; kept for pre-300 payloads
    @Until(299)
    private Boolean specifyBootTarget;
    private List<BootTarget> targets;

    /**
     * @return the bootVolumeSource
     */
    public BootVolumeSource getBootVolumeSource() {
        return bootVolumeSource;
    }

    /**
     * @param bootVolumeSource the bootVolumeSource to set
     */
    public void setBootVolumeSource(BootVolumeSource bootVolumeSource) {
        this.bootVolumeSource = bootVolumeSource;
    }

    /**
     * @return the chapLevel
     */
    public ChapLevel getChapLevel() {
        return chapLevel;
    }

    /**
     * @param chapLevel the chapLevel to set
     */
    public void setChapLevel(ChapLevel chapLevel) {
        this.chapLevel = chapLevel;
    }

    /**
     * @return the firstBootTargetIp
     */
    public String getFirstBootTargetIp() {
        return firstBootTargetIp;
    }

    /**
     * @param firstBootTargetIp the firstBootTargetIp to set
     */
    public void setFirstBootTargetIp(String firstBootTargetIp) {
        this.firstBootTargetIp = firstBootTargetIp;
    }

    /**
     * @return the firstBootTargetPort
     */
    public String getFirstBootTargetPort() {
        return firstBootTargetPort;
    }

    /**
     * @param firstBootTargetPort the firstBootTargetPort to set
     */
    public void setFirstBootTargetPort(String firstBootTargetPort) {
        this.firstBootTargetPort = firstBootTargetPort;
    }

    /**
     * @return the initiatorGateway
     */
    public String getInitiatorGateway() {
        return initiatorGateway;
    }

    /**
     * @param initiatorGateway the initiatorGateway to set
     */
    public void setInitiatorGateway(String initiatorGateway) {
        this.initiatorGateway = initiatorGateway;
    }

    /**
     * @return the initiatorNameSource
     */
    public InitiatorNameSource getInitiatorNameSource() {
        return initiatorNameSource;
    }

    /**
     * @param initiatorNameSource the initiatorNameSource to set
     */
    public void setInitiatorNameSource(InitiatorNameSource initiatorNameSource) {
        this.initiatorNameSource = initiatorNameSource;
    }

    /**
     * @return the initiatorSubnetMask
     */
    public String getInitiatorSubnetMask() {
        return initiatorSubnetMask;
    }

    /**
     * @param initiatorSubnetMask the initiatorSubnetMask to set
     */
    public void setInitiatorSubnetMask(String initiatorSubnetMask) {
        this.initiatorSubnetMask = initiatorSubnetMask;
    }

    /**
     * @return the initiatorVlanId
     */
    public Integer getInitiatorVlanId() {
        return initiatorVlanId;
    }

    /**
     * @param initiatorVlanId the initiatorVlanId to set
     */
    public void setInitiatorVlanId(Integer initiatorVlanId) {
        this.initiatorVlanId = initiatorVlanId;
    }

    /**
     * @return the priority
     */
    public String getPriority() {
        return priority;
    }

    /**
     * @param priority the priority to set
     */
    public void setPriority(String priority) {
        this.priority = priority;
    }

    /**
     * @return the secondBootTargetIp
     */
    public String getSecondBootTargetIp() {
        return secondBootTargetIp;
    }

    /**
     * @param secondBootTargetIp the secondBootTargetIp to set
     */
    public void setSecondBootTargetIp(String secondBootTargetIp) {
        this.secondBootTargetIp = secondBootTargetIp;
    }

    /**
     * @return the secondBootTargetPort
     */
    public String getSecondBootTargetPort() {
        return secondBootTargetPort;
    }

    /**
     * @param secondBootTargetPort the secondBootTargetPort to set
     */
    public void setSecondBootTargetPort(String secondBootTargetPort) {
        this.secondBootTargetPort = secondBootTargetPort;
    }

    /**
     * @return the specifyBootTarget
     */
    public Boolean getSpecifyBootTarget() {
        return specifyBootTarget;
    }

    /**
     * @param specifyBootTarget the specifyBootTarget to set
     */
    public void setSpecifyBootTarget(Boolean specifyBootTarget) {
        this.specifyBootTarget = specifyBootTarget;
    }

    /**
     * @return the targets
     */
    public List<BootTarget> getTargets() {
        return targets;
    }

    /**
     * @param targets the targets to set
     */
    public void setTargets(List<BootTarget> targets) {
        this.targets = targets;
    }

    // reflection-based: includes all declared fields
    @Override
    public String toString() {
        return ToStringBuilder.reflectionToString(this);
    }

    @Override
    public int hashCode() {
        return HashCodeBuilder.reflectionHashCode(this);
    }

    @Override
    public boolean equals(Object obj) {
        return EqualsBuilder.reflectionEquals(this, obj);
    }
}
| |
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.cli;
import com.facebook.buck.core.build.context.BuildContext;
import com.facebook.buck.core.build.engine.BuildEngine;
import com.facebook.buck.core.build.execution.context.StepExecutionContext;
import com.facebook.buck.core.config.BuckConfig;
import com.facebook.buck.core.exceptions.HumanReadableException;
import com.facebook.buck.core.filesystems.AbsPath;
import com.facebook.buck.core.filesystems.RelPath;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.UnconfiguredTargetConfiguration;
import com.facebook.buck.core.rules.BuildRule;
import com.facebook.buck.core.rules.SourcePathRuleFinder;
import com.facebook.buck.core.sourcepath.SourcePath;
import com.facebook.buck.core.sourcepath.resolver.SourcePathResolverAdapter;
import com.facebook.buck.core.test.event.IndividualTestEvent;
import com.facebook.buck.core.test.event.TestRunEvent;
import com.facebook.buck.core.test.event.TestStatusMessageEvent;
import com.facebook.buck.core.test.event.TestSummaryEvent;
import com.facebook.buck.core.test.rule.TestRule;
import com.facebook.buck.core.toolchain.tool.Tool;
import com.facebook.buck.core.util.log.Logger;
import com.facebook.buck.downwardapi.config.DownwardApiConfig;
import com.facebook.buck.event.BuckEventBus;
import com.facebook.buck.event.ConsoleEvent;
import com.facebook.buck.io.filesystem.BuildCellRelativePath;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.javacd.model.BaseCommandParams.SpoolMode;
import com.facebook.buck.jvm.core.JavaLibrary;
import com.facebook.buck.jvm.java.CompilerOutputPaths;
import com.facebook.buck.jvm.java.DefaultJavaPackageFinder;
import com.facebook.buck.jvm.java.GenerateCodeCoverageReportStep;
import com.facebook.buck.jvm.java.JacocoConstants;
import com.facebook.buck.jvm.java.JavaBuckConfig;
import com.facebook.buck.jvm.java.JavaOptions;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.StepFailedException;
import com.facebook.buck.step.StepRunner;
import com.facebook.buck.step.fs.MakeCleanDirectoryStep;
import com.facebook.buck.test.CoverageReportFormat;
import com.facebook.buck.test.TestCaseSummary;
import com.facebook.buck.test.TestResultSummary;
import com.facebook.buck.test.TestResults;
import com.facebook.buck.test.TestRuleEvent;
import com.facebook.buck.test.TestRunningOptions;
import com.facebook.buck.test.TestStatusMessage;
import com.facebook.buck.test.result.type.ResultType;
import com.facebook.buck.util.Threads;
import com.facebook.buck.util.concurrent.MoreFutures;
import com.facebook.buck.util.types.Either;
import com.facebook.buck.util.types.Unit;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.base.Throwables;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import com.google.common.io.Files;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.SettableFuture;
import java.io.File;
import java.io.IOException;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import javax.annotation.Nullable;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
/** Utility class for running tests from {@link TestRule}s which have been built. */
public class TestRunning {
  /** Class-scoped logger for test-orchestration diagnostics. */
  private static final Logger LOG = Logger.get(TestRunning.class);
  // Utility class; do not instantiate.
  private TestRunning() {}
  /**
   * Runs all given (already-built) test rules and reports their results.
   *
   * <p>Flow: (1) if coverage is enabled, wipe/create the Jacoco output dir; (2) post a
   * {@link TestRunEvent#started}; (3) split tests into parallel and run-separately groups;
   * (4) run parallel tests on {@code service}, then — from the completion callback of the
   * parallel batch — run the separate tests serially on a direct executor; (5) post
   * {@link TestRunEvent#finished}; (6) optionally write XML results and a coverage report.
   *
   * @return a shell-style exit code: 0 when every test succeeded, non-zero otherwise
   *     (1 for infrastructure failures, 32 for test failures).
   */
  @SuppressWarnings("PMD.EmptyCatchBlock")
  public static int runTests(
      CommandRunnerParams params,
      Iterable<TestRule> tests,
      ImmutableSet<JavaLibrary> rulesUnderTestForCoverage,
      StepExecutionContext executionContext,
      TestRunningOptions options,
      ListeningExecutorService service,
      BuildEngine buildEngine,
      BuildContext buildContext,
      SourcePathRuleFinder ruleFinder)
      throws IOException, InterruptedException {
    // If needed, we first run instrumentation on the class files.
    if (options.isCodeCoverageEnabled() && !rulesUnderTestForCoverage.isEmpty()) {
      try {
        // We'll use the filesystem of the first rule under test. This will fail if there are any
        // tests from a different repo, but it'll help us bootstrap ourselves to being able to
        // support multiple repos
        // TODO(t8220837): Support tests in multiple repos
        JavaLibrary library = rulesUnderTestForCoverage.iterator().next();
        for (Step step :
            MakeCleanDirectoryStep.of(
                BuildCellRelativePath.fromCellRelativePath(
                    buildContext.getBuildCellRootPath(),
                    library.getProjectFilesystem(),
                    JacocoConstants.getJacocoOutputDir(library.getProjectFilesystem())))) {
          StepRunner.runStep(executionContext, step, Optional.empty());
        }
      } catch (StepFailedException e) {
        params
            .getBuckEventBus()
            .post(ConsoleEvent.severe(Throwables.getRootCause(e).getLocalizedMessage()));
        return 1;
      }
    } else {
      // Coverage disabled (or nothing under test): drop the rules so the report phase is skipped.
      rulesUnderTestForCoverage = ImmutableSet.of();
    }
    ImmutableSet<String> testTargets =
        FluentIterable.from(tests)
            .transform(BuildRule::getBuildTarget)
            .transform(Object::toString)
            .toSet();
    int totalNumberOfTests = Iterables.size(tests);
    params
        .getBuckEventBus()
        .post(
            TestRunEvent.started(
                options.isRunAllTests(),
                options.getTestSelectorList(),
                options.shouldExplainTestSelectorList(),
                testTargets));
    // Start running all of the tests. The result of each java_test() rule is represented as a
    // ListenableFuture.
    List<ListenableFuture<TestResults>> results = new ArrayList<>();
    // Monotonic counter used to stamp each TestResults with a report-ordering sequence number.
    AtomicInteger lastReportedTestSequenceNumber = new AtomicInteger();
    List<TestRun> separateTestRuns = new ArrayList<>();
    List<TestRun> parallelTestRuns = new ArrayList<>();
    for (TestRule test : tests) {
      // Determine whether the test needs to be executed.
      // Memoized so the interpretation step runs at most once even if retried.
      Callable<TestResults> resultsInterpreter =
          getCachingCallable(
              test.interpretTestResults(
                  executionContext,
                  buildContext.getSourcePathResolver(),
                  /*isUsingTestSelectors*/ !options.getTestSelectorList().isEmpty()));
      // Maps "testCase:testName" -> UUID so started/finished summary events can be correlated.
      Map<String, UUID> testUUIDMap = new HashMap<>();
      AtomicReference<TestStatusMessageEvent.Started> currentTestStatusMessageEvent =
          new AtomicReference<>();
      // Bridges streaming per-test callbacks from the test rule into event-bus events.
      TestRule.TestReportingCallback testReportingCallback =
          new TestRule.TestReportingCallback() {
            @Override
            public void testsDidBegin() {
              LOG.debug("Tests for rule %s began", test.getBuildTarget());
            }
            @Override
            public void statusDidBegin(TestStatusMessage didBeginMessage) {
              LOG.debug("Test status did begin: %s", didBeginMessage);
              TestStatusMessageEvent.Started startedEvent =
                  TestStatusMessageEvent.started(didBeginMessage);
              TestStatusMessageEvent.Started previousEvent =
                  currentTestStatusMessageEvent.getAndSet(startedEvent);
              // Status messages may not nest: a begin must be matched by an end before the next.
              Preconditions.checkState(
                  previousEvent == null,
                  "Received begin status before end status (%s)",
                  previousEvent);
              params.getBuckEventBus().post(startedEvent);
              String message = didBeginMessage.getMessage();
              // Surface debugger-related status directly on stderr so users notice a paused test.
              // NOTE(review): default-locale toLowerCase(); the "debugger" match could misfire
              // under locales with unusual case mappings (e.g. Turkish) — confirm acceptable.
              if (message.toLowerCase().contains("debugger")) {
                executionContext
                    .getStdErr()
                    .println(executionContext.getAnsi().asWarningText(message));
              }
            }
            @Override
            public void statusDidEnd(TestStatusMessage didEndMessage) {
              LOG.debug("Test status did end: %s", didEndMessage);
              TestStatusMessageEvent.Started previousEvent =
                  currentTestStatusMessageEvent.getAndSet(null);
              Preconditions.checkState(
                  previousEvent != null,
                  "Received end status before begin status (%s)",
                  previousEvent);
              params
                  .getBuckEventBus()
                  .post(TestStatusMessageEvent.finished(previousEvent, didEndMessage));
            }
            @Override
            public void testDidBegin(String testCaseName, String testName) {
              LOG.debug(
                  "Test rule %s test case %s test name %s began",
                  test.getBuildTarget(), testCaseName, testName);
              UUID testUUID = UUID.randomUUID();
              // UUID is immutable and thread-safe as of Java 7, so it's
              // safe to stash in a map and use later:
              //
              // http://bugs.java.com/view_bug.do?bug_id=6611830
              testUUIDMap.put(testCaseName + ":" + testName, testUUID);
              params
                  .getBuckEventBus()
                  .post(TestSummaryEvent.started(testUUID, testCaseName, testName));
            }
            @Override
            public void testDidEnd(TestResultSummary testResultSummary) {
              LOG.debug("Test rule %s test did end: %s", test.getBuildTarget(), testResultSummary);
              UUID testUUID =
                  testUUIDMap.get(
                      testResultSummary.getTestCaseName() + ":" + testResultSummary.getTestName());
              Objects.requireNonNull(testUUID);
              params.getBuckEventBus().post(TestSummaryEvent.finished(testUUID, testResultSummary));
            }
            @Override
            public void testsDidEnd(List<TestCaseSummary> testCaseSummaries) {
              LOG.debug("Test rule %s tests did end: %s", test.getBuildTarget(), testCaseSummaries);
            }
          };
      List<Step> steps;
      params.getBuckEventBus().post(IndividualTestEvent.started(testTargets));
      ImmutableList.Builder<Step> stepsBuilder = ImmutableList.builder();
      // Tests must already be built by the time we get here.
      Preconditions.checkState(buildEngine.isRuleBuilt(test.getBuildTarget()));
      List<Step> testSteps =
          test.runTests(executionContext, options, buildContext, testReportingCallback);
      if (!testSteps.isEmpty()) {
        stepsBuilder.addAll(testSteps);
      }
      steps = stepsBuilder.build();
      TestRun testRun =
          ImmutableTestRun.ofImpl(test, steps, resultsInterpreter, testReportingCallback);
      // Always run the commands, even if the list of commands as empty. There may be zero
      // commands because the rule is cached, but its results must still be processed.
      if (test.runTestSeparately()) {
        LOG.debug("Running test %s in serial", test);
        separateTestRuns.add(testRun);
      } else {
        LOG.debug("Running test %s in parallel", test);
        parallelTestRuns.add(testRun);
      }
    }
    // Kick off every parallel test on the supplied executor service.
    for (TestRun testRun : parallelTestRuns) {
      ListenableFuture<TestResults> testResults =
          runStepsAndYieldResult(
              executionContext,
              testRun.getSteps(),
              testRun.getTestResultsCallable(),
              testRun.getTest().getBuildTarget(),
              params.getBuckEventBus(),
              service);
      results.add(
          transformTestResults(
              params,
              testResults,
              testRun.getTest(),
              testRun.getTestReportingCallback(),
              testTargets,
              lastReportedTestSequenceNumber,
              totalNumberOfTests));
    }
    ListenableFuture<List<TestResults>> parallelTestStepsFuture = Futures.allAsList(results);
    List<TestResults> completedResults = new ArrayList<>();
    ListeningExecutorService directExecutorService = MoreExecutors.newDirectExecutorService();
    // Serial ("run separately") tests are started from inside the success callback of the
    // parallel batch, so they only begin after every parallel test has finished.
    ListenableFuture<Unit> uberFuture =
        MoreFutures.addListenableCallback(
            parallelTestStepsFuture,
            new FutureCallback<List<TestResults>>() {
              @Override
              public void onSuccess(List<TestResults> parallelTestResults) {
                LOG.debug("Parallel tests completed, running separate tests...");
                completedResults.addAll(parallelTestResults);
                List<ListenableFuture<TestResults>> separateResultsList = new ArrayList<>();
                for (TestRun testRun : separateTestRuns) {
                  separateResultsList.add(
                      transformTestResults(
                          params,
                          runStepsAndYieldResult(
                              executionContext,
                              testRun.getSteps(),
                              testRun.getTestResultsCallable(),
                              testRun.getTest().getBuildTarget(),
                              params.getBuckEventBus(),
                              directExecutorService),
                          testRun.getTest(),
                          testRun.getTestReportingCallback(),
                          testTargets,
                          lastReportedTestSequenceNumber,
                          totalNumberOfTests));
                }
                ListenableFuture<List<TestResults>> serialResults =
                    Futures.allAsList(separateResultsList);
                try {
                  completedResults.addAll(serialResults.get());
                } catch (ExecutionException e) {
                  LOG.error(e, "Error fetching serial test results");
                  throw new HumanReadableException(e, "Error fetching serial test results");
                } catch (InterruptedException e) {
                  LOG.error(e, "Interrupted fetching serial test results");
                  try {
                    serialResults.cancel(true);
                  } catch (CancellationException ignored) {
                    // Rethrow original InterruptedException instead.
                  }
                  // Restore the interrupt flag before converting to an unchecked exception.
                  Threads.interruptCurrentThread();
                  throw new HumanReadableException(e, "Test cancelled");
                }
                LOG.debug("Done running serial tests.");
              }
              @Override
              public void onFailure(Throwable e) {
                LOG.error(e, "Parallel tests failed, not running serial tests");
                throw new HumanReadableException(e, "Parallel tests failed");
              }
            },
            directExecutorService);
    try {
      // Block until all the tests have finished running.
      uberFuture.get();
    } catch (ExecutionException e) {
      e.printStackTrace(params.getConsole().getStdErr());
      return 1;
    } catch (InterruptedException e) {
      try {
        uberFuture.cancel(true);
      } catch (CancellationException ignored) {
        // Rethrow original InterruptedException instead.
      }
      Threads.interruptCurrentThread();
      throw e;
    }
    params.getBuckEventBus().post(TestRunEvent.finished(testTargets, completedResults));
    // Write out the results as XML, if requested.
    Optional<String> path = options.getPathToXmlTestOutput();
    if (path.isPresent()) {
      try (Writer writer = Files.newWriter(new File(path.get()), StandardCharsets.UTF_8)) {
        writeXmlOutput(completedResults, writer);
      }
    }
    // Generate the code coverage report.
    if (options.isCodeCoverageEnabled() && !rulesUnderTestForCoverage.isEmpty()) {
      try {
        BuckConfig buckConfig = params.getBuckConfig();
        JavaBuckConfig javaBuckConfig = buckConfig.getView(JavaBuckConfig.class);
        DownwardApiConfig downwardApiConfig = buckConfig.getView(DownwardApiConfig.class);
        DefaultJavaPackageFinder defaultJavaPackageFinder =
            javaBuckConfig.createDefaultJavaPackageFinder();
        JavaOptions javaOptions = javaBuckConfig.getDefaultJavaOptionsForCodeCoverage();
        StepRunner.runStep(
            executionContext,
            getReportCommand(
                rulesUnderTestForCoverage,
                defaultJavaPackageFinder,
                javaOptions.getJavaRuntime(),
                params.getCells().getRootCell().getFilesystem(),
                ruleFinder,
                JacocoConstants.getJacocoOutputDir(params.getCells().getRootCell().getFilesystem()),
                options.getCoverageReportFormats(),
                options.getCoverageReportTitle(),
                javaBuckConfig
                        .getDefaultJavacOptions(
                            params
                                .getTargetConfiguration()
                                .orElse(UnconfiguredTargetConfiguration.INSTANCE))
                        .getSpoolMode()
                    == SpoolMode.INTERMEDIATE_TO_DISK,
                options.getCoverageIncludes(),
                options.getCoverageExcludes(),
                downwardApiConfig.isEnabledForTests()),
            Optional.empty());
      } catch (StepFailedException e) {
        params
            .getBuckEventBus()
            .post(ConsoleEvent.severe(Throwables.getRootCause(e).getLocalizedMessage()));
        return 1;
      }
    }
    boolean failures =
        Iterables.any(
            completedResults,
            results1 -> {
              LOG.debug("Checking result %s for failure", results1);
              return !results1.isSuccess();
            });
    // TODO(buck_team): convert to ExitCode
    return failures ? 32 : 0;
  }
private static ListenableFuture<TestResults> transformTestResults(
CommandRunnerParams params,
ListenableFuture<TestResults> originalTestResults,
TestRule testRule,
TestRule.TestReportingCallback testReportingCallback,
ImmutableSet<String> testTargets,
AtomicInteger lastReportedTestSequenceNumber,
int totalNumberOfTests) {
SettableFuture<TestResults> transformedTestResults = SettableFuture.create();
FutureCallback<TestResults> callback =
new FutureCallback<TestResults>() {
private TestResults postTestResults(TestResults testResults) {
if (!testRule.supportsStreamingTests()) {
// For test rules which don't support streaming tests, we'll
// stream test summary events after interpreting the
// results.
LOG.debug("Simulating streaming test events for rule %s", testRule);
testReportingCallback.testsDidBegin();
for (TestCaseSummary testCaseSummary : testResults.getTestCases()) {
for (TestResultSummary testResultSummary : testCaseSummary.getTestResults()) {
testReportingCallback.testDidBegin(
testResultSummary.getTestCaseName(), testResultSummary.getTestName());
testReportingCallback.testDidEnd(testResultSummary);
}
}
testReportingCallback.testsDidEnd(testResults.getTestCases());
LOG.debug("Done simulating streaming test events for rule %s", testRule);
}
TestResults transformedTestResults =
TestResults.builder()
.from(testResults)
.setSequenceNumber(lastReportedTestSequenceNumber.incrementAndGet())
.setTotalNumberOfTests(totalNumberOfTests)
.build();
params
.getBuckEventBus()
.post(IndividualTestEvent.finished(testTargets, transformedTestResults));
return transformedTestResults;
}
@Override
public void onSuccess(TestResults testResults) {
LOG.debug("Transforming successful test results %s", testResults);
postTestResults(testResults);
transformedTestResults.set(testResults);
}
@Override
public void onFailure(Throwable throwable) {
LOG.info(throwable, "Test command step failed, marking %s as failed", testRule);
// If the test command steps themselves fail, report this as special test result.
TestResults testResults =
TestResults.of(
testRule.getBuildTarget(),
ImmutableList.of(
new TestCaseSummary(
testRule.getBuildTarget().toString(),
ImmutableList.of(
new TestResultSummary(
testRule.getBuildTarget().toString(),
"main",
ResultType.FAILURE,
0L,
throwable.getMessage(),
Throwables.getStackTraceAsString(throwable),
"",
"")))),
testRule.getContacts(),
testRule.getLabels().stream()
.map(Object::toString)
.collect(ImmutableSet.toImmutableSet()));
TestResults newTestResults = postTestResults(testResults);
transformedTestResults.set(newTestResults);
}
};
Futures.addCallback(originalTestResults, callback);
return transformedTestResults;
}
private static Callable<TestResults> getCachingCallable(Callable<TestResults> callable) {
return new Callable<TestResults>() {
@Nullable private Either<TestResults, Exception> result = null;
@Override
public synchronized TestResults call() throws Exception {
if (result == null) {
try {
result = Either.ofLeft(callable.call());
} catch (Exception t) {
result = Either.ofRight(t);
}
}
if (result.isRight()) {
throw result.getRight();
}
return result.getLeft();
}
};
}
/**
* Writes the test results in XML format to the supplied writer.
*
* <p>This method does NOT close the writer object.
*
* @param allResults The test results.
* @param writer The writer in which the XML data will be written to.
*/
public static void writeXmlOutput(List<TestResults> allResults, Writer writer)
throws IOException {
try {
// Build the XML output.
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder docBuilder = dbf.newDocumentBuilder();
Document doc = docBuilder.newDocument();
// Create the <tests> tag. All test data will be within this tag.
Element testsEl = doc.createElement("tests");
doc.appendChild(testsEl);
for (TestResults results : allResults) {
for (TestCaseSummary testCase : results.getTestCases()) {
// Create the <test name="..." status="..." time="..."> tag.
// This records a single test case result in the test suite.
Element testEl = doc.createElement("test");
testEl.setAttribute("name", testCase.getTestCaseName());
testEl.setAttribute("status", testCase.isSuccess() ? "PASS" : "FAIL");
testEl.setAttribute("time", Long.toString(testCase.getTotalTime()));
testEl.setAttribute("target", results.getBuildTarget().getFullyQualifiedName());
testsEl.appendChild(testEl);
// Loop through the test case and add XML data (name, message, and
// stacktrace) for each individual test, if present.
addExtraXmlInfo(testCase, testEl);
}
}
// Write XML to the writer.
TransformerFactory tf = TransformerFactory.newInstance();
Transformer transformer = tf.newTransformer();
transformer.transform(new DOMSource(doc), new StreamResult(writer));
} catch (TransformerException | ParserConfigurationException ex) {
throw new IOException("Unable to build the XML document!");
}
}
/**
* A helper method that adds extra XML.
*
* <p>This includes a test name, time (in ms), message, and stack trace, when present. Example:
*
* <pre>
* <testresult name="failed_test" time="200">
* <message>Reason for test failure</message>
* <stacktrace>Stacktrace here</stacktrace>
* </testresult>
* </pre>
*
* @param testCase The test case summary containing one or more tests.
* @param testEl The XML element object for the <test> tag, in which extra information tags will
* be added.
*/
@VisibleForTesting
static void addExtraXmlInfo(TestCaseSummary testCase, Element testEl) {
Document doc = testEl.getOwnerDocument();
// Loop through the test case and extract test data.
for (TestResultSummary testResult : testCase.getTestResults()) {
// Extract the test name and time.
String name = Strings.nullToEmpty(testResult.getTestName());
String time = Long.toString(testResult.getTime());
String status = testResult.isSuccess() ? "PASS" : "FAIL";
String type = testResult.getType().name();
// Create the tag: <testresult name="..." time="...">
Element testResultEl = doc.createElement("testresult");
testResultEl.setAttribute("name", name);
testResultEl.setAttribute("time", time);
testResultEl.setAttribute("status", status);
testResultEl.setAttribute("type", type);
testEl.appendChild(testResultEl);
// Create the tag: <message>(Error message here)</message>
Element messageEl = doc.createElement("message");
String message = Strings.nullToEmpty(testResult.getMessage());
messageEl.appendChild(doc.createTextNode(message));
testResultEl.appendChild(messageEl);
// Create the tag: <stacktrace>(Stacktrace here)</stacktrace>
Element stacktraceEl = doc.createElement("stacktrace");
String stacktrace = Strings.nullToEmpty(testResult.getStacktrace());
stacktraceEl.appendChild(doc.createTextNode(stacktrace));
testResultEl.appendChild(stacktraceEl);
}
}
  /**
   * Returns the ShellCommand object that is supposed to generate a code coverage report from data
   * obtained during the test run. This method will also generate a set of source paths to the class
   * files tested during the test run.
   */
  private static Step getReportCommand(
      ImmutableSet<JavaLibrary> rulesUnderTest,
      DefaultJavaPackageFinder defaultJavaPackageFinder,
      Tool javaRuntimeLauncher,
      ProjectFilesystem filesystem,
      SourcePathRuleFinder ruleFinder,
      Path outputDirectory,
      Set<CoverageReportFormat> formats,
      String title,
      boolean useIntermediateClassesDir,
      Optional<String> coverageIncludes,
      Optional<String> coverageExcludes,
      boolean withDownwardApi) {
    ImmutableSet.Builder<String> srcDirectories = ImmutableSet.builder();
    ImmutableSet.Builder<Path> pathsToJars = ImmutableSet.builder();
    // Add all source directories of java libraries that we are testing to -sourcepath.
    SourcePathResolverAdapter sourcePathResolver = ruleFinder.getSourcePathResolver();
    for (JavaLibrary rule : rulesUnderTest) {
      ImmutableSet<String> sourceFolderPath =
          getPathToSourceFolders(rule, ruleFinder, defaultJavaPackageFinder);
      if (!sourceFolderPath.isEmpty()) {
        srcDirectories.addAll(sourceFolderPath);
      }
      // Prefer the on-disk intermediate classes directory when configured to use it.
      RelPath classesItem = null;
      if (useIntermediateClassesDir) {
        classesItem =
            CompilerOutputPaths.of(rule.getBuildTarget(), filesystem.getBuckPaths())
                .getClassesDir();
      }
      // If we aren't configured to use the classes dir on disk, or it wasn't part of this
      // compilation run, then we'll need to unzip the output jar to get access to the classes
      if (classesItem == null || !filesystem.isDirectory(classesItem)) {
        SourcePath path = rule.getSourcePathToOutput();
        if (path != null) {
          classesItem = sourcePathResolver.getCellUnsafeRelPath(path);
        }
      }
      if (classesItem == null) {
        // No classes dir and no output jar: this rule contributes nothing to the report.
        continue;
      }
      pathsToJars.add(classesItem.getPath());
    }
    return new GenerateCodeCoverageReportStep(
        javaRuntimeLauncher.getCommandPrefix(sourcePathResolver),
        filesystem,
        srcDirectories.build(),
        pathsToJars.build(),
        outputDirectory,
        formats,
        title,
        coverageIncludes,
        coverageExcludes,
        withDownwardApi);
  }
  /** Returns a set of source folders of the java files of a library. */
  @VisibleForTesting
  static ImmutableSet<String> getPathToSourceFolders(
      JavaLibrary rule,
      SourcePathRuleFinder ruleFinder,
      DefaultJavaPackageFinder defaultJavaPackageFinder) {
    ImmutableSet<SourcePath> javaSrcs = rule.getJavaSrcs();
    // A Java library rule with just resource files has an empty javaSrcs.
    if (javaSrcs.isEmpty()) {
      return ImmutableSet.of();
    }
    // Iterate through all source paths to make sure we are generating a complete set of source
    // folders for the source paths.
    Set<String> srcFolders = new HashSet<>();
    loopThroughSourcePath:
    for (SourcePath javaSrcPath : javaSrcs) {
      // Generated sources (backed by a build rule) are skipped; only on-disk sources count.
      if (ruleFinder.getRule(javaSrcPath).isPresent()) {
        continue;
      }
      RelPath javaSrcRelativePath =
          ruleFinder.getSourcePathResolver().getCellUnsafeRelPath(javaSrcPath);
      // If the source path is already under a known source folder, then we can skip this
      // source path.
      for (String srcFolder : srcFolders) {
        if (javaSrcRelativePath.startsWith(srcFolder)) {
          continue loopThroughSourcePath;
        }
      }
      // If the source path is under one of the source roots, then we can just add the source
      // root.
      ImmutableSortedSet<String> pathsFromRoot = defaultJavaPackageFinder.getPathsFromRoot();
      for (String root : pathsFromRoot) {
        if (javaSrcRelativePath.startsWith(root)) {
          srcFolders.add(root);
          continue loopThroughSourcePath;
        }
      }
      // Traverse the file system from the parent directory of the java file until we hit the
      // parent of the src root directory.
      ImmutableSet<String> pathElements = defaultJavaPackageFinder.getPathElements();
      AbsPath directory =
          ruleFinder.getSourcePathResolver().getAbsolutePath(javaSrcPath).getParent();
      if (pathElements.isEmpty()) {
        // No known source-root directory names to search for; nothing to add for this source.
        continue;
      }
      // Walk upward until a path segment matches one of the known source-root element names.
      while (directory != null
          && directory.getFileName() != null
          && !pathElements.contains(directory.getFileName().toString())) {
        directory = directory.getParent();
      }
      if (directory == null || directory.getFileName() == null) {
        continue;
      }
      String directoryPath = directory.toString();
      // NOTE(review): hard-coded '/' suffix assumes POSIX-style separators in AbsPath.toString();
      // verify behavior on Windows paths.
      if (!directoryPath.endsWith("/")) {
        directoryPath += "/";
      }
      srcFolders.add(directoryPath);
    }
    return ImmutableSet.copyOf(srcFolders);
  }
private static ListenableFuture<TestResults> runStepsAndYieldResult(
StepExecutionContext context,
List<Step> steps,
Callable<TestResults> interpretResults,
BuildTarget buildTarget,
BuckEventBus eventBus,
ListeningExecutorService listeningExecutorService) {
Preconditions.checkState(!listeningExecutorService.isShutdown());
Callable<TestResults> callable =
() -> {
LOG.debug("Test steps will run for %s", buildTarget);
eventBus.post(TestRuleEvent.started(buildTarget));
for (Step step : steps) {
StepRunner.runStep(context, step, Optional.of(buildTarget));
}
LOG.debug("Test steps did run for %s", buildTarget);
eventBus.post(TestRuleEvent.finished(buildTarget));
return interpretResults.call();
};
return listeningExecutorService.submit(callable);
}
}
| |
package utils.shapes.human;
public class HumanPos {
public static final float[] pos =
{
0.031671107f,1.563226f,0.06734592f,
0.035571873f,1.5593252f,0.06734592f,
0.031671107f,1.5553868f,0.06734592f,
0.027732491f,1.5593252f,0.06734592f,
0.031671107f,1.5593252f,0.07124668f,
0.031671107f,1.5593252f,0.06340724f,
-0.031725526f,1.5593252f,0.06340724f,
-0.031725526f,1.5593252f,0.07124668f,
-0.02778691f,1.5593252f,0.06734592f,
-0.031725526f,1.5553868f,0.06734592f,
-0.035626233f,1.5593252f,0.06734592f,
-0.031725526f,1.563226f,0.06734592f,
0.033375323f,1.5585678f,0.08249444f,
0.032996595f,1.557962f,0.08249444f,
0.032390654f,1.5575454f,0.08249444f,
0.031671107f,1.5573938f,0.08249444f,
0.03095156f,1.5575454f,0.08249444f,
0.03034556f,1.557962f,0.08249444f,
0.02996689f,1.5585678f,0.08249444f,
0.029815376f,1.5592494f,0.08249444f,
0.02996689f,1.5599692f,0.08249444f,
0.03034556f,1.560575f,0.08249444f,
0.03095156f,1.5609918f,0.08249444f,
0.031671107f,1.5611432f,0.08249444f,
0.032390654f,1.5609918f,0.08249444f,
0.032996595f,1.560575f,0.08249444f,
0.033375323f,1.5599692f,0.08249444f,
0.03352678f,1.5592494f,0.08249444f,
0.036480784f,1.5572803f,0.082873106f,
0.03534466f,1.5555761f,0.08279741f,
0.033678293f,1.55444f,0.082873106f,
0.031671107f,1.5540612f,0.082873106f,
0.02966392f,1.55444f,0.082873106f,
0.027997553f,1.5555761f,0.08279741f,
0.026861489f,1.5572803f,0.082873106f,
0.026444852f,1.5592873f,0.082873106f,
0.026861489f,1.5612569f,0.082873106f,
0.027997553f,1.5629609f,0.08279741f,
0.02966392f,1.5640972f,0.082873106f,
0.031671107f,1.5644758f,0.082873106f,
0.033678293f,1.5640972f,0.082873106f,
0.03534466f,1.5629609f,0.08279741f,
0.036480784f,1.5612569f,0.082873106f,
0.03689736f,1.5592873f,0.082873106f,
0.026293397f,1.6220024f,0.08397144f,
0.0108418465f,1.464041f,0.09306049f,
0.010084391f,1.4471505f,0.07844216f,
0.0037977695f,1.4914978f,0.09590089f,
0.0021693707f,1.491384f,0.09631747f,
0.014780462f,1.4917626f,0.09275752f,
0.0105009675f,1.4918385f,0.094878376f,
0.007471323f,1.4914598f,0.0952571f,
0.017734468f,1.491687f,0.08942491f,
0.030231953f,1.4562774f,0.063634515f,
0.0073577166f,1.4789245f,0.09858978f,
0.016295373f,1.482333f,0.094537556f,
0.012015879f,1.5690582f,0.09283328f,
0.0028510094f,1.5627716f,0.0952571f,
0.0034569502f,1.5200906f,0.11222339f,
0.023831725f,1.5557275f,0.08264589f,
0.02712655f,1.5547807f,0.083857834f,
0.026066184f,1.4915354f,0.08787215f,
0.02523297f,1.4901721f,0.086054325f,
0.020688355f,1.4858549f,0.09022021f,
0.023074329f,1.4875212f,0.08874327f,
0.03500372f,1.5546672f,0.08389574f,
0.03788197f,1.5554245f,0.083251834f,
0.030042589f,1.5545914f,0.08431232f,
0.024172544f,1.4782805f,0.0864709f,
0.03977555f,1.5561063f,0.08215356f,
0.041782737f,1.5570908f,0.07999498f,
0.042540193f,1.5577347f,0.07923752f,
0.043070376f,1.5583785f,0.07870728f,
0.043183982f,1.558795f,0.07844216f,
0.042881012f,1.5592873f,0.0795784f,
0.018113136f,1.5153568f,0.091394186f,
0.026141942f,1.5617113f,0.08363056f,
0.022354782f,1.5603101f,0.083251834f,
0.019930959f,1.4907024f,0.09150779f,
0.018567622f,1.4976707f,0.09733999f,
0.022998571f,1.4944139f,0.092037976f,
0.013303518f,1.500246f,0.100559056f,
0.03095156f,1.5626581f,0.08461523f,
0.04170704f,1.5602722f,0.0804494f,
0.0032675862f,1.4997158f,0.10495216f,
0.00819087f,1.5022154f,0.1033237f,
0.005615592f,1.5010791f,0.10449773f,
0.02557385f,1.4910054f,0.08696324f,
0.03254217f,1.5546293f,0.08419865f,
0.6638195f,1.3289919f,-0.009381473f,
0.7008956f,1.3542142f,-0.0022994876f,
0.7197177f,1.3616369f,-0.0174101f,
0.72074014f,1.361599f,-0.0366109f,
0.7200206f,1.3613718f,-0.02430278f,
0.71914953f,1.3614097f,-0.042518914f,
0.68574697f,1.3519418f,0.015310645f,
0.09287119f,1.2367373f,-0.09197885f,
0.15259433f,1.2712002f,-0.08186722f,
0.10900438f,1.2038648f,0.07836646f,
0.77531284f,1.3792472f,-0.037065327f,
0.77554005f,1.3792472f,-0.023924053f,
0.7541426f,1.3685675f,-0.036573052f,
0.75368816f,1.3687189f,-0.023810446f,
0.77705485f,1.3702717f,-0.035588384f,
0.77690333f,1.3700066f,-0.024870813f,
0.75368816f,1.3800044f,-0.02430278f,
0.75387746f,1.3788683f,-0.03725469f,
0.7954604f,1.3809135f,-0.034906745f,
0.7958769f,1.3804967f,-0.026423514f,
0.038942397f,1.3661437f,0.037465394f,
0.121426165f,1.3868971f,0.015954554f,
0.14600468f,0.946832f,0.039927006f,
0.053144157f,0.8398834f,0.056363225f,
0.14448982f,1.3263786f,0.0337919f,
0.1693334f,1.3448219f,0.025346577f,
0.659843f,1.3618641f,-0.009229958f,
0.69578284f,1.3453522f,0.0019042492f,
0.69313186f,1.333915f,0.0066003203f,
0.229814f,1.3876166f,-0.008964896f,
0.14524722f,0.95898885f,-0.07262659f,
0.13320416f,1.035413f,-0.0290367f,
0.71202976f,1.3857989f,-0.04418522f,
0.7095303f,1.3854201f,-0.062363446f,
0.7110072f,1.3809891f,-0.07421714f,
0.052651823f,1.3978419f,-0.021121562f,
0.72793573f,1.3352785f,0.032049835f,
0.60542196f,1.3563728f,-0.0246436f,
0.6095877f,1.3270223f,-0.018129766f,
0.6906703f,1.3677721f,-0.0027160645f,
0.7125978f,1.3861396f,-0.016387641f,
0.71021193f,1.381027f,-0.0038900971f,
0.7130144f,1.3637197f,-0.07448226f,
0.7271026f,1.3339529f,0.021218657f,
0.020423293f,1.4469988f,0.01580298f,
0.01197803f,1.3921611f,0.014326036f,
0.681922f,1.3432314f,0.020461142f,
0.71380967f,1.3648937f,-6.710291E-4f,
0.72812515f,1.3450491f,0.030421376f,
0.7274434f,1.3448219f,0.019703746f,
0.14483064f,1.3022926f,-0.002261579f,
0.10957247f,1.1947758f,0.056060255f,
0.09665829f,1.3971982f,-0.081640005f,
0.03663224f,1.5031621f,-0.06327236f,
0.051288486f,1.4256396f,-0.06357533f,
0.13138634f,1.0165532f,0.043714166f,
0.17936933f,1.3019516f,-0.07823151f,
0.7158547f,1.3586073f,-0.061378777f,
0.7174075f,1.3590617f,-0.05622828f,
0.049508452f,1.4936943f,-0.024908662f,
0.22288352f,1.3837917f,-0.0728538f,
0.025346577f,1.2120073f,0.09431034f,
0.008304477f,1.2053418f,0.09567368f,
0.11506379f,1.1322501f,-0.0043445826f,
0.11589694f,1.0954392f,0.051402092f,
0.025384426f,1.4984281f,0.090485275f,
0.014780462f,1.4915354f,0.093325615f,
0.007963657f,1.4911189f,0.09718847f,
0.018264651f,1.4911568f,0.089993f,
0.020347536f,1.4910433f,0.0878343f,
0.022581995f,1.4910054f,0.08650881f,
0.023415148f,1.4908538f,0.08870536f,
0.023301542f,1.4908917f,0.08707684f,
0.020953536f,1.4918005f,0.09184867f,
0.057612956f,0.5274448f,-0.081299126f,
0.14100564f,0.7969373f,-0.1047793f,
0.019741595f,0.84079224f,-0.14098436f,
0.11457145f,0.012583971f,-0.07622433f,
0.1092695f,0.10237688f,-0.07459587f,
0.05212164f,0.09790802f,-0.052782f,
0.10620189f,0.09737784f,-0.024378479f,
0.017620862f,1.5572803f,0.081661284f,
0.018454015f,1.5578861f,0.08211571f,
0.020612657f,1.5581512f,0.08294886f,
0.018491864f,1.5562198f,0.08185065f,
0.017507255f,1.55675f,0.08154768f,
0.02023393f,1.5561063f,0.081623375f,
0.05015236f,0.99644345f,-0.10209048f,
0.050379574f,1.5120997f,0.07355678f,
0.021597326f,1.5566363f,0.08185065f,
0.03477651f,1.5626581f,0.083857834f,
0.067762494f,1.6072326f,0.04038155f,
0.07283723f,1.6239717f,-0.0013527274f,
0.06980759f,1.610641f,-0.03471738f,
0.06761098f,1.5842824f,-0.04543495f,
0.03284514f,1.6661983f,0.038904488f,
0.035117388f,1.6625247f,-0.048502564f,
0.038601518f,1.61742f,-0.08410156f,
0.012167394f,1.3691733f,0.03151965f,
0.0997259f,1.3850412f,0.003835678f,
0.03977555f,1.0524931f,-0.07993573f,
0.10355091f,0.5065399f,0.030459285f,
0.036139965f,0.502639f,-0.0076771975f,
0.12869751f,0.5155533f,-0.057023585f,
0.3542592f,1.3194861f,-0.07114953f,
0.3577054f,1.3586829f,-0.062401354f,
0.37327057f,1.3119497f,-0.010252476f,
0.37349778f,1.355653f,-0.010555446f,
0.6067473f,1.3582284f,-0.05838698f,
0.7123706f,1.3856096f,-0.021841109f,
0.7126357f,1.3853445f,-0.0378986f,
0.7112344f,1.3849277f,-0.05683416f,
0.7940213f,1.3725438f,-0.02593118f,
0.7943242f,1.3727331f,-0.035133958f,
0.61079973f,1.3273256f,-0.060886443f,
0.12820518f,0.03000474f,0.06420255f,
0.12937915f,0.012129486f,0.053447127f,
0.12600857f,0.014553249f,0.096771896f,
0.05079615f,0.011902273f,0.12892473f,
0.042085707f,0.011864424f,0.059544444f,
0.050076604f,0.027050793f,0.13320416f,
0.044320107f,0.043373346f,0.071549654f,
0.124872446f,0.021900296f,0.097415745f,
0.086698174f,1.5457294f,-0.018735707f,
0.08821297f,1.5575454f,-0.01517576f,
0.0862816f,1.5626202f,-0.010214627f,
0.07810134f,1.5583026f,0.0022450686f,
0.07192832f,1.5227416f,0.0068275332f,
0.07401121f,1.5142205f,-7.0887804E-4f,
0.07919967f,1.5270588f,-0.012676179f,
0.05780232f,1.5056238f,0.005236864f,
0.06620979f,1.516114f,-1.4084578E-4f,
0.07011056f,1.5338001f,0.017734468f,
0.06942886f,1.5214162f,0.009251237f,
0.082873106f,1.558265f,-0.01672846f,
0.0812068f,1.5637183f,-0.010934174f,
0.07488233f,1.5584922f,0.0049718022f,
0.08116895f,1.5455022f,-0.02085644f,
0.07454151f,1.5278921f,-0.013244331f,
0.08310038f,1.563226f,-0.0043445826f,
0.07844216f,1.5647788f,-0.0039658546f,
0.11404127f,1.0070095f,-0.08182931f,
0.10824698f,0.8324984f,-0.13814396f,
0.06980759f,0.098362565f,-0.08220804f,
0.069618225f,0.0070926547f,-0.08088249f,
0.03754115f,1.562052f,0.08306247f,
0.7883026f,1.3726196f,-0.010366082f,
0.78902215f,1.3729982f,-0.004041612f,
0.79046124f,1.3802695f,-0.002640307f,
0.78894645f,1.3802695f,-0.011085629f,
0.7495603f,1.3798151f,-0.015516579f,
0.74933296f,1.37902f,-0.0016556978f,
0.76982135f,1.3697793f,-8.224845E-4f,
0.7694048f,1.3695521f,-0.013660908f,
0.74895436f,1.3694763f,-0.0011254549f,
0.74884075f,1.3684537f,-0.014872789f,
0.7692912f,1.3788683f,-8.224845E-4f,
0.76959413f,1.37902f,-0.014569819f,
0.7894388f,1.3728848f,-0.055773795f,
0.7896281f,1.372771f,-0.04770726f,
0.79083997f,1.3800802f,-0.047631502f,
0.7893252f,1.3804967f,-0.05524361f,
0.7509994f,1.3785276f,-0.05781889f,
0.751113f,1.3789821f,-0.044374526f,
0.77133626f,1.3695142f,-0.046002984f,
0.7709575f,1.3700066f,-0.057553828f,
0.75050706f,1.3684537f,-0.044412434f,
0.75039345f,1.3686054f,-0.057099342f,
0.7707682f,1.3780732f,-0.044109464f,
0.7702759f,1.3787549f,-0.058046103f,
0.7598991f,1.3788683f,-0.07588345f,
0.76069444f,1.3767476f,-0.06327236f,
0.74315995f,1.3684537f,-0.07334614f,
0.74323565f,1.3681507f,-0.06482506f,
0.76114887f,1.370461f,-0.073800564f,
0.76092166f,1.3700442f,-0.06516588f,
0.7437659f,1.3767476f,-0.06293154f,
0.74334925f,1.3789821f,-0.07550478f,
0.7759566f,1.3783004f,-0.07444441f,
0.77561575f,1.3786032f,-0.06577182f,
0.77436584f,1.3719001f,-0.067135274f,
0.77429014f,1.3719757f,-0.0736112f,
0.047236204f,1.5636427f,0.078139186f,
-0.10830134f,0.8324984f,-0.13814396f,
-0.11409569f,1.0070095f,-0.08182931f,
-0.12875187f,0.5155533f,-0.057023585f,
-0.036194324f,0.502639f,-0.0076771975f,
-0.10356742f,0.5065399f,0.030459285f,
-0.03979212f,1.0524931f,-0.07993573f,
-0.05020672f,0.99644345f,-0.10209048f,
-0.019758165f,0.84079224f,-0.14098436f,
-0.14106011f,0.7969373f,-0.1047793f,
-0.057667434f,0.5274448f,-0.081299126f,
-2.7239323E-5f,0.8056099f,-0.0859952f,
-0.13144076f,1.0165532f,0.043714166f,
-0.13322067f,1.035413f,-0.0290367f,
-0.14526379f,0.95898885f,-0.07262659f,
-0.053198576f,0.8398834f,0.056363225f,
-0.14602125f,0.946832f,0.039927006f,
-2.7239323E-5f,1.4908538f,0.09571153f,
-2.7239323E-5f,1.478735f,0.10063481f,
-2.7239323E-5f,1.4992235f,0.105027914f,
-2.7239323E-5f,1.489907f,0.09953654f,
-0.078458786f,1.5647788f,-0.0039658546f,
-0.08311695f,1.563226f,-0.0043445826f,
-0.07455802f,1.5278921f,-0.013244331f,
-0.08122343f,1.5455022f,-0.02085644f,
-0.07493669f,1.5584922f,0.0049718022f,
-0.08126128f,1.5637183f,-0.010934174f,
-0.082889736f,1.558265f,-0.01672846f,
-0.06948322f,1.5214162f,0.009251237f,
-0.07012707f,1.5338001f,0.017734468f,
-0.0662263f,1.516114f,-1.4084578E-4f,
-0.05781889f,1.5056238f,0.005236864f,
-0.07921618f,1.5270588f,-0.012676179f,
-0.074065685f,1.5142205f,-7.0887804E-4f,
-0.0719828f,1.5227416f,0.0068275332f,
-0.078155816f,1.5583026f,0.0022450686f,
-0.08633602f,1.5626202f,-0.010214627f,
-0.08822954f,1.5575454f,-0.01517576f,
-0.086714745f,1.5457294f,-0.018735707f,
-0.038655996f,1.61742f,-0.08410156f,
-0.035171807f,1.6625247f,-0.048502564f,
-0.03289956f,1.6661983f,0.038904488f,
-0.06766546f,1.5842824f,-0.04543495f,
-0.0698241f,1.610641f,-0.03471738f,
-0.0728538f,1.6239717f,-0.0013527274f,
-0.06781691f,1.6072326f,0.04038155f,
-0.050433934f,1.5120997f,0.07355678f,
-0.021007895f,1.4918005f,0.09184867f,
-0.023318112f,1.4908917f,0.08707684f,
-0.023431718f,1.4908538f,0.08870536f,
-0.022598505f,1.4910054f,0.08650881f,
-0.020402014f,1.4910433f,0.0878343f,
-0.018281221f,1.4911568f,0.089993f,
-0.007980168f,1.4911189f,0.09718847f,
-0.01483494f,1.4915354f,0.093325615f,
-0.025400996f,1.4984281f,0.090485275f,
-0.04956293f,1.4936943f,-0.024908662f,
-0.03668666f,1.5031621f,-0.06327236f,
-0.020439863f,1.4469988f,0.01580298f,
-0.02559036f,1.4910054f,0.08696324f,
-0.005632162f,1.5010791f,0.10449773f,
-0.00824523f,1.5022154f,0.1033237f,
-0.0033220053f,1.4997158f,0.10495216f,
-0.013320088f,1.500246f,0.100559056f,
-0.023015141f,1.4944139f,0.092037976f,
-0.01862204f,1.4976707f,0.09733999f,
-0.019947529f,1.4907024f,0.09150779f,
-0.018129766f,1.5153568f,0.091394186f,
-0.024189115f,1.4782805f,0.0864709f,
-0.023128748f,1.4875212f,0.08874327f,
-0.020704985f,1.4858549f,0.09022021f,
-0.02528739f,1.4901721f,0.086054325f,
-0.026120543f,1.4915354f,0.08787215f,
-0.0029054284f,1.5627716f,0.0952571f,
-0.016349733f,1.482333f,0.094537556f,
-0.007374227f,1.4789245f,0.09858978f,
-0.030286431f,1.4562774f,0.063634515f,
-0.017788827f,1.491687f,0.08942491f,
-0.0074878335f,1.4914598f,0.0952571f,
-0.010555446f,1.4918385f,0.094878376f,
-0.01483494f,1.4917626f,0.09275752f,
-0.00222373f,1.491384f,0.09631747f,
-0.0038522482f,1.4914978f,0.09590089f,
-0.01010102f,1.4471505f,0.07844216f,
-0.0108962655f,1.464041f,0.09306049f,
-0.026347756f,1.6220024f,0.08397144f,
-0.7743446f,1.3719757f,-0.0736112f,
-0.7744204f,1.3719001f,-0.067135274f,
-0.77563226f,1.3786032f,-0.06577182f,
-0.776011f,1.3783004f,-0.07444441f,
-0.74340373f,1.3789821f,-0.07550478f,
-0.74378246f,1.3767476f,-0.06293154f,
-0.7609761f,1.3700442f,-0.06516588f,
-0.76116544f,1.370461f,-0.073800564f,
-0.7432901f,1.3681507f,-0.06482506f,
-0.74321437f,1.3684537f,-0.07334614f,
-0.7607488f,1.3767476f,-0.06327236f,
-0.75995356f,1.3788683f,-0.07588345f,
-0.7702924f,1.3787549f,-0.058046103f,
-0.7708226f,1.3780732f,-0.044109464f,
-0.7504478f,1.3686054f,-0.057099342f,
-0.7505614f,1.3684537f,-0.044412434f,
-0.7709741f,1.3700066f,-0.057553828f,
-0.7713907f,1.3695142f,-0.046002984f,
-0.75116736f,1.3789821f,-0.044374526f,
-0.7510159f,1.3785276f,-0.05781889f,
-0.78937954f,1.3804967f,-0.05524361f,
-0.79085654f,1.3800802f,-0.047631502f,
-0.7896825f,1.372771f,-0.04770726f,
-0.7894553f,1.3728848f,-0.055773795f,
-0.7696107f,1.37902f,-0.014569819f,
-0.76930773f,1.3788683f,-8.224845E-4f,
-0.7488572f,1.3684537f,-0.014872789f,
-0.7490087f,1.3694763f,-0.0011254549f,
-0.76942134f,1.3695521f,-0.013660908f,
-0.7698758f,1.3697793f,-8.224845E-4f,
-0.74934953f,1.37902f,-0.0016556978f,
-0.7495768f,1.3798151f,-0.015516579f,
-0.78900087f,1.3802695f,-0.011085629f,
-0.7904778f,1.3802695f,-0.002640307f,
-0.78907657f,1.3729982f,-0.004041612f,
-0.788357f,1.3726196f,-0.010366082f,
-0.610854f,1.3273256f,-0.060886443f,
-0.7943786f,1.3727331f,-0.035133958f,
-0.79403776f,1.3725438f,-0.02593118f,
-0.71128887f,1.3849277f,-0.05683416f,
-0.7126901f,1.3853445f,-0.0378986f,
-0.71238714f,1.3856096f,-0.021841109f,
-0.60676396f,1.3582284f,-0.05838698f,
-0.37355226f,1.355653f,-0.010555446f,
-0.37328717f,1.3119497f,-0.010252476f,
-0.3577599f,1.3586829f,-0.062401354f,
-0.35427573f,1.3194861f,-0.07114953f,
-0.09974241f,1.3850412f,0.003835678f,
-0.012221754f,1.3691733f,0.03151965f,
-0.11595142f,1.0954392f,0.051402092f,
-0.115118265f,1.1322501f,-0.0043445826f,
-0.008358896f,1.2053418f,0.09567368f,
-0.025363147f,1.2120073f,0.09431034f,
-0.2229001f,1.3837917f,-0.0728538f,
-0.7174619f,1.3590617f,-0.05622828f,
-0.7158713f,1.3586073f,-0.061378777f,
-0.1793859f,1.3019516f,-0.07823151f,
-0.051304996f,1.4256396f,-0.06357533f,
-0.09671277f,1.3971982f,-0.081640005f,
-0.10958898f,1.1947758f,0.056060255f,
-0.14488512f,1.3022926f,-0.002261579f,
-0.7274978f,1.3448219f,0.019703746f,
-0.72817945f,1.3450491f,0.030421376f,
-0.71382624f,1.3648937f,-6.710291E-4f,
-0.68197644f,1.3432314f,0.020461142f,
-0.01203239f,1.3921611f,0.014326036f,
-0.7271191f,1.3339529f,0.021218657f,
-0.71306884f,1.3637197f,-0.07448226f,
-0.71026635f,1.381027f,-0.0038900971f,
-0.7126522f,1.3861396f,-0.016387641f,
-0.69072473f,1.3677721f,-0.0027160645f,
-0.6096043f,1.3270223f,-0.018129766f,
-0.6054385f,1.3563728f,-0.0246436f,
-0.72799015f,1.3352785f,0.032049835f,
-0.05270624f,1.3978419f,-0.021121562f,
-0.71102375f,1.3809891f,-0.07421714f,
-0.70958465f,1.3854201f,-0.062363446f,
-0.7120842f,1.3857989f,-0.04418522f,
-0.22986835f,1.3876166f,-0.008964896f,
-0.6931485f,1.333915f,0.0066003203f,
-0.6958374f,1.3453522f,0.0019042492f,
-0.6598596f,1.3618641f,-0.009229958f,
-0.16934997f,1.3448219f,0.025346577f,
-0.1445443f,1.3263786f,0.0337919f,
-0.121480584f,1.3868971f,0.015954554f,
-0.038958967f,1.3661437f,0.037465394f,
-0.7959313f,1.3804967f,-0.026423514f,
-0.79547685f,1.3809135f,-0.034906745f,
-0.7538941f,1.3788683f,-0.03725469f,
-0.7537048f,1.3800044f,-0.02430278f,
-0.7769199f,1.3700066f,-0.024870813f,
-0.77710927f,1.3702717f,-0.035588384f,
-0.75374264f,1.3687189f,-0.023810446f,
-0.7541592f,1.3685675f,-0.036573052f,
-0.7755944f,1.3792472f,-0.023924053f,
-0.7753293f,1.3792472f,-0.037065327f,
-0.10902089f,1.2038648f,0.07836646f,
-0.15264869f,1.2712002f,-0.08186722f,
-0.09292561f,1.2367373f,-0.09197885f,
-0.6857636f,1.3519418f,0.015310645f,
-0.71920395f,1.3614097f,-0.042518914f,
-0.720075f,1.3613718f,-0.02430278f,
-0.72079456f,1.361599f,-0.0366109f,
-0.71977204f,1.3616369f,-0.0174101f,
-0.70094997f,1.3542142f,-0.0022994876f,
-0.66383606f,1.3289919f,-0.009381473f,
-0.047290683f,1.5636427f,0.078139186f,
-0.03759563f,1.562052f,0.08306247f,
-0.034830987f,1.5626581f,0.083857834f,
-0.021613836f,1.5566363f,0.08185065f,
-0.020288408f,1.5561063f,0.081623375f,
-0.017523766f,1.55675f,0.08154768f,
-0.018508434f,1.5562198f,0.08185065f,
-0.020667076f,1.5581512f,0.08294886f,
-0.018470585f,1.5578861f,0.08211571f,
-0.017637372f,1.5572803f,0.081661284f,
-0.03255868f,1.5546293f,0.08419865f,
-0.0417614f,1.5602722f,0.0804494f,
-0.031005979f,1.5626581f,0.08461523f,
-0.022409141f,1.5603101f,0.083251834f,
-0.026158392f,1.5617113f,0.08363056f,
-0.042897582f,1.5592873f,0.0795784f,
-0.043200552f,1.558795f,0.07844216f,
-0.043124795f,1.5583785f,0.07870728f,
-0.042556763f,1.5577347f,0.07923752f,
-0.041799247f,1.5570908f,0.07999498f,
-0.03982997f,1.5561063f,0.08215356f,
-0.030059159f,1.5545914f,0.08431232f,
-0.03793645f,1.5554245f,0.083251834f,
-0.0350582f,1.5546672f,0.08389574f,
-0.027143061f,1.5547807f,0.083857834f,
-0.023886204f,1.5557275f,0.08264589f,
-0.012070298f,1.5690582f,0.09283328f,
-0.069634736f,0.0070926547f,-0.08088249f,
-0.06986195f,0.098362565f,-0.08220804f,
-0.124926925f,0.021900296f,0.097415745f,
-0.044374526f,0.043373346f,0.071549654f,
-0.050130963f,0.027050793f,0.13320416f,
-0.042140126f,0.011864424f,0.059544444f,
-0.05081266f,0.011902273f,0.12892473f,
-0.12606305f,0.014553249f,0.096771896f,
-0.12943357f,0.012129486f,0.053447127f,
-0.12822169f,0.03000474f,0.06420255f,
-0.10621846f,0.09737784f,-0.024378479f,
-0.05213821f,0.09790802f,-0.052782f,
-0.10932386f,0.10237688f,-0.07459587f,
-0.11462587f,0.012583971f,-0.07622433f,
-2.7239323E-5f,0.90290135f,-0.04835105f,
-2.7239323E-5f,0.90290135f,-0.032710195f,
0.007660687f,0.90290135f,-0.040511668f,
-2.7239323E-5f,0.8950619f,-0.040511668f,
-2.7239323E-5f,0.9107407f,-0.040511668f,
-2.7239323E-5f,0.9484226f,-0.07452011f,
-2.7239323E-5f,0.9484226f,-0.054524064f,
0.009857178f,0.9484226f,-0.06452209f,
-2.7239323E-5f,0.9384245f,-0.06452209f,
-0.009911656f,0.9484226f,-0.06452209f,
-2.7239323E-5f,0.9584206f,-0.06452209f,
-2.7239323E-5f,1.5185757f,0.002548039f,
-2.7239323E-5f,1.5185757f,0.015840828f,
0.0066381693f,1.5185757f,0.009213388f,
-2.7239323E-5f,1.5119104f,0.009213388f,
-0.00665468f,1.5185757f,0.009213388f,
-2.7239323E-5f,1.5252032f,0.009213388f,
-2.7239323E-5f,1.5273998f,-0.045965135f,
-2.7239323E-5f,1.5273998f,-0.032142103f,
0.0069032907f,1.5273998f,-0.039034724f,
-2.7239323E-5f,1.5204692f,-0.039034724f,
-0.0069198012f,1.5273998f,-0.039034724f,
-2.7239323E-5f,1.5342925f,-0.039034724f,
-2.7239323E-5f,1.4190876f,-0.058008254f,
-2.7239323E-5f,1.4190876f,-0.03827727f,
0.009857178f,1.4190876f,-0.048161685f,
-2.7239323E-5f,1.4092033f,-0.048161685f,
-0.009911656f,1.4190876f,-0.048161685f,
-2.7239323E-5f,1.4289722f,-0.048161685f,
-2.7239323E-5f,1.1096408f,-0.03914833f,
-2.7239323E-5f,1.1096408f,-0.019530952f,
0.009970784f,1.1096408f,-0.029226005f,
-2.7239323E-5f,1.099643f,-0.029301763f,
-0.010025263f,1.1096408f,-0.029226005f,
-2.7239323E-5f,1.1196389f,-0.029415369f,
-2.7239323E-5f,1.2383277f,-0.08122343f,
-2.7239323E-5f,1.2383277f,-0.061227262f,
0.009970784f,1.2383277f,-0.071225286f,
-2.7239323E-5f,1.2283297f,-0.071225286f,
-0.010025263f,1.2383277f,-0.071225286f,
-2.7239323E-5f,1.2483258f,-0.071225286f,
0.6343177f,1.3481927f,-0.038996816f,
0.6385214f,1.3439889f,-0.038996816f,
0.6343177f,1.3397851f,-0.038996816f,
0.63011414f,1.3439889f,-0.038996816f,
0.6343177f,1.3439889f,-0.034830987f,
0.6343177f,1.3439889f,-0.043200552f,
0.6567754f,1.3473594f,-0.014191091f,
0.66014606f,1.3439889f,-0.014191091f,
0.6567754f,1.3406184f,-0.014191091f,
0.6534049f,1.3439889f,-0.014191091f,
0.6567754f,1.3439889f,-0.010820568f,
0.6567754f,1.3439889f,-0.017523766f,
0.6889283f,1.3453522f,0.012849033f,
0.692223f,1.3419058f,0.012811184f,
0.6889283f,1.3381946f,0.012849033f,
0.68563336f,1.3415651f,0.012773335f,
0.6889661f,1.3417165f,0.01610601f,
0.6888904f,1.3417544f,0.009440601f,
0.70877284f,1.3435721f,0.01989311f,
0.7113102f,1.3411107f,0.019930959f,
0.70877284f,1.3387628f,0.019930959f,
0.70646256f,1.3412621f,0.019817352f,
0.7087349f,1.3411107f,0.02243048f,
0.70892435f,1.3411486f,0.017393649f,
0.713734f,1.3789821f,-0.010631204f,
0.7166123f,1.3761418f,-0.010631204f,
0.713734f,1.3732634f,-0.010631204f,
0.7108936f,1.3761418f,-0.010631204f,
0.713734f,1.3761418f,-0.007752955f,
0.713734f,1.3761418f,-0.013471544f,
0.71532446f,1.3786411f,-0.029945552f,
0.71786195f,1.3761418f,-0.029945552f,
0.71532446f,1.3736043f,-0.029945552f,
0.712825f,1.3761418f,-0.029945552f,
0.71532446f,1.3761418f,-0.027408183f,
0.71532446f,1.3761418f,-0.032482922f,
0.71335524f,1.3786411f,-0.050055265f,
0.7158925f,1.3761418f,-0.050055265f,
0.71335524f,1.3736043f,-0.050055265f,
0.7108179f,1.3761418f,-0.050055265f,
0.71335524f,1.3761418f,-0.047517896f,
0.71335524f,1.3761418f,-0.052554786f,
0.70748526f,1.3794744f,-0.06702167f,
0.7108558f,1.3761418f,-0.06702167f,
0.70748526f,1.372771f,-0.06702167f,
0.7041524f,1.3761418f,-0.06702167f,
0.70748526f,1.3761418f,-0.063651085f,
0.70748526f,1.3761418f,-0.07039213f,
0.74588674f,1.3779216f,-0.009229958f,
0.748462f,1.3753462f,-0.009229958f,
0.74588674f,1.3727331f,-0.009229958f,
0.74327356f,1.3753462f,-0.009229958f,
0.74588674f,1.3753462f,-0.00665468f,
0.74588674f,1.3753462f,-0.011843026f,
0.7494467f,1.3776944f,-0.030740857f,
0.75179464f,1.3753462f,-0.030740857f,
0.7494467f,1.3729982f,-0.030740857f,
0.7470607f,1.3753462f,-0.030740857f,
0.7494467f,1.3753462f,-0.028392851f,
0.7494467f,1.3753462f,-0.033088923f,
0.74592465f,1.3776944f,-0.052024603f,
0.7482726f,1.3753462f,-0.052024603f,
0.74592465f,1.3729982f,-0.052024603f,
0.7435387f,1.3753462f,-0.052024603f,
0.74592465f,1.3753462f,-0.049676538f,
0.74592465f,1.3753462f,-0.05437261f,
0.7374036f,1.3776944f,-0.069975555f,
0.7397515f,1.3753462f,-0.069975555f,
0.7374036f,1.3729982f,-0.069975555f,
0.7350555f,1.3753462f,-0.069975555f,
0.7374036f,1.3753462f,-0.0675897f,
0.7374036f,1.3753462f,-0.07232362f,
0.75929314f,1.3776944f,-0.07035428f,
0.7616412f,1.3753462f,-0.07035428f,
0.75929314f,1.3729982f,-0.07035428f,
0.7569452f,1.3753462f,-0.07035428f,
0.75929314f,1.3753462f,-0.06800628f,
0.75929314f,1.3753462f,-0.07270235f,
0.7703137f,1.3776944f,-0.05122924f,
0.77266175f,1.3753462f,-0.05122924f,
0.7703137f,1.3729982f,-0.05122924f,
0.76796573f,1.3753462f,-0.05122924f,
0.7703137f,1.3753462f,-0.048881233f,
0.7703137f,1.3753462f,-0.053577304f,
0.77565366f,1.3776944f,-0.03032428f,
0.7780016f,1.3753462f,-0.03032428f,
0.77565366f,1.3729982f,-0.03032428f,
0.7733055f,1.3753462f,-0.03032428f,
0.77565366f,1.3753462f,-0.027976274f,
0.77565366f,1.3753462f,-0.032710195f,
0.7689504f,1.3778837f,-0.0070712566f,
0.7714878f,1.3753462f,-0.0070712566f,
0.7689504f,1.372771f,-0.0070712566f,
0.7663751f,1.3753462f,-0.0070712566f,
0.7689504f,1.3753462f,-0.0045339465f,
0.7689504f,1.3753462f,-0.009646535f,
0.39557678f,1.3420954f,-0.049979508f,
0.4035676f,1.3341043f,-0.049979508f,
0.39557678f,1.3261135f,-0.049979508f,
0.38758594f,1.3341043f,-0.049979508f,
0.39557678f,1.3341043f,-0.04198861f,
0.39557678f,1.3341043f,-0.057970405f,
0.16743988f,1.3633032f,-0.04043591f,
0.17543066f,1.3553123f,-0.04043591f,
0.16743988f,1.3472838f,-0.04043591f,
0.15941113f,1.3553123f,-0.04043591f,
0.16743988f,1.3553123f,-0.032407165f,
0.16743988f,1.3553123f,-0.048426807f,
0.110367775f,0.88703316f,-0.042821825f,
0.11843437f,0.8786257f,-0.042821825f,
0.11051923f,0.8702182f,-0.042821825f,
0.10233897f,0.8786257f,-0.042821825f,
0.11021626f,0.8786257f,-0.03445226f,
0.11010265f,0.8786257f,-0.05122924f,
0.084804595f,0.4669643f,-0.022977293f,
0.0931741f,0.45855683f,-0.022977293f,
0.0849182f,0.45018727f,-0.022977293f,
0.076586485f,0.45855683f,-0.022977293f,
0.0848425f,0.45855683f,-0.014569819f,
0.0848425f,0.45855683f,-0.031384647f,
0.0820021f,0.08722836f,-0.05592531f,
0.09097761f,0.07825285f,-0.05592531f,
0.0820021f,0.069315255f,-0.05592531f,
0.07306445f,0.07825285f,-0.05592531f,
0.0820021f,0.07825285f,-0.046949744f,
0.0820021f,0.07825285f,-0.064900815f,
0.121464014f,0.025838971f,0.059468627f,
0.12597072f,0.021370113f,0.059468627f,
0.121464014f,0.016901314f,0.059468627f,
0.116995215f,0.021370113f,0.059468627f,
0.121464014f,0.021370113f,0.063975334f,
0.121464014f,0.021370113f,0.054999888f,
0.12275171f,0.023793876f,0.084122956f,
0.12725836f,0.019325018f,0.084122956f,
0.12275171f,0.014856219f,0.084122956f,
0.118282914f,0.019325018f,0.084122956f,
0.12275171f,0.019325018f,0.088591754f,
0.12275171f,0.019325018f,0.07961625f,
0.106580615f,0.023225784f,0.0663234f,
0.11104941f,0.018719077f,0.0663234f,
0.106580615f,0.0142502785f,0.0663234f,
0.10207391f,0.018719077f,0.0663234f,
0.106580615f,0.018719077f,0.0707922f,
0.106580615f,0.018719077f,0.061816692f,
0.10767883f,0.025195062f,0.09378016f,
0.11214769f,0.020726323f,0.09378016f,
0.10767883f,0.016257524f,0.09378016f,
0.10321009f,0.020726323f,0.09378016f,
0.10767883f,0.020726323f,0.09828681f,
0.10767883f,0.020726323f,0.0893113f,
0.09059894f,0.025422275f,0.07204193f,
0.09506774f,0.020953536f,0.07204193f,
0.09059894f,0.016484737f,0.07204193f,
0.08609223f,0.020953536f,0.07204193f,
0.09059894f,0.020953536f,0.07651079f,
0.09059894f,0.020953536f,0.06757313f,
0.093628645f,0.024892092f,0.10737586f,
0.09813529f,0.020423293f,0.10737586f,
0.093628645f,0.015954554f,0.10737586f,
0.08915985f,0.020423293f,0.10737586f,
0.093628645f,0.020423293f,0.11184472f,
0.093628645f,0.020423293f,0.10290706f,
0.07325381f,0.02621764f,0.07738179f,
0.07772261f,0.021710932f,0.07738179f,
0.07325381f,0.017242134f,0.07738179f,
0.0687471f,0.021710932f,0.07738179f,
0.07325381f,0.021710932f,0.08185065f,
0.07325381f,0.021710932f,0.07287508f,
0.07681376f,0.029398799f,0.10381603f,
0.081282556f,0.024892092f,0.10381603f,
0.07681376f,0.020423293f,0.10381603f,
0.0723449f,0.024892092f,0.10381603f,
0.07681376f,0.024892092f,0.10828483f,
0.07681376f,0.024892092f,0.099309325f,
0.07764691f,0.024778485f,0.119229674f,
0.08211571f,0.020271778f,0.119229674f,
0.07764691f,0.01580298f,0.119229674f,
0.07317805f,0.020271778f,0.119229674f,
0.07764691f,0.020271778f,0.12369847f,
0.07764691f,0.020271778f,0.11472297f,
0.05147785f,0.026709974f,0.07870728f,
0.055984497f,0.022241175f,0.07870728f,
0.05147785f,0.017772317f,0.07870728f,
0.04700899f,0.022241175f,0.07870728f,
0.05147785f,0.022241175f,0.083213985f,
0.05147785f,0.022241175f,0.07423854f,
0.05367434f,0.027694643f,0.10695928f,
0.05814314f,0.023225784f,0.10695928f,
0.05367434f,0.018756986f,0.10695928f,
0.049167633f,0.023225784f,0.10695928f,
0.05367434f,0.023225784f,0.11142814f,
0.05367434f,0.023225784f,0.102452636f,
0.026634216f,1.3569031f,0.014439642f,
0.034662902f,1.3489122f,0.014439642f,
0.026634216f,1.3409214f,0.014439642f,
0.01864338f,1.3489122f,0.014439642f,
0.026634216f,1.3489122f,0.02243048f,
0.026634216f,1.3489122f,0.0064109564f,
0.093363464f,0.020953536f,0.08438802f,
0.093363464f,0.020953536f,0.093363464f,
0.088894725f,0.020953536f,0.088894725f,
0.093363464f,0.016484737f,0.088894725f,
0.09787017f,0.020953536f,0.088894725f,
0.093363464f,0.025422275f,0.088894725f,
0.10832274f,0.025460184f,0.07935113f,
0.11282945f,0.020991445f,0.07935113f,
0.10832274f,0.016484737f,0.07935113f,
0.10385388f,0.020991445f,0.07935113f,
0.10832274f,0.020991445f,0.083819985f,
0.10832274f,0.020991445f,0.07488233f,
0.123319745f,0.020991445f,0.06700504f,
0.123319745f,0.020991445f,0.075942695f,
0.11885095f,0.020991445f,0.0714739f,
0.123319745f,0.016484737f,0.0714739f,
0.1277886f,0.020991445f,0.0714739f,
0.123319745f,0.025460184f,0.0714739f,
-0.0077150464f,0.90290135f,-0.040511668f,
-0.123374164f,0.025460184f,0.0714739f,
-0.12784296f,0.020991445f,0.0714739f,
-0.123374164f,0.016484737f,0.0714739f,
-0.11886746f,0.020991445f,0.0714739f,
-0.123374164f,0.020991445f,0.075942695f,
-0.123374164f,0.020991445f,0.06700504f,
-0.1083771f,0.020991445f,0.07488233f,
-0.1083771f,0.020991445f,0.083819985f,
-0.1039083f,0.020991445f,0.07935113f,
-0.1083771f,0.016484737f,0.07935113f,
-0.11284596f,0.020991445f,0.07935113f,
-0.1083771f,0.025460184f,0.07935113f,
-0.09341794f,0.025422275f,0.088894725f,
-0.09788674f,0.020953536f,0.088894725f,
-0.09341794f,0.016484737f,0.088894725f,
-0.088949084f,0.020953536f,0.088894725f,
-0.09341794f,0.020953536f,0.093363464f,
-0.09341794f,0.020953536f,0.08438802f,
-0.026688635f,1.3489122f,0.0064109564f,
-0.026688635f,1.3489122f,0.02243048f,
-0.018697798f,1.3489122f,0.014439642f,
-0.026688635f,1.3409214f,0.014439642f,
-0.034679472f,1.3489122f,0.014439642f,
-0.026688635f,1.3569031f,0.014439642f,
-0.05369091f,0.023225784f,0.102452636f,
-0.05369091f,0.023225784f,0.11142814f,
-0.04922211f,0.023225784f,0.10695928f,
-0.05369091f,0.018756986f,0.10695928f,
-0.05815971f,0.023225784f,0.10695928f,
-0.05369091f,0.027694643f,0.10695928f,
-0.05153221f,0.022241175f,0.07423854f,
-0.05153221f,0.022241175f,0.083213985f,
-0.04706335f,0.022241175f,0.07870728f,
-0.05153221f,0.017772317f,0.07870728f,
-0.056001008f,0.022241175f,0.07870728f,
-0.05153221f,0.026709974f,0.07870728f,
-0.07770133f,0.020271778f,0.11472297f,
-0.07770133f,0.020271778f,0.12369847f,
-0.07319462f,0.020271778f,0.119229674f,
-0.07770133f,0.01580298f,0.119229674f,
-0.08217019f,0.020271778f,0.119229674f,
-0.07770133f,0.024778485f,0.119229674f,
-0.07686818f,0.024892092f,0.099309325f,
-0.07686818f,0.024892092f,0.10828483f,
-0.07236147f,0.024892092f,0.10381603f,
-0.07686818f,0.020423293f,0.10381603f,
-0.081337035f,0.024892092f,0.10381603f,
-0.07686818f,0.029398799f,0.10381603f,
-0.07327038f,0.021710932f,0.07287508f,
-0.07327038f,0.021710932f,0.08185065f,
-0.06880158f,0.021710932f,0.07738179f,
-0.07327038f,0.017242134f,0.07738179f,
-0.07773924f,0.021710932f,0.07738179f,
-0.07327038f,0.02621764f,0.07738179f,
-0.093683004f,0.020423293f,0.10290706f,
-0.093683004f,0.020423293f,0.11184472f,
-0.089214206f,0.020423293f,0.10737586f,
-0.093683004f,0.015954554f,0.10737586f,
-0.09815186f,0.020423293f,0.10737586f,
-0.093683004f,0.024892092f,0.10737586f,
-0.09061545f,0.020953536f,0.06757313f,
-0.09061545f,0.020953536f,0.07651079f,
-0.08614665f,0.020953536f,0.07204193f,
-0.09061545f,0.016484737f,0.07204193f,
-0.0951221f,0.020953536f,0.07204193f,
-0.09061545f,0.025422275f,0.07204193f,
-0.10769546f,0.020726323f,0.0893113f,
-0.10769546f,0.020726323f,0.09828681f,
-0.1032266f,0.020726323f,0.09378016f,
-0.10769546f,0.016257524f,0.09378016f,
-0.11220211f,0.020726323f,0.09378016f,
-0.10769546f,0.025195062f,0.09378016f,
-0.106597185f,0.018719077f,0.061816692f,
-0.106597185f,0.018719077f,0.0707922f,
-0.10212833f,0.018719077f,0.0663234f,
-0.106597185f,0.0142502785f,0.0663234f,
-0.11110389f,0.018719077f,0.0663234f,
-0.106597185f,0.023225784f,0.0663234f,
-0.12280607f,0.019325018f,0.07961625f,
-0.12280607f,0.019325018f,0.088591754f,
-0.11833727f,0.019325018f,0.084122956f,
-0.12280607f,0.014856219f,0.084122956f,
-0.12727493f,0.019325018f,0.084122956f,
-0.12280607f,0.023793876f,0.084122956f,
-0.12151849f,0.021370113f,0.054999888f,
-0.12151849f,0.021370113f,0.063975334f,
-0.117049634f,0.021370113f,0.059468627f,
-0.12151849f,0.016901314f,0.059468627f,
-0.12598729f,0.021370113f,0.059468627f,
-0.12151849f,0.025838971f,0.059468627f,
-0.08205658f,0.07825285f,-0.064900815f,
-0.08205658f,0.07825285f,-0.046949744f,
-0.07308102f,0.07825285f,-0.05592531f,
-0.08205658f,0.069315255f,-0.05592531f,
-0.09103209f,0.07825285f,-0.05592531f,
-0.08205658f,0.08722836f,-0.05592531f,
-0.08489686f,0.45855683f,-0.031384647f,
-0.08489686f,0.45855683f,-0.014569819f,
-0.07664096f,0.45855683f,-0.022977293f,
-0.08493471f,0.45018727f,-0.022977293f,
-0.09322858f,0.45855683f,-0.022977293f,
-0.08485901f,0.4669643f,-0.022977293f,
-0.11015701f,0.8786257f,-0.05122924f,
-0.11023283f,0.8786257f,-0.03445226f,
-0.1023556f,0.8786257f,-0.042821825f,
-0.1105358f,0.8702182f,-0.042821825f,
-0.11848873f,0.8786257f,-0.042821825f,
-0.110422194f,0.88703316f,-0.042821825f,
-0.16745639f,1.3553123f,-0.048426807f,
-0.16745639f,1.3553123f,-0.032407165f,
-0.15946555f,1.3553123f,-0.04043591f,
-0.16745639f,1.3472838f,-0.04043591f,
-0.17544729f,1.3553123f,-0.04043591f,
-0.16745639f,1.3633032f,-0.04043591f,
-0.39559337f,1.3341043f,-0.057970405f,
-0.39559337f,1.3341043f,-0.04198861f,
-0.38760248f,1.3341043f,-0.049979508f,
-0.39559337f,1.3261135f,-0.049979508f,
-0.4036221f,1.3341043f,-0.049979508f,
-0.39559337f,1.3420954f,-0.049979508f,
-0.7689669f,1.3753462f,-0.009646535f,
-0.7689669f,1.3753462f,-0.0045339465f,
-0.76642954f,1.3753462f,-0.0070712566f,
-0.7689669f,1.372771f,-0.0070712566f,
-0.7715422f,1.3753462f,-0.0070712566f,
-0.7689669f,1.3778837f,-0.0070712566f,
-0.7756701f,1.3753462f,-0.032710195f,
-0.7756701f,1.3753462f,-0.027976274f,
-0.7733221f,1.3753462f,-0.03032428f,
-0.7756701f,1.3729982f,-0.03032428f,
-0.7780182f,1.3753462f,-0.03032428f,
-0.7756701f,1.3776944f,-0.03032428f,
-0.77036816f,1.3753462f,-0.053577304f,
-0.77036816f,1.3753462f,-0.048881233f,
-0.76802015f,1.3753462f,-0.05122924f,
-0.77036816f,1.3729982f,-0.05122924f,
-0.77271616f,1.3753462f,-0.05122924f,
-0.77036816f,1.3776944f,-0.05122924f,
-0.7593097f,1.3753462f,-0.07270235f,
-0.7593097f,1.3753462f,-0.06800628f,
-0.7569617f,1.3753462f,-0.07035428f,
-0.7593097f,1.3729982f,-0.07035428f,
-0.7616577f,1.3753462f,-0.07035428f,
-0.7593097f,1.3776944f,-0.07035428f,
-0.7374201f,1.3753462f,-0.07232362f,
-0.7374201f,1.3753462f,-0.0675897f,
-0.7350721f,1.3753462f,-0.069975555f,
-0.7374201f,1.3729982f,-0.069975555f,
-0.7397681f,1.3753462f,-0.069975555f,
-0.7374201f,1.3776944f,-0.069975555f,
-0.74594116f,1.3753462f,-0.05437261f,
-0.74594116f,1.3753462f,-0.049676538f,
-0.7435931f,1.3753462f,-0.052024603f,
-0.74594116f,1.3729982f,-0.052024603f,
-0.74828917f,1.3753462f,-0.052024603f,
-0.74594116f,1.3776944f,-0.052024603f,
-0.74946314f,1.3753462f,-0.033088923f,
-0.74946314f,1.3753462f,-0.028392851f,
-0.74711514f,1.3753462f,-0.030740857f,
-0.74946314f,1.3729982f,-0.030740857f,
-0.7518112f,1.3753462f,-0.030740857f,
-0.74946314f,1.3776944f,-0.030740857f,
-0.74590325f,1.3753462f,-0.011843026f,
-0.74590325f,1.3753462f,-0.00665468f,
-0.74332803f,1.3753462f,-0.009229958f,
-0.74590325f,1.3727331f,-0.009229958f,
-0.7485164f,1.3753462f,-0.009229958f,
-0.74590325f,1.3779216f,-0.009229958f,
-0.7075396f,1.3761418f,-0.07039213f,
-0.7075396f,1.3761418f,-0.063651085f,
-0.70416903f,1.3761418f,-0.06702167f,
-0.7075396f,1.372771f,-0.06702167f,
-0.71091014f,1.3761418f,-0.06702167f,
-0.7075396f,1.3794744f,-0.06702167f,
-0.71340966f,1.3761418f,-0.052554786f,
-0.71340966f,1.3761418f,-0.047517896f,
-0.7108723f,1.3761418f,-0.050055265f,
-0.71340966f,1.3736043f,-0.050055265f,
-0.7159092f,1.3761418f,-0.050055265f,
-0.71340966f,1.3786411f,-0.050055265f,
-0.71537894f,1.3761418f,-0.032482922f,
-0.71537894f,1.3761418f,-0.027408183f,
-0.7128416f,1.3761418f,-0.029945552f,
-0.71537894f,1.3736043f,-0.029945552f,
-0.71787846f,1.3761418f,-0.029945552f,
-0.71537894f,1.3786411f,-0.029945552f,
-0.7137884f,1.3761418f,-0.013471544f,
-0.7137884f,1.3761418f,-0.007752955f,
-0.71091014f,1.3761418f,-0.010631204f,
-0.7137884f,1.3732634f,-0.010631204f,
-0.7166666f,1.3761418f,-0.010631204f,
-0.7137884f,1.3789821f,-0.010631204f,
-0.7089787f,1.3411486f,0.017393649f,
-0.70878935f,1.3411107f,0.02243048f,
-0.7065171f,1.3412621f,0.019817352f,
-0.7088272f,1.3387628f,0.019930959f,
-0.7113646f,1.3411107f,0.019930959f,
-0.70878935f,1.3435721f,0.01989311f,
-0.6889069f,1.3417544f,0.009440601f,
-0.6890205f,1.3417165f,0.01610601f,
-0.68565f,1.3415651f,0.012773335f,
-0.6889826f,1.3381946f,0.012849033f,
-0.69227743f,1.3419058f,0.012811184f,
-0.6889826f,1.3453522f,0.012849033f,
-0.65682983f,1.3439889f,-0.017523766f,
-0.65682983f,1.3439889f,-0.010820568f,
-0.6534593f,1.3439889f,-0.014191091f,
-0.65682983f,1.3406184f,-0.014191091f,
-0.66016257f,1.3439889f,-0.014191091f,
-0.65682983f,1.3473594f,-0.014191091f,
-0.6343343f,1.3439889f,-0.043200552f,
-0.6343343f,1.3439889f,-0.034830987f,
-0.6301306f,1.3439889f,-0.038996816f,
-0.6343343f,1.3397851f,-0.038996816f,
-0.638538f,1.3439889f,-0.038996816f,
-0.6343343f,1.3481927f,-0.038996816f,
-0.0035113096f,1.5200906f,0.11222339f,
-0.0847075f,0.02144587f,0.065793216f,
-0.0934937f,0.0126597285f,0.065793216f,
-0.0847075f,0.003873527f,0.065793216f,
-0.07592136f,0.0126597285f,0.065793216f,
-0.0847075f,0.0126597285f,0.07457936f,
-0.0847075f,0.0126597285f,0.057007015f,
-0.0847075f,0.020044565f,0.13933933f,
-0.0847075f,0.020044565f,0.15691167f,
-0.07592136f,0.020044565f,0.14812547f,
-0.0847075f,0.011258483f,0.14812547f,
-0.0934937f,0.020044565f,0.14812547f,
-0.0847075f,0.028830767f,0.14812547f,
-0.057894647f,0.027694643f,0.13672614f,
-0.062401354f,0.023225784f,0.13672614f,
-0.057894647f,0.018756986f,0.13672614f,
-0.05342579f,0.023225784f,0.13672614f,
-0.057894647f,0.023225784f,0.141195f,
-0.057894647f,0.023225784f,0.1322574f,
-0.078686f,0.021710932f,0.13665044f,
-0.083192706f,0.017242134f,0.13665044f,
-0.078686f,0.012773335f,0.13665044f,
-0.07421714f,0.017242134f,0.13665044f,
-0.078686f,0.017242134f,0.14111924f,
-0.078686f,0.017242134f,0.13218158f,
-0.09440255f,0.024892092f,0.12740988f,
-0.09890926f,0.020423293f,0.12740988f,
-0.09440255f,0.015954554f,0.12740988f,
-0.08993375f,0.020423293f,0.12740988f,
-0.09440255f,0.020423293f,0.13191652f,
-0.09440255f,0.020423293f,0.12294108f,
-0.10780907f,0.025195062f,0.112905145f,
-0.112315714f,0.020726323f,0.112905145f,
-0.10780907f,0.016257524f,0.112905145f,
-0.10334021f,0.020726323f,0.112905145f,
-0.10780907f,0.020726323f,0.11737394f,
-0.10780907f,0.020726323f,0.108436346f,
-0.12159425f,0.023793876f,0.10002887f,
-0.12606305f,0.019325018f,0.10002887f,
-0.12159425f,0.014856219f,0.10002887f,
-0.11712539f,0.019325018f,0.10002887f,
-0.12159425f,0.019325018f,0.10453558f,
-0.12159425f,0.019325018f,0.095560074f,
-0.66417694f,1.3476245f,-0.053577304f,
-0.6678126f,1.3439889f,-0.053577304f,
-0.66417694f,1.3403533f,-0.053577304f,
-0.6605034f,1.3439889f,-0.053577304f,
-0.66417694f,1.3439889f,-0.04994166f,
-0.66417694f,1.3439889f,-0.057250857f,
-0.66372246f,1.3439889f,-0.035133958f,
-0.66372246f,1.3439889f,-0.027862668f,
-0.6600868f,1.3439889f,-0.031498253f,
-0.66372246f,1.3403533f,-0.031498253f,
-0.66739595f,1.3439889f,-0.031498253f,
-0.66372246f,1.3476245f,-0.031498253f,
-0.73132277f,1.3427012f,0.02875501f,
-0.7338602f,1.3402395f,0.028792858f,
-0.73132277f,1.3378916f,0.028792858f,
-0.72901267f,1.3403912f,0.028679252f,
-0.7312849f,1.3402395f,0.03125453f,
-0.7314743f,1.3402774f,0.02621764f,
-0.7907051f,1.3778458f,-0.00695765f,
-0.79324245f,1.3753462f,-0.00695765f,
-0.7907051f,1.3728089f,-0.00695765f,
-0.78816766f,1.3753462f,-0.00695765f,
-0.7907051f,1.3753462f,-0.0044202805f,
-0.7907051f,1.3753462f,-0.009457171f,
-0.7985444f,1.3777702f,-0.030778706f,
-0.8009303f,1.3753462f,-0.030778706f,
-0.7985444f,1.3729224f,-0.030778706f,
-0.79612064f,1.3753462f,-0.030778706f,
-0.7985444f,1.3753462f,-0.028355002f,
-0.7985444f,1.3753462f,-0.03320253f,
-0.79233354f,1.3776944f,-0.051721573f,
-0.79468155f,1.3753462f,-0.051721573f,
-0.79233354f,1.3729982f,-0.051721573f,
-0.7899855f,1.3753462f,-0.051721573f,
-0.79233354f,1.3753462f,-0.049373567f,
-0.79233354f,1.3753462f,-0.054107487f,
-0.776882f,1.3776944f,-0.07024068f,
-0.77923006f,1.3753462f,-0.07024068f,
-0.776882f,1.3729982f,-0.07024068f,
-0.774534f,1.3753462f,-0.07024068f,
-0.776882f,1.3753462f,-0.06789267f,
-0.776882f,1.3753462f,-0.07258874f,
-2.7239323E-5f,1.4909675f,0.01326561f,
-0.005632162f,1.4853625f,0.01326561f,
-2.7239323E-5f,1.4797575f,0.01326561f,
0.005577743f,1.4853625f,0.01326561f,
-2.7239323E-5f,1.4853625f,0.018870592f,
-2.7239323E-5f,1.4853625f,0.007660687f,
-2.7239323E-5f,1.4914598f,0.03602636f,
-2.7239323E-5f,1.4914598f,0.048561692f,
0.0062594414f,1.4914598f,0.04227507f,
-2.7239323E-5f,1.4851732f,0.04227507f,
-0.006275952f,1.4914598f,0.04227507f,
-2.7239323E-5f,1.4977086f,0.04227507f,
-2.7239323E-5f,1.4963453f,0.06575531f,
-0.006616831f,1.4897556f,0.06575531f,
-2.7239323E-5f,1.4831283f,0.06575531f,
0.006562412f,1.4897556f,0.06575531f,
-2.7239323E-5f,1.4897556f,0.07238281f,
-2.7239323E-5f,1.4897556f,0.059165657f,
-2.7239323E-5f,1.4850218f,0.073708236f,
-2.7239323E-5f,1.4850218f,0.08662242f,
0.0064488053f,1.4850218f,0.08014643f,
-2.7239323E-5f,1.4785836f,0.08014643f,
-0.006465316f,1.4850218f,0.08014643f,
-2.7239323E-5f,1.4914978f,0.08014643f,
-2.7239323E-5f,1.4562016f,0.087644935f,
-0.0071091056f,1.4491196f,0.087644935f,
-2.7239323E-5f,1.4419999f,0.087644935f,
0.007054746f,1.4491196f,0.087644935f,
-2.7239323E-5f,1.4491196f,0.09472692f,
-2.7239323E-5f,1.4491196f,0.08056301f,
-0.031725526f,1.574739f,0.10381603f,
-0.035626233f,1.5708003f,0.10381603f,
-0.031725526f,1.5668995f,0.10381603f,
-0.02778691f,1.5708003f,0.10381603f,
-0.031725526f,1.5708003f,0.10775459f,
-0.031725526f,1.5708003f,0.099915266f,
-0.031725526f,1.5593252f,0.10294491f,
-0.031725526f,1.5593252f,0.11078435f,
-0.02778691f,1.5593252f,0.106883585f,
-0.031725526f,1.5554245f,0.106883585f,
-0.035626233f,1.5593252f,0.106883585f,
-0.031725526f,1.5632639f,0.106883585f,
-0.031725526f,1.5480776f,0.10105139f,
-0.031725526f,1.5480776f,0.10889077f,
-0.02778691f,1.5480776f,0.104990005f,
-0.031725526f,1.5441768f,0.104990005f,
-0.035626233f,1.5480776f,0.104990005f,
-0.031725526f,1.5520163f,0.104990005f,
0.031671107f,1.5520163f,0.104990005f,
0.035571873f,1.5480776f,0.104990005f,
0.031671107f,1.5441768f,0.104990005f,
0.027732491f,1.5480776f,0.104990005f,
0.031671107f,1.5480776f,0.10889077f,
0.031671107f,1.5480776f,0.10105139f,
0.031671107f,1.5632639f,0.106883585f,
0.035571873f,1.5593252f,0.106883585f,
0.031671107f,1.5554245f,0.106883585f,
0.027732491f,1.5593252f,0.106883585f,
0.031671107f,1.5593252f,0.11078435f,
0.031671107f,1.5593252f,0.10294491f,
0.031671107f,1.5708003f,0.099915266f,
0.031671107f,1.5708003f,0.10775459f,
0.027732491f,1.5708003f,0.10381603f,
0.031671107f,1.5668995f,0.10381603f,
0.035571873f,1.5708003f,0.10381603f,
0.031671107f,1.574739f,0.10381603f,
0.77682763f,1.3753462f,-0.07258874f,
0.77682763f,1.3753462f,-0.06789267f,
0.77447945f,1.3753462f,-0.07024068f,
0.77682763f,1.3729982f,-0.07024068f,
0.7791756f,1.3753462f,-0.07024068f,
0.77682763f,1.3776944f,-0.07024068f,
0.792317f,1.3753462f,-0.054107487f,
0.792317f,1.3753462f,-0.049373567f,
0.7899311f,1.3753462f,-0.051721573f,
0.792317f,1.3729982f,-0.051721573f,
0.79466504f,1.3753462f,-0.051721573f,
0.792317f,1.3776944f,-0.051721573f,
0.79849f,1.3753462f,-0.03320253f,
0.79849f,1.3753462f,-0.028355002f,
0.7960662f,1.3753462f,-0.030778706f,
0.79849f,1.3729224f,-0.030778706f,
0.80091375f,1.3753462f,-0.030778706f,
0.79849f,1.3777702f,-0.030778706f,
0.79065067f,1.3753462f,-0.009457171f,
0.79065067f,1.3753462f,-0.0044202805f,
0.7881511f,1.3753462f,-0.00695765f,
0.79065067f,1.3728089f,-0.00695765f,
0.7931879f,1.3753462f,-0.00695765f,
0.79065067f,1.3778458f,-0.00695765f,
0.73145777f,1.3402774f,0.02621764f,
0.73123056f,1.3402395f,0.03125453f,
0.7289961f,1.3403912f,0.028679252f,
0.73130625f,1.3378916f,0.028792858f,
0.7338436f,1.3402395f,0.028792858f,
0.73126835f,1.3427012f,0.02875501f,
0.6637059f,1.3476245f,-0.031498253f,
0.66734153f,1.3439889f,-0.031498253f,
0.6637059f,1.3403533f,-0.031498253f,
0.66003245f,1.3439889f,-0.031498253f,
0.6637059f,1.3439889f,-0.027862668f,
0.6637059f,1.3439889f,-0.035133958f,
0.6641225f,1.3439889f,-0.057250857f,
0.6641225f,1.3439889f,-0.04994166f,
0.6604869f,1.3439889f,-0.053577304f,
0.6641225f,1.3403533f,-0.053577304f,
0.66779596f,1.3439889f,-0.053577304f,
0.6641225f,1.3476245f,-0.053577304f,
0.12157762f,0.019325018f,0.095560074f,
0.12157762f,0.019325018f,0.10453558f,
0.11707097f,0.019325018f,0.10002887f,
0.12157762f,0.014856219f,0.10002887f,
0.12604648f,0.019325018f,0.10002887f,
0.12157762f,0.023793876f,0.10002887f,
0.10779244f,0.020726323f,0.108436346f,
0.10779244f,0.020726323f,0.11737394f,
0.10328585f,0.020726323f,0.112905145f,
0.10779244f,0.016257524f,0.112905145f,
0.112261295f,0.020726323f,0.112905145f,
0.10779244f,0.025195062f,0.112905145f,
0.09438604f,0.020423293f,0.12294108f,
0.09438604f,0.020423293f,0.13191652f,
0.08987939f,0.020423293f,0.12740988f,
0.09438604f,0.015954554f,0.12740988f,
0.0988549f,0.020423293f,0.12740988f,
0.09438604f,0.024892092f,0.12740988f,
0.07866943f,0.017242134f,0.13218158f,
0.07866943f,0.017242134f,0.14111924f,
0.07420063f,0.017242134f,0.13665044f,
0.07866943f,0.012773335f,0.13665044f,
0.08313823f,0.017242134f,0.13665044f,
0.07866943f,0.021710932f,0.13665044f,
0.057878077f,0.023225784f,0.1322574f,
0.057878077f,0.023225784f,0.141195f,
0.05337137f,0.023225784f,0.13672614f,
0.057878077f,0.018756986f,0.13672614f,
0.062346876f,0.023225784f,0.13672614f,
0.057878077f,0.027694643f,0.13672614f,
0.08465314f,0.028830767f,0.14812547f,
0.09343928f,0.020044565f,0.14812547f,
0.08465314f,0.011258483f,0.14812547f,
0.07586694f,0.020044565f,0.14812547f,
0.08465314f,0.020044565f,0.15691167f,
0.08465314f,0.020044565f,0.13933933f,
0.08465314f,0.0126597285f,0.057007015f,
0.08465314f,0.0126597285f,0.07457936f,
0.07586694f,0.0126597285f,0.065793216f,
0.08465314f,0.003873527f,0.065793216f,
0.09343928f,0.0126597285f,0.065793216f,
0.08465314f,0.02144587f,0.065793216f,
-2.7239323E-5f,1.006896f,-0.018053949f,
-0.009911656f,0.9968979f,-0.018053949f,
-2.7239323E-5f,0.9868999f,-0.018053949f,
0.009857178f,0.9968979f,-0.018053949f,
-2.7239323E-5f,0.9968979f,-0.008055925f,
-2.7239323E-5f,0.9968979f,-0.028052032f,
0.11832076f,1.3820875f,-0.012259603f,
0.11832076f,1.3820875f,0.0037599206f,
0.11029202f,1.3820875f,-0.004230976f,
0.11832076f,1.3740966f,-0.004230976f,
0.12631154f,1.3820875f,-0.004230976f,
0.11832076f,1.3900783f,-0.004230976f,
-0.11833727f,1.3900783f,-0.004230976f,
-0.12632817f,1.3820875f,-0.004230976f,
-0.11833727f,1.3740966f,-0.004230976f,
-0.11034644f,1.3820875f,-0.004230976f,
-0.11833727f,1.3820875f,0.0037599206f,
-0.11833727f,1.3820875f,-0.012259603f,
-2.7239323E-5f,1.6771431f,-0.046268106f,
-0.0069198012f,1.6702125f,-0.046268106f,
-2.7239323E-5f,1.6633201f,-0.046268106f,
0.0069032907f,1.6702125f,-0.046268106f,
-2.7239323E-5f,1.6702125f,-0.039375544f,
-2.7239323E-5f,1.6702125f,-0.053198576f,
};
}
| |
package org.apache.taverna.workbench.models.graph.svg;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import static java.lang.Float.parseFloat;
import static java.lang.Math.PI;
import static java.lang.Math.atan2;
import static org.apache.batik.dom.svg.SVGDOMImplementation.getDOMImplementation;
import static org.apache.batik.util.SMILConstants.SMIL_ATTRIBUTE_NAME_ATTRIBUTE;
import static org.apache.batik.util.SMILConstants.SMIL_DUR_ATTRIBUTE;
import static org.apache.batik.util.SMILConstants.SMIL_FILL_ATTRIBUTE;
import static org.apache.batik.util.SMILConstants.SMIL_FREEZE_VALUE;
import static org.apache.batik.util.SMILConstants.SMIL_FROM_ATTRIBUTE;
import static org.apache.batik.util.SMILConstants.SMIL_TO_ATTRIBUTE;
import static org.apache.batik.util.SVGConstants.SVG_TYPE_ATTRIBUTE;
import static org.apache.batik.util.SVGConstants.SVG_X1_ATTRIBUTE;
import static org.apache.batik.util.SVGConstants.SVG_X2_ATTRIBUTE;
import static org.apache.batik.util.SVGConstants.SVG_Y1_ATTRIBUTE;
import static org.apache.batik.util.SVGConstants.SVG_Y2_ATTRIBUTE;
import static org.apache.batik.util.XMLResourceDescriptor.getXMLParserClassName;
import java.awt.Color;
import java.awt.Point;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringReader;
import java.util.List;
import org.apache.taverna.lang.io.StreamDevourer;
import org.apache.taverna.workbench.configuration.workbench.WorkbenchConfiguration;
import org.apache.taverna.workbench.models.graph.GraphShapeElement.Shape;
import org.apache.batik.dom.svg.SAXSVGDocumentFactory;
import org.apache.batik.dom.svg.SVGDOMImplementation;
import org.apache.batik.dom.svg.SVGOMAnimationElement;
import org.apache.batik.dom.svg.SVGOMPoint;
import org.apache.log4j.Logger;
import org.w3c.dom.DOMImplementation;
import org.w3c.dom.Element;
import org.w3c.dom.svg.SVGDocument;
import org.w3c.dom.svg.SVGElement;
import org.w3c.dom.svg.SVGLocatable;
import org.w3c.dom.svg.SVGMatrix;
//import org.apache.batik.transcoder.TranscoderException;
//import org.apache.batik.transcoder.svg2svg.PrettyPrinter;
/**
* Utility methods.
*
* @author David Withers
*/
public class SVGUtil {
	/** SVG path command for a cubic Bezier curve segment. */
	private static final String C = "C";
	/** SVG path command for a move-to. */
	private static final String M = "M";
	private static final String SPACE = " ";
	private static final String COMMA = ",";
	/** The SVG namespace URI. */
	public static final String svgNS = SVGDOMImplementation.SVG_NAMESPACE_URI;
	private static final String SVG = "svg";
	private static final Logger logger = Logger.getLogger(SVGUtil.class);
	/** Shared factory for parsing SVG documents; initialised once in the static block. */
	private static SAXSVGDocumentFactory docFactory;

	static {
		String parser = getXMLParserClassName();
		logger.info("Using XML parser " + parser);
		docFactory = new SAXSVGDocumentFactory(parser);
	}

	/**
	 * Creates a new SVGDocument.
	 *
	 * @return a new SVGDocument
	 */
	public static SVGDocument createSVGDocument() {
		DOMImplementation impl = getDOMImplementation();
		return (SVGDocument) impl.createDocument(svgNS, SVG, null);
	}

	/**
	 * Converts a point in screen coordinates to a point in document
	 * coordinates.
	 *
	 * @param locatable
	 *            the element providing the coordinate context
	 * @param screenPoint
	 *            the point in screen coordinates
	 * @return the point in document coordinates
	 */
	public static SVGOMPoint screenToDocument(SVGLocatable locatable,
			SVGOMPoint screenPoint) {
		// Invert the screen transformation matrix of the outermost viewport
		// and apply it to the screen point.
		SVGMatrix mat = ((SVGLocatable) locatable.getFarthestViewportElement())
				.getScreenCTM().inverse();
		return (SVGOMPoint) screenPoint.matrixTransform(mat);
	}

	/**
	 * Writes SVG to the console. For debugging only.
	 *
	 * @param svgDocument
	 *            the document to output
	 */
	// public static void writeSVG(SVGDocument svgDocument) {
	// writeSVG(svgDocument, new OutputStreamWriter(System.out));
	// }

	/**
	 * Writes SVG to an output stream.
	 *
	 * @param svgDocument
	 *            the document to output
	 * @param writer
	 *            the stream to write the document to
	 */
	// public static void writeSVG(SVGDocument svgDocument, Writer writer) {
	// StringWriter sw = new StringWriter();
	// try {
	// Transformer transformer = TransformerFactory.newInstance().newTransformer();
	// Source src = new DOMSource(svgDocument.getDocumentElement());
	// transformer.transform(src, new StreamResult(sw));
	//
	// PrettyPrinter pp = new PrettyPrinter();
	// pp.print(new StringReader(sw.toString()), writer);
	// } catch (TransformerException | TranscoderException | IOException e) {
	// e.printStackTrace(new PrintWriter(writer));
	// }
	// }

	/**
	 * Generates an SVGDocument from DOT text by calling out to GraphViz.
	 *
	 * @param dotText
	 *            the DOT source to render
	 * @param workbenchConfiguration
	 *            supplies the optional "taverna.dotlocation" property
	 * @return an SVGDocument
	 * @throws IOException
	 *             if the dot process cannot be started or written to
	 */
	public static SVGDocument getSVG(String dotText,
			WorkbenchConfiguration workbenchConfiguration) throws IOException {
		String svgText = invokeDot(dotText, workbenchConfiguration, "-Tsvg");
		/*
		 * Avoid TAV-424, replace buggy SVG outputted by "modern" GraphViz
		 * versions. http://www.graphviz.org/bugs/b1075.html
		 *
		 * Contributed by Marko Ullgren
		 */
		svgText = svgText.replaceAll("font-weight:regular",
				"font-weight:normal");
		logger.info(svgText);
		// Fake URI, just used for internal references like #fish
		return docFactory.createSVGDocument(
				"http://taverna.sf.net/diagram/generated.svg",
				new StringReader(svgText));
	}

	/**
	 * Generates DOT text with layout information from DOT text by calling out
	 * to GraphViz.
	 *
	 * @param dotText
	 *            dot text
	 * @param workbenchConfiguration
	 *            supplies the optional "taverna.dotlocation" property
	 * @return dot text with layout information
	 * @throws IOException
	 *             if the dot process cannot be started or written to
	 */
	public static String getDot(String dotText,
			WorkbenchConfiguration workbenchConfiguration) throws IOException {
		return invokeDot(dotText, workbenchConfiguration, "-Tdot", "-Glp=0,0");
	}

	/**
	 * Runs the GraphViz "dot" executable, feeding it the given DOT text on
	 * stdin and returning everything it writes to stdout. Common helper for
	 * {@link #getSVG} and {@link #getDot}.
	 *
	 * @param dotText
	 *            the DOT source to pipe to the process
	 * @param workbenchConfiguration
	 *            supplies the optional "taverna.dotlocation" property; when
	 *            absent, "dot" is resolved from the PATH
	 * @param dotArgs
	 *            extra command-line arguments (e.g. "-Tsvg")
	 * @return the process's complete standard output
	 * @throws IOException
	 *             if the process cannot be started or written to
	 */
	private static String invokeDot(String dotText,
			WorkbenchConfiguration workbenchConfiguration, String... dotArgs)
			throws IOException {
		String dotLocation = (String) workbenchConfiguration
				.getProperty("taverna.dotlocation");
		if (dotLocation == null)
			dotLocation = "dot";
		logger.debug("Invoking dot...");
		String[] command = new String[dotArgs.length + 1];
		command[0] = dotLocation;
		System.arraycopy(dotArgs, 0, command, 1, dotArgs.length);
		Process dotProcess = exec(command);
		// Drain stdout on a separate thread while we write stdin.
		StreamDevourer devourer = new StreamDevourer(
				dotProcess.getInputStream());
		devourer.start();
		try (PrintWriter out = new PrintWriter(dotProcess.getOutputStream(),
				true)) {
			out.print(dotText);
			out.flush();
		}
		return devourer.blockOnOutput();
	}

	private static Process exec(String... args) throws IOException {
		// ProcessBuilder avoids the string-tokenisation pitfalls of
		// Runtime.exec and is the recommended replacement.
		Process p = new ProcessBuilder(args).start();
		/*
		 * Must create an error devourer otherwise stderr fills up and the
		 * process stalls!
		 */
		new StreamDevourer(p.getErrorStream()).start();
		return p;
	}

	/**
	 * Returns the hex value for a <code>Color</code>. If color is null "none"
	 * is returned.
	 *
	 * @param color
	 *            the <code>Color</code> to convert to hex code
	 * @return the hex value
	 */
	public static String getHexValue(Color color) {
		if (color == null)
			return "none";
		return String.format("#%02x%02x%02x", color.getRed(), color.getGreen(),
				color.getBlue());
	}

	/**
	 * Calculates the angle to rotate an arrow head to be placed on the end of a
	 * line.
	 *
	 * @param line
	 *            the line to calculate the arrow head angle from
	 * @return the angle to rotate an arrow head
	 */
	public static double calculateAngle(Element line) {
		float x1 = parseFloat(line.getAttribute(SVG_X1_ATTRIBUTE));
		float y1 = parseFloat(line.getAttribute(SVG_Y1_ATTRIBUTE));
		float x2 = parseFloat(line.getAttribute(SVG_X2_ATTRIBUTE));
		float y2 = parseFloat(line.getAttribute(SVG_Y2_ATTRIBUTE));
		return calculateAngle(x1, y1, x2, y2);
	}

	/**
	 * Calculates the angle to rotate an arrow head to be placed on the end of a
	 * line.
	 *
	 * @param pointList
	 *            the list of <code>Point</code>s to calculate the arrow head
	 *            angle from
	 * @return the angle to rotate an arrow head
	 */
	public static double calculateAngle(List<Point> pointList) {
		double angle = 0d;
		if (pointList.size() > 1) {
			int listSize = pointList.size();
			Point a = pointList.get(listSize - 2);
			Point b = pointList.get(listSize - 1);
			/*
			 * dot sometimes generates paths with the same point repeated at the
			 * end of the path, so move back along the path until two different
			 * points are found
			 */
			while (a.equals(b) && listSize > 2) {
				b = a;
				a = pointList.get(--listSize - 2);
			}
			angle = calculateAngle(a.x, a.y, b.x, b.y);
		}
		return angle;
	}

	/**
	 * Calculates the angle to rotate an arrow head to be placed on the end of a
	 * line.
	 *
	 * @param x1
	 *            the x coordinate of the start of the line
	 * @param y1
	 *            the y coordinate of the start of the line
	 * @param x2
	 *            the x coordinate of the end of the line
	 * @param y2
	 *            the y coordinate of the end of the line
	 * @return the angle to rotate an arrow head, in degrees
	 */
	public static double calculateAngle(float x1, float y1, float x2, float y2) {
		return atan2(y2 - y1, x2 - x1) * 180 / PI;
	}

	/**
	 * Calculates the points that make up the polygon for the specified
	 * {@link Shape}.
	 *
	 * @param shape
	 *            the <code>Shape</code> to calculate points for
	 * @param width
	 *            the width of the <code>Shape</code>
	 * @param height
	 *            the height of the <code>Shape</code>
	 * @return the points that make up the polygon for the specified
	 *         <code>Shape</code>
	 */
	public static String calculatePoints(Shape shape, int width, int height) {
		StringBuilder sb = new StringBuilder();
		switch (shape) {
		case BOX:
		case RECORD:
			addPoint(sb, 0, 0);
			addPoint(sb, width, 0);
			addPoint(sb, width, height);
			addPoint(sb, 0, height);
			break;
		case HOUSE:
			addPoint(sb, width / 2f, 0);
			addPoint(sb, width, height / 3f);
			addPoint(sb, width, height - 3);
			addPoint(sb, 0, height - 3);
			addPoint(sb, 0, height / 3f);
			break;
		case INVHOUSE:
			addPoint(sb, 0, 3);
			addPoint(sb, width, 3);
			addPoint(sb, width, height / 3f * 2f);
			addPoint(sb, width / 2f, height);
			addPoint(sb, 0, height / 3f * 2f);
			break;
		case TRIANGLE:
			addPoint(sb, width / 2f, 0);
			addPoint(sb, width, height);
			addPoint(sb, 0, height);
			break;
		case INVTRIANGLE:
			addPoint(sb, 0, 0);
			addPoint(sb, width, 0);
			addPoint(sb, width / 2f, height);
			break;
		default:
			// Nothing to do for the others
			break;
		}
		return sb.toString();
	}

	/**
	 * Appends x y coordinates to a <code>StringBuilder</code> in the format
	 * "x,y ".
	 *
	 * @param stringBuilder
	 *            the <code>StringBuilder</code> to append the point to
	 * @param x
	 *            the x coordinate
	 * @param y
	 *            the y coordinate
	 */
	public static void addPoint(StringBuilder stringBuilder, float x, float y) {
		stringBuilder.append(x).append(COMMA).append(y).append(SPACE);
	}

	/**
	 * Converts a list of points into a string format for a cubic Bezier curve.
	 *
	 * For example, "M100,200 C100,100 250,100 250,200". See
	 * http://www.w3.org/TR/SVG11/paths.html#PathDataCubicBezierCommands.
	 *
	 * @param pointList
	 *            a list of points that describes a cubic Bezier curve
	 * @return a string that describes a cubic Bezier curve
	 */
	public static String getPath(List<Point> pointList) {
		StringBuilder sb = new StringBuilder();
		if (pointList != null && pointList.size() > 1) {
			Point firstPoint = pointList.get(0);
			sb.append(M).append(firstPoint.x).append(COMMA)
					.append(firstPoint.y);
			sb.append(SPACE);
			Point secondPoint = pointList.get(1);
			sb.append(C).append(secondPoint.x).append(COMMA)
					.append(secondPoint.y);
			for (int i = 2; i < pointList.size(); i++) {
				Point point = pointList.get(i);
				sb.append(SPACE).append(point.x).append(COMMA).append(point.y);
			}
		}
		return sb.toString();
	}

	/**
	 * Creates an animation element.
	 *
	 * @param graphController
	 *            the SVGGraphController to use to create the animation element
	 * @param elementType
	 *            the type of animation element to create
	 * @param attribute
	 *            the attribute that the animation should affect
	 * @param transformType
	 *            the type of transform - use null not creating a transform
	 *            animation
	 * @return an new animation element
	 */
	public static SVGOMAnimationElement createAnimationElement(
			SVGGraphController graphController, String elementType,
			String attribute, String transformType) {
		SVGOMAnimationElement animationElement = (SVGOMAnimationElement) graphController
				.createElement(elementType);
		animationElement.setAttribute(SMIL_ATTRIBUTE_NAME_ATTRIBUTE, attribute);
		if (transformType != null)
			animationElement.setAttribute(SVG_TYPE_ATTRIBUTE, transformType);
		// Keep the final animation value after the animation completes.
		animationElement.setAttribute(SMIL_FILL_ATTRIBUTE, SMIL_FREEZE_VALUE);
		return animationElement;
	}

	/**
	 * Adds an animation to the SVG element and starts the animation.
	 *
	 * @param animate
	 *            that animation element
	 * @param element
	 *            the element to animate
	 * @param duration
	 *            the duration of the animation in milliseconds
	 * @param from
	 *            the starting point for the animation, can be null
	 * @param to
	 *            the end point for the animation, cannot be null
	 */
	public static void animate(SVGOMAnimationElement animate, SVGElement element, int duration,
			String from, String to) {
		animate.setAttribute(SMIL_DUR_ATTRIBUTE, duration + "ms");
		if (from != null)
			animate.setAttribute(SMIL_FROM_ATTRIBUTE, from);
		animate.setAttribute(SMIL_TO_ATTRIBUTE, to);
		element.appendChild(animate);
		try {
			animate.beginElement();
		} catch (NullPointerException ignored) {
			// NOTE(review): deliberately swallowed — beginElement appears to
			// NPE when the document is not yet attached to a live update
			// manager, in which case the animation is simply skipped.
			// Confirm there is no other NPE source being hidden here.
		}
	}

	/**
	 * Adjusts the length of <code>pointList</code> by adding or removing points
	 * to make the length equal to <code>size</code>. If <code>pointList</code>
	 * is shorter than <code>size</code> the last point is repeated. If
	 * <code>pointList</code> is longer than <code>size</code> points at the end
	 * of the list are removed.
	 *
	 * @param pointList
	 *            the path to adjust
	 * @param size
	 *            the required size for <code>pointList</code>
	 */
	public static void adjustPathLength(List<Point> pointList, int size) {
		if (pointList.size() < size) {
			// Pad by repeating the final point.
			Point lastPoint = pointList.get(pointList.size() - 1);
			for (int i = pointList.size(); i < size; i++)
				pointList.add(lastPoint);
		} else if (pointList.size() > size) {
			// Trim from the tail so earlier points keep their positions.
			for (int i = pointList.size(); i > size; i--)
				pointList.remove(i - 1);
		}
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.functions;
import org.apache.flink.table.dataformat.BinaryString;
import org.apache.flink.table.dataformat.BinaryStringUtil;
import org.apache.flink.table.dataformat.Decimal;
import org.apache.flink.table.runtime.util.JsonUtils;
import org.apache.flink.table.utils.EncodingUtils;
import org.apache.flink.table.utils.ThreadLocalCache;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.UnsupportedEncodingException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.RoundingMode;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Base64;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.util.regex.MatchResult;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Built-in scalar runtime functions.
*
* <p>NOTE: Before you add functions here, check if Calcite provides it in
* {@code org.apache.calcite.runtime.SqlFunctions}. Furthermore, make sure
* to implement the function efficiently. Sometimes it makes sense to create a
* {@code org.apache.flink.table.codegen.calls.CallGenerator} instead to avoid
* massive object creation and reuse instances.
*/
public class SqlFunctionUtils {
/** Shared logger for the built-in scalar runtime functions. */
private static final Logger LOG = LoggerFactory.getLogger(SqlFunctionUtils.class);

/**
 * Per-thread cache of compiled regex patterns, keyed by the regex source
 * string, so hot code paths avoid recompiling the same pattern.
 */
private static final ThreadLocalCache<String, Pattern> REGEXP_PATTERN_CACHE =
	new ThreadLocalCache<String, Pattern>() {
		@Override
		public Pattern getNewInstance(String regex) {
			return Pattern.compile(regex);
		}
	};
/**
 * Per-thread cache of parsed {@link URL}s keyed by the raw URL string, so
 * repeated calls (e.g. {@code parseUrl}) avoid re-parsing the same URL.
 */
private static final ThreadLocalCache<String, URL> URL_CACHE =
	new ThreadLocalCache<String, URL>() {
		// Added the missing @Override for consistency with
		// REGEXP_PATTERN_CACHE above; the compiler now checks the signature.
		@Override
		public URL getNewInstance(String url) {
			try {
				return new URL(url);
			} catch (MalformedURLException e) {
				// Wrapped unchecked, preserving the cause; callers such as
				// parseUrl catch this, log, and return null.
				throw new RuntimeException(e);
			}
		}
	};
// Reusable empty map instance (note: a plain HashMap, so technically mutable
// — callers must not modify it).
private static final Map<String, String> EMPTY_MAP = new HashMap<>(0);
/** Returns Euler's number e raised to the power of the given decimal value. */
public static double exp(Decimal d) {
	return Math.exp(d.doubleValue());
}

/** Returns {@code base} raised to the power of the given decimal exponent. */
public static double power(double base, Decimal exponent) {
	return Math.pow(base, exponent.doubleValue());
}

/** Returns the decimal {@code base} raised to the power of the decimal {@code exponent}. */
public static double power(Decimal base, Decimal exponent) {
	return Math.pow(base.doubleValue(), exponent.doubleValue());
}

/** Returns the decimal {@code base} raised to the power of {@code exponent}. */
public static double power(Decimal base, double exponent) {
	return Math.pow(base.doubleValue(), exponent);
}

/** Returns the hyperbolic cosine of the given decimal value. */
public static double cosh(Decimal x) {
	return Math.cosh(x.doubleValue());
}

/** Returns the arc cosine of the given decimal value. */
public static double acos(Decimal a) {
	return Math.acos(a.doubleValue());
}

/** Returns the arc sine of the given decimal value. */
public static double asin(Decimal a) {
	return Math.asin(a.doubleValue());
}

/** Returns the arc tangent of the given decimal value. */
public static double atan(Decimal a) {
	return Math.atan(a.doubleValue());
}

/** Returns the angle theta from the conversion of rectangular coordinates (x, y) to polar coordinates. */
public static double atan2(Decimal y, Decimal x) {
	return Math.atan2(y.doubleValue(), x.doubleValue());
}

/** Returns the trigonometric sine of the given decimal value. */
public static double sin(Decimal a) {
	return Math.sin(a.doubleValue());
}

/** Returns the hyperbolic sine of the given decimal value. */
public static double sinh(Decimal a) {
	return Math.sinh(a.doubleValue());
}

/** Returns the trigonometric cosine of the given decimal value. */
public static double cos(Decimal a) {
	return Math.cos(a.doubleValue());
}

/** Returns the trigonometric tangent of the given decimal value. */
public static double tan(Decimal a) {
	return Math.tan(a.doubleValue());
}
/**
 * Calculates the hyperbolic tangent of a big decimal number.
 */
public static double tanh(Decimal a) {
	return Math.tanh(a.doubleValue());
}

/** Returns the trigonometric cotangent (1 / tan) of the given decimal value. */
public static double cot(Decimal a) {
	return 1.0d / Math.tan(a.doubleValue());
}

/** Converts a decimal angle measured in radians to degrees. */
public static double degrees(Decimal angrad) {
	return Math.toDegrees(angrad.doubleValue());
}

/** Converts a decimal angle measured in degrees to radians. */
public static double radians(Decimal angdeg) {
	return Math.toRadians(angdeg.doubleValue());
}

/** Returns the absolute value of the given decimal. */
public static Decimal abs(Decimal a) {
	return a.abs();
}

/** Returns the floor of the given decimal. */
public static Decimal floor(Decimal a) {
	return a.floor();
}

/** Returns the ceiling of the given decimal. */
public static Decimal ceil(Decimal a) {
	return a.ceil();
}
// -------------------------- natural logarithm ------------------------

/**
 * Returns the natural logarithm of "x".
 */
public static double log(double x) {
	return Math.log(x);
}

/** Returns the natural logarithm of the given decimal value. */
public static double log(Decimal x) {
	return Math.log(x.doubleValue());
}

/**
 * Returns the logarithm of "x" with base "base".
 */
public static double log(double base, double x) {
	return Math.log(x) / Math.log(base);
}

/** Returns the logarithm of the decimal "x" with base "base". */
public static double log(double base, Decimal x) {
	return log(base, x.doubleValue());
}

/** Returns the logarithm of "x" with the decimal "base". */
public static double log(Decimal base, double x) {
	return log(base.doubleValue(), x);
}

/** Returns the logarithm of the decimal "x" with the decimal "base". */
public static double log(Decimal base, Decimal x) {
	return log(base.doubleValue(), x.doubleValue());
}

/**
 * Returns the logarithm of "a" with base 2.
 */
public static double log2(double x) {
	return Math.log(x) / Math.log(2);
}

/** Returns the base-2 logarithm of the given decimal value. */
public static double log2(Decimal x) {
	return log2(x.doubleValue());
}

/** Returns the base-10 logarithm of "x". */
public static double log10(double x) {
	return Math.log10(x);
}

/** Returns the base-10 logarithm of the given decimal value. */
public static double log10(Decimal x) {
	return log10(x.doubleValue());
}
// -------------------------- string functions ------------------------
/**
 * Returns the string {@code base} left-padded with the string {@code pad} to
 * a length of {@code len} characters. If {@code base} is longer than
 * {@code len}, the result is truncated to {@code len} characters.
 *
 * <p>Returns {@code null} for a negative {@code len} or an empty {@code pad};
 * returns the empty string for {@code len == 0}.
 */
public static String lpad(String base, int len, String pad) {
	if (len < 0 || "".equals(pad)) {
		return null;
	} else if (len == 0) {
		return "";
	}
	StringBuilder result = new StringBuilder(len);
	// Number of padding characters required in front of the base string.
	int padLength = Math.max(len - base.length(), 0);
	// Repeat the pad string, truncating the last repetition to fit exactly.
	while (result.length() < padLength) {
		int room = padLength - result.length();
		result.append(pad, 0, Math.min(pad.length(), room));
	}
	// Append the base string, truncated to the remaining room.
	result.append(base, 0, Math.min(base.length(), len - padLength));
	return result.toString();
}
/**
 * Returns the string {@code base} right-padded with the string {@code pad} to
 * a length of {@code len} characters. If {@code base} is longer than
 * {@code len}, the result is truncated to {@code len} characters.
 *
 * <p>Returns {@code null} for a negative {@code len} or an empty {@code pad};
 * returns the empty string for {@code len == 0}.
 */
public static String rpad(String base, int len, String pad) {
	if (len < 0 || "".equals(pad)) {
		return null;
	} else if (len == 0) {
		return "";
	}
	StringBuilder result = new StringBuilder(len);
	// Start with the base string, truncated to at most len characters.
	result.append(base, 0, Math.min(base.length(), len));
	// Fill the remainder by repeating the pad string; each repetition starts
	// again from the first pad character, and the last one is truncated.
	while (result.length() < len) {
		int room = len - result.length();
		result.append(pad, 0, Math.min(pad.length(), room));
	}
	return result.toString();
}
/**
 * Returns a string that repeats the base string n times.
 *
 * <p>Delegates to {@code EncodingUtils.repeat}; see that utility for null
 * and non-positive-count handling.
 */
public static String repeat(String str, int repeat) {
	return EncodingUtils.repeat(str, repeat);
}
/**
 * Replaces all the old strings with the replacement string.
 *
 * <p>This is a literal (non-regex) replacement, per {@link String#replace}.
 */
public static String replace(String str, String oldStr, String replacement) {
	return str.replace(oldStr, replacement);
}
/**
 * Split target string with custom separator and pick the index-th(start with 0) result.
 *
 * @param str target string; a null string yields null.
 * @param separator custom separator.
 * @param index index of the result which you want; negative yields null.
 * @return the string at the index of split results, or null when out of range.
 */
public static String splitIndex(String str, String separator, int index) {
	if (index < 0) {
		return null;
	}
	// Guard against null input: splitByWholeSeparatorPreserveAllTokens
	// returns null for a null string, which previously caused an NPE on
	// values.length below.
	if (str == null) {
		return null;
	}
	String[] values = StringUtils.splitByWholeSeparatorPreserveAllTokens(str, separator);
	if (index >= values.length) {
		return null;
	} else {
		return values[index];
	}
}
/**
 * Split target string with custom separator and pick the index-th(start with 0) result.
 *
 * @param str target string; a null string yields null.
 * @param character int value of the separator character; must be in [1, 255].
 * @param index index of the result which you want; negative yields null.
 * @return the string at the index of split results, or null when out of range.
 */
public static String splitIndex(String str, int character, int index) {
	if (character > 255 || character < 1 || index < 0) {
		return null;
	}
	// Guard against null input: splitPreserveAllTokens returns null for a
	// null string, which previously caused an NPE on values.length below.
	if (str == null) {
		return null;
	}
	String[] values = StringUtils.splitPreserveAllTokens(str, (char) character);
	if (index >= values.length) {
		return null;
	} else {
		return values[index];
	}
}
/**
 * Returns a string resulting from replacing all substrings
 * that match the regular expression with replacement.
 *
 * <p>The replacement is taken literally ({@link Matcher#quoteReplacement}).
 * Returns null for null arguments or on any regex failure.
 */
public static String regexpReplace(String str, String regex, String replacement) {
	if (str == null || regex == null || replacement == null) {
		return null;
	}
	try {
		// Reuse compiled patterns via the per-thread cache instead of
		// String.replaceAll, which recompiles the regex on every call.
		Matcher m = REGEXP_PATTERN_CACHE.get(regex).matcher(str);
		return m.replaceAll(Matcher.quoteReplacement(replacement));
	} catch (Exception e) {
		LOG.error(
			String.format("Exception in regexpReplace('%s', '%s', '%s')", str, regex, replacement),
			e);
		// return null if exception in regex replace
		return null;
	}
}
/**
 * Returns a string extracted with a specified regular expression and a regex match group index.
 *
 * <p>Returns null for null arguments, when no match is found, or on any
 * regex failure (e.g. an out-of-range group index), which is logged.
 */
public static String regexpExtract(String str, String regex, int extractIndex) {
	if (str == null || regex == null) {
		return null;
	}
	try {
		// Reuse compiled patterns via the per-thread cache instead of
		// recompiling the regex on every call.
		Matcher m = REGEXP_PATTERN_CACHE.get(regex).matcher(str);
		if (m.find()) {
			MatchResult mr = m.toMatchResult();
			return mr.group(extractIndex);
		}
	} catch (Exception e) {
		LOG.error(
			String.format("Exception in regexpExtract('%s', '%s', '%d')", str, regex, extractIndex),
			e);
	}
	return null;
}
/** Overload accepting a long group index; narrows it to int and delegates. */
public static String regexpExtract(String str, String regex, long extractIndex) {
	return regexpExtract(str, regex, (int) extractIndex);
}
/**
 * Returns the first string extracted with a specified regular expression.
 *
 * <p>Uses group 0, i.e. the entire first match.
 */
public static String regexpExtract(String str, String regex) {
	return regexpExtract(str, regex, 0);
}
/**
 * Parse string as key-value string and return the value matches key name.
 * example:
 * keyvalue('k1=v1;k2=v2', ';', '=', 'k2') = 'v2'
 * keyvalue('k1:v1,k2:v2', ',', ':', 'k3') = NULL
 *
 * @param str target string.
 * @param pairSeparator separator between key-value tuple.
 * @param kvSeparator separator between key and value.
 * @param keyName name of the key whose value you want return.
 * @return target value.
 */
public static BinaryString keyValue(
		BinaryString str, BinaryString pairSeparator, BinaryString kvSeparator, BinaryString keyName) {
	if (str == null || str.getSizeInBytes() == 0) {
		return null;
	}
	// Fast path: single-byte separators can be processed directly on the
	// binary representation without materialising Java strings.
	if (pairSeparator != null && pairSeparator.getSizeInBytes() == 1 &&
		kvSeparator != null && kvSeparator.getSizeInBytes() == 1) {
		return BinaryStringUtil.keyValue(str, pairSeparator.byteAt(0), kvSeparator.byteAt(0), keyName);
	} else {
		// Slow path: convert to Java strings and reuse the String-based logic.
		return BinaryString.fromString(
			keyValue(
				BinaryStringUtil.safeToString(str),
				BinaryStringUtil.safeToString(pairSeparator),
				BinaryStringUtil.safeToString(kvSeparator),
				BinaryStringUtil.safeToString(keyName)));
	}
}
/**
 * String-based fallback for {@code keyValue}: splits {@code str} into
 * key-value pairs on {@code pairSeparator}, splits each pair on
 * {@code kvSeparator}, and returns the value whose key equals
 * {@code keyName}; null when absent, on empty input, or on any parse error.
 */
private static String keyValue(String str, String pairSeparator, String kvSeparator, String keyName) {
	try {
		if (StringUtils.isEmpty(str)) {
			return null;
		}
		String[] values = StringUtils.split(str, pairSeparator);
		for (String value : values) {
			if (!StringUtils.isEmpty(value)) {
				// BUG FIX: previously this was StringUtils.split(kvSeparator),
				// which split the separator string itself (by whitespace)
				// instead of splitting the pair — so no key was ever found.
				String[] kv = StringUtils.split(value, kvSeparator);
				if (kv != null && kv.length == 2 && kv[0].equals(keyName)) {
					return kv[1];
				}
			}
		}
		return null;
	} catch (Exception e) {
		LOG.error("Exception when parse key-value", e);
		return null;
	}
}
/**
 * Calculate the hash value of a given string.
 *
 * <p>Delegates with an empty charset name, which falls back to UTF-8
 * encoding of the input.
 *
 * @param algorithm message digest algorithm.
 * @param str string to hash.
 * @return hash value of string.
 */
public static String hash(String algorithm, String str) {
	return hash(algorithm, str, "");
}
/**
 * Calculate the hash value of a given string.
 *
 * @param algorithm message digest algorithm.
 * @param str string to hash.
 * @param charsetName charset of string; when empty or unsupported, UTF-8 is used.
 * @return hash value of string, as a hex-encoded digest.
 * @throws IllegalArgumentException if the digest algorithm is not available.
 */
public static String hash(String algorithm, String str, String charsetName) {
	try {
		byte[] digest = MessageDigest
			.getInstance(algorithm)
			.digest(strToBytesWithCharset(str, charsetName));
		// Hex-encode the raw digest bytes.
		return EncodingUtils.hex(digest);
	} catch (NoSuchAlgorithmException e) {
		throw new IllegalArgumentException("Unsupported algorithm: " + algorithm, e);
	}
}
private static byte[] strToBytesWithCharset(String str, String charsetName) {
byte[] byteArray = null;
if (!StringUtils.isEmpty(charsetName)) {
try {
byteArray = str.getBytes(charsetName);
} catch (UnsupportedEncodingException e) {
LOG.warn("Unsupported encoding: " + charsetName + ", fallback to system charset", e);
byteArray = null;
}
}
if (byteArray == null) {
byteArray = str.getBytes(StandardCharsets.UTF_8);
}
return byteArray;
}
/**
* Parse url and return various components of the URL.
* If accept any null arguments, return null.
*
* @param urlStr URL string.
* @param partToExtract determines which components would return.
* accept values:
* HOST,PATH,QUERY,REF,
* PROTOCOL,FILE,AUTHORITY,USERINFO
* @return target value.
*/
public static String parseUrl(String urlStr, String partToExtract) {
URL url;
try {
url = URL_CACHE.get(urlStr);
} catch (Exception e) {
LOG.error("Parse URL error: " + urlStr, e);
return null;
}
if ("HOST".equals(partToExtract)) {
return url.getHost();
}
if ("PATH".equals(partToExtract)) {
return url.getPath();
}
if ("QUERY".equals(partToExtract)) {
return url.getQuery();
}
if ("REF".equals(partToExtract)) {
return url.getRef();
}
if ("PROTOCOL".equals(partToExtract)) {
return url.getProtocol();
}
if ("FILE".equals(partToExtract)) {
return url.getFile();
}
if ("AUTHORITY".equals(partToExtract)) {
return url.getAuthority();
}
if ("USERINFO".equals(partToExtract)) {
return url.getUserInfo();
}
return null;
}
/**
* Parse url and return various parameter of the URL.
* If accept any null arguments, return null.
*
* @param urlStr URL string.
* @param partToExtract must be QUERY, or return null.
* @param key parameter name.
* @return target value.
*/
public static String parseUrl(String urlStr, String partToExtract, String key) {
if (!"QUERY".equals(partToExtract)) {
return null;
}
String query = parseUrl(urlStr, partToExtract);
if (query == null) {
return null;
}
Pattern p = Pattern.compile("(&|^)" + Pattern.quote(key) + "=([^&]*)");
Matcher m = p.matcher(query);
if (m.find()) {
return m.group(2);
}
return null;
}
public static int divideInt(int a, int b) {
return a / b;
}
public static String subString(String str, long start, long len) {
if (len < 0) {
LOG.error("len of 'substring(str, start, len)' must be >= 0 and Int type, but len = {0}", len);
return null;
}
if (len > Integer.MAX_VALUE || start > Integer.MAX_VALUE) {
LOG.error("len or start of 'substring(str, start, len)' must be Int type, but len = {0}, start = {0}", len, start);
return null;
}
int length = (int) len;
int pos = (int) start;
if (str.isEmpty()) {
return "";
}
int startPos;
int endPos;
if (pos > 0) {
startPos = pos - 1;
if (startPos >= str.length()) {
return "";
}
} else if (pos < 0) {
startPos = str.length() + pos;
if (startPos < 0) {
return "";
}
} else {
startPos = 0;
}
if ((str.length() - startPos) < length) {
endPos = str.length();
} else {
endPos = startPos + length;
}
return str.substring(startPos, endPos);
}
public static String subString(String str, long start) {
return subString(str, start, Integer.MAX_VALUE);
}
public static String chr(long chr) {
if (chr < 0) {
return "";
} else if ((chr & 0xFF) == 0) {
return String.valueOf(Character.MIN_VALUE);
} else {
return String.valueOf((char) (chr & 0xFF));
}
}
public static String overlay(String s, String r, long start, long length) {
if (start <= 0 || start > s.length()) {
return s;
} else {
StringBuilder sb = new StringBuilder();
int startPos = (int) start;
int len = (int) length;
sb.append(s, 0, startPos - 1);
sb.append(r);
if ((startPos + len) <= s.length() && len > 0) {
sb.append(s.substring(startPos - 1 + len));
}
return sb.toString();
}
}
public static String overlay(String s, String r, long start) {
return overlay(s, r, start, r.length());
}
	/** Returns the 1-based position of the first occurrence of {@code seek} in {@code s}, or 0 if absent. */
	public static int position(BinaryString seek, BinaryString s) {
		return position(seek, s, 1);
	}
	/** Returns the 1-based position of {@code seek} in {@code s} at or after 1-based {@code from}, or 0 if absent. */
	public static int position(BinaryString seek, BinaryString s, int from) {
		return s.indexOf(seek, from - 1) + 1;
	}
	/**
	 * Returns the 1-based character position of the {@code nthAppearance}-th
	 * occurrence of {@code subString} in {@code str}, searching from 1-based
	 * {@code startPosition}. A negative {@code startPosition} searches
	 * backwards from the end of the string. Returns 0 when no such
	 * occurrence exists (including {@code startPosition == 0}).
	 *
	 * @throws IllegalArgumentException if {@code nthAppearance} is not positive.
	 */
	public static int instr(BinaryString str, BinaryString subString, int startPosition, int nthAppearance) {
		if (nthAppearance <= 0) {
			throw new IllegalArgumentException("nthAppearance must be positive!");
		}
		if (startPosition == 0) {
			// 0 is not a valid 1-based position; by convention: no match.
			return 0;
		} else if (startPosition > 0) {
			int startIndex = startPosition;
			int index = 0;
			// Walk forward, re-anchoring one past each hit, until the n-th hit.
			for (int i = 0; i < nthAppearance; i++) {
				index = str.indexOf(subString, startIndex - 1) + 1;
				if (index == 0) {
					// Fewer than nthAppearance occurrences remain.
					return 0;
				}
				startIndex = index + 1;
			}
			return index;
		} else {
			// Backward search: run the forward search on the reversed strings,
			// then map the reversed match position back to forward coordinates.
			int pos = instr(
				BinaryStringUtil.reverse(str),
				BinaryStringUtil.reverse(subString),
				-startPosition,
				nthAppearance);
			if (pos == 0) {
				return 0;
			} else {
				return str.numChars() + 2 - pos - subString.numChars();
			}
		}
	}
/**
* Returns the hex string of a long argument.
*/
public static String hex(long x) {
return Long.toHexString(x).toUpperCase();
}
	/**
	 * Returns the hex string of a string argument: the UTF-8 bytes of
	 * {@code x}, hex encoded and upper-cased.
	 */
	public static String hex(String x) {
		return EncodingUtils.hex(x.getBytes(StandardCharsets.UTF_8)).toUpperCase();
	}
/**
* Creates a map by parsing text. Split text into key-value pairs
* using two delimiters. The first delimiter separates pairs, and the
* second delimiter separates key and value. If only one parameter is given,
* default delimiters are used: ',' as delimiter1 and '=' as delimiter2.
* @param text the input text
* @return the map
*/
public static Map<String, String> strToMap(String text) {
return strToMap(text, ",", "=");
}
/**
* Creates a map by parsing text. Split text into key-value pairs
* using two delimiters. The first delimiter separates pairs, and the
* second delimiter separates key and value.
* @param text the input text
* @param listDelimiter the delimiter to separates pairs
* @param keyValueDelimiter the delimiter to separates key and value
* @return the map
*/
public static Map<String, String> strToMap(String text, String listDelimiter, String keyValueDelimiter) {
if (StringUtils.isEmpty(text)) {
return EMPTY_MAP;
}
String[] keyValuePairs = text.split(listDelimiter);
Map<String, String> ret = new HashMap<>(keyValuePairs.length);
for (String keyValuePair : keyValuePairs) {
String[] keyValue = keyValuePair.split(keyValueDelimiter, 2);
if (keyValue.length < 2) {
ret.put(keyValuePair, null);
} else {
ret.put(keyValue[0], keyValue[1]);
}
}
return ret;
}
	/**
	 * Extracts the value at {@code pathString} from the JSON document
	 * {@code jsonString}, delegating to the shared {@link JsonUtils} instance.
	 */
	public static String jsonValue(String jsonString, String pathString) {
		// TODO: refactor this to use jackson ?
		return JsonUtils.getInstance().getJsonObject(jsonString, pathString);
	}
// SQL ROUND
/** SQL <code>ROUND</code> operator applied to int values. */
public static int sround(int b0) {
return sround(b0, 0);
}
/** SQL <code>ROUND</code> operator applied to int values. */
public static int sround(int b0, int b1) {
return sround(BigDecimal.valueOf(b0), b1).intValue();
}
/** SQL <code>ROUND</code> operator applied to long values. */
public static long sround(long b0) {
return sround(b0, 0);
}
/** SQL <code>ROUND</code> operator applied to long values. */
public static long sround(long b0, int b1) {
return sround(BigDecimal.valueOf(b0), b1).longValue();
}
/** SQL <code>ROUND</code> operator applied to BigDecimal values. */
public static BigDecimal sround(BigDecimal b0) {
return sround(b0, 0);
}
/** SQL <code>ROUND</code> operator applied to BigDecimal values. */
public static BigDecimal sround(BigDecimal b0, int b1) {
return b0.movePointRight(b1)
.setScale(0, RoundingMode.HALF_UP).movePointLeft(b1);
}
/** SQL <code>ROUND</code> operator applied to double values. */
public static double sround(double b0) {
return sround(b0, 0);
}
/** SQL <code>ROUND</code> operator applied to double values. */
public static double sround(double b0, int b1) {
return sround(BigDecimal.valueOf(b0), b1).doubleValue();
}
	/** SQL <code>ROUND</code> operator applied to Decimal values; rounds to 0 decimal places. */
	public static Decimal sround(Decimal b0) {
		return sround(b0, 0);
	}
	/** SQL <code>ROUND</code> operator applied to Decimal values; delegates to {@link Decimal#sround}. */
	public static Decimal sround(Decimal b0, int b1) {
		return Decimal.sround(b0, b1);
	}
	/** Returns the sign of {@code b0}, as defined by {@link Decimal#sign}. */
	public static Decimal sign(Decimal b0) {
		return Decimal.sign(b0);
	}
public static boolean isDecimal(Object obj) {
if ((obj instanceof Long)
|| (obj instanceof Integer)
|| (obj instanceof Short)
|| (obj instanceof Byte)
|| (obj instanceof Float)
|| (obj instanceof Double)
|| (obj instanceof BigDecimal)
|| (obj instanceof BigInteger)){
return true;
}
if (obj instanceof String || obj instanceof Character){
String s = obj.toString();
if (s.isEmpty()) {
return false;
}
return isInteger(s) || isLong(s) || isDouble(s);
} else {
return false;
}
}
private static boolean isInteger(String s) {
boolean flag = true;
try {
Integer.parseInt(s);
} catch (NumberFormatException e) {
flag = false;
}
return flag;
}
private static boolean isLong(String s) {
boolean flag = true;
try {
Long.parseLong(s);
} catch (NumberFormatException e) {
flag = false;
}
return flag;
}
private static boolean isDouble(String s) {
boolean flag = true;
try {
Double.parseDouble(s);
} catch (NumberFormatException e) {
flag = false;
}
return flag;
}
public static boolean isDigit(Object obj) {
if ((obj instanceof Long)
|| (obj instanceof Integer)
|| (obj instanceof Short)
|| (obj instanceof Byte)){
return true;
}
if (obj instanceof String){
String s = obj.toString();
if (s.isEmpty()) {
return false;
}
return StringUtils.isNumeric(s);
}
else {
return false;
}
}
public static boolean isAlpha(Object obj) {
if (obj == null){
return false;
}
if (!(obj instanceof String)){
return false;
}
String s = obj.toString();
if ("".equals(s)) {
return false;
}
return StringUtils.isAlpha(s);
}
public static Integer hashCode(String str){
if (str == null) {
return Integer.MIN_VALUE;
}
return Math.abs(str.hashCode());
}
public static Boolean regExp(String s, String regex){
if (regex.length() == 0) {
return false;
}
try {
return (REGEXP_PATTERN_CACHE.get(regex)).matcher(s).find(0);
} catch (Exception e) {
LOG.error("Exception when compile and match regex:" +
regex + " on: " + s, e);
return false;
}
}
public static Byte bitAnd(Byte a, Byte b) {
if (a == null || b == null) {
return 0;
}
return (byte) (a & b);
}
public static Short bitAnd(Short a, Short b) {
if (a == null || b == null) {
return 0;
}
return (short) (a & b);
}
public static Integer bitAnd(Integer a, Integer b) {
if (a == null || b == null) {
return 0;
}
return a & b;
}
public static Long bitAnd(Long a, Long b) {
if (a == null || b == null) {
return 0L;
}
return a & b;
}
public static Byte bitNot(Byte a) {
if (a == null) {
a = 0;
}
return (byte) (~a);
}
public static Short bitNot(Short a) {
if (a == null) {
a = 0;
}
return (short) (~a);
}
public static Integer bitNot(Integer a) {
if (a == null) {
a = 0;
}
return ~a;
}
public static Long bitNot(Long a) {
if (a == null) {
a = 0L;
}
return ~a;
}
public static Byte bitOr(Byte a, Byte b) {
if (a == null || b == null) {
if (a == null) {
a = 0;
}
if (b == null) {
b = 0;
}
}
return (byte) (a | b);
}
public static Short bitOr(Short a, Short b) {
if (a == null || b == null) {
if (a == null) {
a = 0;
}
if (b == null) {
b = 0;
}
}
return (short) (a | b);
}
public static Integer bitOr(Integer a, Integer b) {
if (a == null || b == null) {
if (a == null) {
a = 0;
}
if (b == null) {
b = 0;
}
}
return a | b;
}
public static Long bitOr(Long a, Long b) {
if (a == null || b == null) {
if (a == null) {
a = 0L;
}
if (b == null) {
b = 0L;
}
}
return a | b;
}
public static Byte bitXor(Byte a, Byte b) {
if (a == null || b == null) {
if (a == null) {
a = 0;
}
if (b == null) {
b = 0;
}
}
return (byte) (a ^ b);
}
public static Short bitXor(Short a, Short b) {
if (a == null || b == null) {
if (a == null) {
a = 0;
}
if (b == null) {
b = 0;
}
}
return (short) (a ^ b);
}
public static Integer bitXor(Integer a, Integer b) {
if (a == null || b == null) {
if (a == null) {
a = 0;
}
if (b == null) {
b = 0;
}
}
return a ^ b;
}
public static Long bitXor(Long a, Long b) {
if (a == null || b == null) {
if (a == null) {
a = 0L;
}
if (b == null) {
b = 0L;
}
}
return a ^ b;
}
public static String toBase64(BinaryString bs){
return toBase64(bs.getBytes());
}
public static String toBase64(byte[] bytes){
return Base64.getEncoder().encodeToString(bytes);
}
public static byte[] fromBase64(BinaryString bs){
return Base64.getDecoder().decode(bs.getBytes());
}
public static String uuid(){
return UUID.randomUUID().toString();
}
public static String uuid(byte[] b){
return UUID.nameUUIDFromBytes(b).toString();
}
	/** SQL <code>TRUNCATE</code> operator applied to Decimal values; truncates to 0 decimal places. */
	public static Decimal struncate(Decimal b0) {
		return struncate(b0, 0);
	}
	/** SQL <code>TRUNCATE</code> operator: cuts {@code b0} towards zero at {@code b1} decimal places. */
	public static Decimal struncate(Decimal b0, int b1) {
		if (b1 >= b0.getScale()) {
			// Requested scale keeps every stored digit: nothing to truncate.
			return b0;
		}
		// Shift the last kept digit into the ones place, drop the remainder
		// (RoundingMode.DOWN truncates towards zero), then shift back.
		BigDecimal b2 = b0.toBigDecimal().movePointRight(b1)
			.setScale(0, RoundingMode.DOWN).movePointLeft(b1);
		int p = b0.getPrecision();
		int s = b0.getScale();
		if (b1 < 0) {
			// NOTE(review): the resulting precision is the b1 == 0 formula
			// capped at 38 (the max precision this code uses elsewhere) —
			// confirm against Decimal.fromBigDecimal's precision rules.
			return Decimal.fromBigDecimal(b2, Math.min(38, 1 + p - s), 0);
		} else {
			return Decimal.fromBigDecimal(b2, 1 + p - s + b1, b1);
		}
	}
	/** SQL <code>TRUNCATE</code> operator applied to float values; truncates to 0 decimal places. */
	public static float struncate(float b0) {
		return struncate(b0, 0);
	}
	/** SQL <code>TRUNCATE</code> operator applied to float values; truncates at {@code b1} decimal places. */
	public static float struncate(float b0, int b1) {
		// Routes through Decimal(38, 18) so float truncation matches the
		// Decimal-based semantics above.
		return (float) struncate(Decimal.castFrom((double) b0, 38, 18), b1).doubleValue();
	}
}
| |
/*
* Copyright 2012-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import javax.annotation.PostConstruct;
import org.assertj.core.api.Assertions;
import org.assertj.core.api.Condition;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.mockito.ArgumentCaptor;
import reactor.core.publisher.Mono;
import org.springframework.beans.BeansException;
import org.springframework.beans.CachedIntrospectionResults;
import org.springframework.beans.factory.BeanCreationException;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.BeanNameGenerator;
import org.springframework.beans.factory.support.DefaultBeanNameGenerator;
import org.springframework.boot.context.event.ApplicationEnvironmentPreparedEvent;
import org.springframework.boot.context.event.ApplicationFailedEvent;
import org.springframework.boot.context.event.ApplicationPreparedEvent;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.boot.context.event.ApplicationStartingEvent;
import org.springframework.boot.testsupport.rule.OutputCapture;
import org.springframework.boot.web.embedded.netty.NettyReactiveWebServerFactory;
import org.springframework.boot.web.embedded.tomcat.TomcatServletWebServerFactory;
import org.springframework.boot.web.reactive.context.ReactiveWebApplicationContext;
import org.springframework.boot.web.reactive.context.ReactiveWebServerApplicationContext;
import org.springframework.boot.web.servlet.context.AnnotationConfigServletWebServerApplicationContext;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.ApplicationContextException;
import org.springframework.context.ApplicationContextInitializer;
import org.springframework.context.ApplicationEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.AnnotationConfigUtils;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.event.ApplicationEventMulticaster;
import org.springframework.context.event.ContextRefreshedEvent;
import org.springframework.context.event.SimpleApplicationEventMulticaster;
import org.springframework.context.support.AbstractApplicationContext;
import org.springframework.context.support.StaticApplicationContext;
import org.springframework.core.Ordered;
import org.springframework.core.env.CommandLinePropertySource;
import org.springframework.core.env.CompositePropertySource;
import org.springframework.core.env.ConfigurableEnvironment;
import org.springframework.core.env.Environment;
import org.springframework.core.env.MapPropertySource;
import org.springframework.core.env.PropertySource;
import org.springframework.core.env.StandardEnvironment;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.DefaultResourceLoader;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
import org.springframework.http.server.reactive.HttpHandler;
import org.springframework.test.context.support.TestPropertySourceUtils;
import org.springframework.util.ReflectionUtils;
import org.springframework.util.StringUtils;
import org.springframework.web.context.ConfigurableWebEnvironment;
import org.springframework.web.context.WebApplicationContext;
import org.springframework.web.context.support.StandardServletEnvironment;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.isA;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
/**
* Tests for {@link SpringApplication}.
*
* @author Phillip Webb
* @author Dave Syer
* @author Andy Wilkinson
* @author Christian Dupuis
* @author Stephane Nicoll
* @author Jeremy Rickard
* @author Craig Burke
* @author Madhura Bhave
* @author Brian Clozel
*/
public class SpringApplicationTests {
	// Saved value of "java.awt.headless", restored after each test.
	private String headlessProperty;
	@Rule
	public ExpectedException thrown = ExpectedException.none();
	@Rule
	public OutputCapture output = new OutputCapture();
	// Context started by the test under scrutiny; closed in cleanUp().
	private ConfigurableApplicationContext context;
	// Convenience accessor for the environment of the most recently run context.
	private Environment getEnvironment() {
		if (this.context != null) {
			return this.context.getEnvironment();
		}
		throw new IllegalStateException("Could not obtain Environment");
	}
	// Tests run with "java.awt.headless" unset; remember the original value so
	// it can be reinstated afterwards.
	@Before
	public void storeAndClearHeadlessProperty() {
		this.headlessProperty = System.getProperty("java.awt.headless");
		System.clearProperty("java.awt.headless");
	}
	// Restore "java.awt.headless" to its pre-test value (or clear it again).
	@After
	public void reinstateHeadlessProperty() {
		if (this.headlessProperty == null) {
			System.clearProperty("java.awt.headless");
		}
		else {
			System.setProperty("java.awt.headless", this.headlessProperty);
		}
	}
	// Close the context started by the test and scrub system properties that
	// individual tests may have set.
	@After
	public void cleanUp() {
		if (this.context != null) {
			this.context.close();
		}
		System.clearProperty("spring.main.banner-mode");
		System.clearProperty(CachedIntrospectionResults.IGNORE_BEANINFO_PROPERTY_NAME);
	}
	// Passing a null primary-sources array must be rejected eagerly.
	@Test
	public void sourcesMustNotBeNull() throws Exception {
		this.thrown.expect(IllegalArgumentException.class);
		this.thrown.expectMessage("PrimarySources must not be null");
		new SpringApplication((Class<?>[]) null).run();
	}
	// Running with no sources at all must be rejected.
	@Test
	public void sourcesMustNotBeEmpty() throws Exception {
		this.thrown.expect(IllegalArgumentException.class);
		this.thrown.expectMessage("Sources must not be empty");
		new SpringApplication().run();
	}
	// A configuration class that cannot be loaded fails with a clear message.
	@Test
	public void sourcesMustBeAccessible() throws Exception {
		this.thrown.expect(IllegalStateException.class);
		this.thrown.expectMessage("Cannot load configuration");
		new SpringApplication(InaccessibleConfiguration.class).run();
	}
	// A banner file given via --banner.location is printed on startup.
	@Test
	public void customBanner() throws Exception {
		SpringApplication application = spy(new SpringApplication(ExampleConfig.class));
		application.setWebApplicationType(WebApplicationType.NONE);
		this.context = application.run("--banner.location=classpath:test-banner.txt");
		assertThat(this.output.toString()).startsWith("Running a Test!");
	}
	// Property placeholders inside the banner are resolved from the environment.
	@Test
	public void customBannerWithProperties() throws Exception {
		SpringApplication application = spy(new SpringApplication(ExampleConfig.class));
		application.setWebApplicationType(WebApplicationType.NONE);
		this.context = application.run(
				"--banner.location=classpath:test-banner-with-placeholder.txt",
				"--test.property=123456");
		assertThat(this.output.toString()).containsPattern("Running a Test!\\s+123456");
	}
	// When both an image and a text banner resource exist, both are rendered.
	@Test
	public void imageBannerAndTextBanner() throws Exception {
		SpringApplication application = new SpringApplication(ExampleConfig.class);
		MockResourceLoader resourceLoader = new MockResourceLoader();
		resourceLoader.addResource("banner.gif", "black-and-white.gif");
		resourceLoader.addResource("banner.txt", "foobar.txt");
		application.setWebApplicationType(WebApplicationType.NONE);
		application.setResourceLoader(resourceLoader);
		application.run();
		assertThat(this.output.toString()).contains("@@@@").contains("Foo Bar");
	}
	// An image-only banner is converted to ASCII output.
	@Test
	public void imageBannerLoads() throws Exception {
		SpringApplication application = new SpringApplication(ExampleConfig.class);
		MockResourceLoader resourceLoader = new MockResourceLoader();
		resourceLoader.addResource("banner.gif", "black-and-white.gif");
		application.setWebApplicationType(WebApplicationType.NONE);
		application.setResourceLoader(resourceLoader);
		application.run();
		assertThat(this.output.toString()).contains("@@@@@@");
	}
	// Without spring.profiles.active the default-profile fallback is logged.
	@Test
	public void logsNoActiveProfiles() throws Exception {
		SpringApplication application = new SpringApplication(ExampleConfig.class);
		application.setWebApplicationType(WebApplicationType.NONE);
		this.context = application.run();
		assertThat(this.output.toString()).contains(
				"No active profile set, falling back to default profiles: default");
	}
@Test
public void logsActiveProfiles() throws Exception {
SpringApplication application = new SpringApplication(ExampleConfig.class);
application.setWebApplicationType(WebApplicationType.NONE);
this.context = application.run("--spring.profiles.active=myprofiles");
assertThat(this.output.toString())
.contains("The following profiles are active: myprofile");
}
	// spring.main.banner-mode=log routes the banner through the logger.
	@Test
	public void enableBannerInLogViaProperty() throws Exception {
		SpringApplication application = spy(new SpringApplication(ExampleConfig.class));
		application.setWebApplicationType(WebApplicationType.NONE);
		this.context = application.run("--spring.main.banner-mode=log");
		verify(application, atLeastOnce()).setBannerMode(Banner.Mode.LOG);
		assertThat(this.output.toString()).contains("o.s.b.SpringApplication");
	}
	// By default SpringApplication sets the ignore-beaninfo system property.
	@Test
	public void setIgnoreBeanInfoPropertyByDefault() throws Exception {
		SpringApplication application = new SpringApplication(ExampleConfig.class);
		application.setWebApplicationType(WebApplicationType.NONE);
		this.context = application.run();
		String property = System
				.getProperty(CachedIntrospectionResults.IGNORE_BEANINFO_PROPERTY_NAME);
		assertThat(property).isEqualTo("true");
	}
	// A pre-set ignore-beaninfo property is left untouched by startup.
	@Test
	public void disableIgnoreBeanInfoProperty() throws Exception {
		System.setProperty(CachedIntrospectionResults.IGNORE_BEANINFO_PROPERTY_NAME,
				"false");
		SpringApplication application = new SpringApplication(ExampleConfig.class);
		application.setWebApplicationType(WebApplicationType.NONE);
		this.context = application.run();
		String property = System
				.getProperty(CachedIntrospectionResults.IGNORE_BEANINFO_PROPERTY_NAME);
		assertThat(property).isEqualTo("false");
	}
	// Values loaded from config files must be bound onto SpringApplication
	// itself (here: banner mode read via reflection from the private field).
	@Test
	public void triggersConfigFileApplicationListenerBeforeBinding() throws Exception {
		SpringApplication application = new SpringApplication(ExampleConfig.class);
		application.setWebApplicationType(WebApplicationType.NONE);
		this.context = application.run("--spring.config.name=bindtoapplication");
		Field field = ReflectionUtils.findField(SpringApplication.class, "bannerMode");
		field.setAccessible(true);
		assertThat((Banner.Mode) field.get(application)).isEqualTo(Banner.Mode.OFF);
	}
	// System properties are also bound onto SpringApplication settings.
	@Test
	public void bindsSystemPropertyToSpringApplication() throws Exception {
		System.setProperty("spring.main.banner-mode", "off");
		SpringApplication application = new SpringApplication(ExampleConfig.class);
		application.setWebApplicationType(WebApplicationType.NONE);
		this.context = application.run();
		Field field = ReflectionUtils.findField(SpringApplication.class, "bannerMode");
		field.setAccessible(true);
		assertThat((Banner.Mode) field.get(application)).isEqualTo(Banner.Mode.OFF);
	}
	// spring.application.name feeds the context id.
	@Test
	public void customId() throws Exception {
		SpringApplication application = new SpringApplication(ExampleConfig.class);
		application.setWebApplicationType(WebApplicationType.NONE);
		this.context = application.run("--spring.application.name=foo");
		assertThat(this.context.getId()).startsWith("foo");
	}
	// An explicitly configured context class overrides the deduced one.
	@Test
	public void specificApplicationContextClass() throws Exception {
		SpringApplication application = new SpringApplication(ExampleConfig.class);
		application.setApplicationContextClass(StaticApplicationContext.class);
		this.context = application.run();
		assertThat(this.context).isInstanceOf(StaticApplicationContext.class);
	}
	// Custom initializers run against the created context and do not disable
	// the default ones (command-line property source still applies).
	@Test
	public void specificApplicationContextInitializer() throws Exception {
		SpringApplication application = new SpringApplication(ExampleConfig.class);
		application.setWebApplicationType(WebApplicationType.NONE);
		final AtomicReference<ApplicationContext> reference = new AtomicReference<>();
		application.setInitializers(Arrays.asList(
				(ApplicationContextInitializer<ConfigurableApplicationContext>) reference::set));
		this.context = application.run("--foo=bar");
		assertThat(this.context).isSameAs(reference.get());
		// Custom initializers do not switch off the defaults
		assertThat(getEnvironment().getProperty("foo")).isEqualTo("bar");
	}
	// ApplicationReadyEvent carries the SpringApplication that was run.
	@Test
	public void applicationRunningEventListener() {
		SpringApplication application = new SpringApplication(ExampleConfig.class);
		application.setWebApplicationType(WebApplicationType.NONE);
		final AtomicReference<SpringApplication> reference = new AtomicReference<>();
		class ApplicationReadyEventListener
				implements ApplicationListener<ApplicationReadyEvent> {
			@Override
			public void onApplicationEvent(ApplicationReadyEvent event) {
				reference.set(event.getSpringApplication());
			}
		}
		application.addListeners(new ApplicationReadyEventListener());
		this.context = application.run("--foo=bar");
		assertThat(application).isSameAs(reference.get());
	}
	// Listeners registered via setListeners receive ContextRefreshedEvent and
	// the default listeners stay active.
	@Test
	public void contextRefreshedEventListener() throws Exception {
		SpringApplication application = new SpringApplication(ExampleConfig.class);
		application.setWebApplicationType(WebApplicationType.NONE);
		final AtomicReference<ApplicationContext> reference = new AtomicReference<>();
		class InitializerListener implements ApplicationListener<ContextRefreshedEvent> {
			@Override
			public void onApplicationEvent(ContextRefreshedEvent event) {
				reference.set(event.getApplicationContext());
			}
		}
		application.setListeners(Arrays.asList(new InitializerListener()));
		this.context = application.run("--foo=bar");
		assertThat(this.context).isSameAs(reference.get());
		// Custom initializers do not switch off the defaults
		assertThat(getEnvironment().getProperty("foo")).isEqualTo("bar");
	}
	// The five lifecycle events are published in their documented order.
	@Test
	public void eventsOrder() {
		SpringApplication application = new SpringApplication(ExampleConfig.class);
		application.setWebApplicationType(WebApplicationType.NONE);
		final List<ApplicationEvent> events = new ArrayList<>();
		class ApplicationRunningEventListener
				implements ApplicationListener<ApplicationEvent> {
			@Override
			public void onApplicationEvent(ApplicationEvent event) {
				events.add((event));
			}
		}
		application.addListeners(new ApplicationRunningEventListener());
		this.context = application.run();
		assertThat(events).hasSize(5);
		assertThat(events.get(0)).isInstanceOf(ApplicationStartingEvent.class);
		assertThat(events.get(1)).isInstanceOf(ApplicationEnvironmentPreparedEvent.class);
		assertThat(events.get(2)).isInstanceOf(ApplicationPreparedEvent.class);
		assertThat(events.get(3)).isInstanceOf(ContextRefreshedEvent.class);
		assertThat(events.get(4)).isInstanceOf(ApplicationReadyEvent.class);
	}
@Test
public void defaultApplicationContext() throws Exception {
SpringApplication application = new SpringApplication(ExampleConfig.class);
application.setWebApplicationType(WebApplicationType.NONE);
this.context = application.run();
assertThat(this.context).isInstanceOf(AnnotationConfigApplicationContext.class);
}
@Test
public void defaultApplicationContextForWeb() throws Exception {
SpringApplication application = new SpringApplication(ExampleWebConfig.class);
application.setWebApplicationType(WebApplicationType.SERVLET);
this.context = application.run();
assertThat(this.context)
.isInstanceOf(AnnotationConfigServletWebServerApplicationContext.class);
}
@Test
public void defaultApplicationContextForReactiveWeb() throws Exception {
SpringApplication application = new SpringApplication(
ExampleReactiveWebConfig.class);
application.setWebApplicationType(WebApplicationType.REACTIVE);
this.context = application.run();
assertThat(this.context).isInstanceOf(ReactiveWebServerApplicationContext.class);
}
@Test
public void customEnvironment() throws Exception {
TestSpringApplication application = new TestSpringApplication(
ExampleConfig.class);
application.setWebApplicationType(WebApplicationType.NONE);
ConfigurableEnvironment environment = new StandardEnvironment();
application.setEnvironment(environment);
this.context = application.run();
verify(application.getLoader()).setEnvironment(environment);
}
@Test
public void customResourceLoader() throws Exception {
TestSpringApplication application = new TestSpringApplication(
ExampleConfig.class);
application.setWebApplicationType(WebApplicationType.NONE);
ResourceLoader resourceLoader = new DefaultResourceLoader();
application.setResourceLoader(resourceLoader);
this.context = application.run();
verify(application.getLoader()).setResourceLoader(resourceLoader);
}
@Test
public void customResourceLoaderFromConstructor() throws Exception {
ResourceLoader resourceLoader = new DefaultResourceLoader();
TestSpringApplication application = new TestSpringApplication(resourceLoader,
ExampleWebConfig.class);
this.context = application.run();
verify(application.getLoader()).setResourceLoader(resourceLoader);
}
@Test
public void customBeanNameGenerator() throws Exception {
TestSpringApplication application = new TestSpringApplication(
ExampleWebConfig.class);
BeanNameGenerator beanNameGenerator = new DefaultBeanNameGenerator();
application.setBeanNameGenerator(beanNameGenerator);
this.context = application.run();
verify(application.getLoader()).setBeanNameGenerator(beanNameGenerator);
Object actualGenerator = this.context
.getBean(AnnotationConfigUtils.CONFIGURATION_BEAN_NAME_GENERATOR);
assertThat(actualGenerator).isSameAs(beanNameGenerator);
}
@Test
public void customBeanNameGeneratorWithNonWebApplication() throws Exception {
TestSpringApplication application = new TestSpringApplication(
ExampleWebConfig.class);
application.setWebApplicationType(WebApplicationType.NONE);
BeanNameGenerator beanNameGenerator = new DefaultBeanNameGenerator();
application.setBeanNameGenerator(beanNameGenerator);
this.context = application.run();
verify(application.getLoader()).setBeanNameGenerator(beanNameGenerator);
Object actualGenerator = this.context
.getBean(AnnotationConfigUtils.CONFIGURATION_BEAN_NAME_GENERATOR);
assertThat(actualGenerator).isSameAs(beanNameGenerator);
}
// Command-line args should be exposed as a "commandLineArgs" property source.
@Test
public void commandLinePropertySource() throws Exception {
    SpringApplication application = new SpringApplication(ExampleConfig.class);
    application.setWebApplicationType(WebApplicationType.NONE);
    ConfigurableEnvironment environment = new StandardEnvironment();
    application.setEnvironment(environment);
    this.context = application.run("--foo=bar");
    assertThat(environment).has(matchingPropertySource(
            CommandLinePropertySource.class, "commandLineArgs"));
}
// When the environment already contains a "commandLineArgs" source, the real
// command line must be merged in front of it as a CompositePropertySource so
// new arguments win over pre-existing values.
@Test
public void commandLinePropertySourceEnhancesEnvironment() throws Exception {
    SpringApplication application = new SpringApplication(ExampleConfig.class);
    application.setWebApplicationType(WebApplicationType.NONE);
    ConfigurableEnvironment environment = new StandardEnvironment();
    environment.getPropertySources().addFirst(new MapPropertySource("commandLineArgs",
            Collections.<String, Object>singletonMap("foo", "original")));
    application.setEnvironment(environment);
    this.context = application.run("--foo=bar", "--bar=foo");
    assertThat(environment).has(
            matchingPropertySource(CompositePropertySource.class, "commandLineArgs"));
    assertThat(environment.getProperty("bar")).isEqualTo("foo");
    // New command line properties take precedence
    assertThat(environment.getProperty("foo")).isEqualTo("bar");
    // The composite keeps the fresh args first and the pre-existing source second.
    CompositePropertySource composite = (CompositePropertySource) environment
            .getPropertySources().get("commandLineArgs");
    assertThat(composite.getPropertySources()).hasSize(2);
    assertThat(composite.getPropertySources()).first().matches(
            (source) -> source.getName().equals("springApplicationCommandLineArgs"),
            "is named springApplicationCommandLineArgs");
    assertThat(composite.getPropertySources()).element(1).matches(
            (source) -> source.getName().equals("commandLineArgs"),
            "is named commandLineArgs");
}
// Properties loaded from the default properties file must land in a
// user-supplied environment (value "bucket" comes from the test classpath).
@Test
public void propertiesFileEnhancesEnvironment() throws Exception {
    SpringApplication application = new SpringApplication(ExampleConfig.class);
    application.setWebApplicationType(WebApplicationType.NONE);
    ConfigurableEnvironment environment = new StandardEnvironment();
    application.setEnvironment(environment);
    this.context = application.run();
    assertThat(environment.getProperty("foo")).isEqualTo("bucket");
}
// setAdditionalProfiles must activate the given profile in the environment.
@Test
public void addProfiles() throws Exception {
    SpringApplication application = new SpringApplication(ExampleConfig.class);
    application.setWebApplicationType(WebApplicationType.NONE);
    application.setAdditionalProfiles("foo");
    ConfigurableEnvironment environment = new StandardEnvironment();
    application.setEnvironment(environment);
    this.context = application.run();
    assertThat(environment.acceptsProfiles("foo")).isTrue();
}
// Additional profiles come first; profiles from the command line are appended.
@Test
public void addProfilesOrder() throws Exception {
    SpringApplication application = new SpringApplication(ExampleConfig.class);
    application.setWebApplicationType(WebApplicationType.NONE);
    application.setAdditionalProfiles("foo");
    ConfigurableEnvironment environment = new StandardEnvironment();
    application.setEnvironment(environment);
    this.context = application.run("--spring.profiles.active=bar,spam");
    // Command line should always come last
    assertThat(environment.getActiveProfiles()).containsExactly("foo", "bar", "spam");
}
// An additional active profile's properties file must override the default
// one ("fromotherpropertiesfile" is defined on the test classpath).
@Test
public void addProfilesOrderWithProperties() throws Exception {
    SpringApplication application = new SpringApplication(ExampleConfig.class);
    application.setWebApplicationType(WebApplicationType.NONE);
    application.setAdditionalProfiles("other");
    ConfigurableEnvironment environment = new StandardEnvironment();
    application.setEnvironment(environment);
    this.context = application.run();
    // Active profile should win over default
    assertThat(environment.getProperty("my.property"))
            .isEqualTo("fromotherpropertiesfile");
}
// Running with no arguments must not disturb normal property resolution.
// NOTE(review): the assertion only checks file-based resolution still works;
// it does not directly assert the absence of a "commandLineArgs" source.
@Test
public void emptyCommandLinePropertySourceNotAdded() throws Exception {
    SpringApplication application = new SpringApplication(ExampleConfig.class);
    application.setWebApplicationType(WebApplicationType.NONE);
    ConfigurableEnvironment environment = new StandardEnvironment();
    application.setEnvironment(environment);
    this.context = application.run();
    assertThat(environment.getProperty("foo")).isEqualTo("bucket");
}
// setAddCommandLineProperties(false) must suppress the commandLineArgs source.
@Test
public void disableCommandLinePropertySource() throws Exception {
    SpringApplication application = new SpringApplication(ExampleConfig.class);
    application.setWebApplicationType(WebApplicationType.NONE);
    application.setAddCommandLineProperties(false);
    ConfigurableEnvironment environment = new StandardEnvironment();
    application.setEnvironment(environment);
    this.context = application.run("--foo=bar");
    assertThat(environment).doesNotHave(
            matchingPropertySource(PropertySource.class, "commandLineArgs"));
}
// All CommandLineRunner and ApplicationRunner beans must be executed; the
// runner beans themselves assert relative ordering (see AbstractTestRunner).
@Test
public void runCommandLineRunnersAndApplicationRunners() throws Exception {
    SpringApplication application = new SpringApplication(CommandLineRunConfig.class);
    application.setWebApplicationType(WebApplicationType.NONE);
    this.context = application.run("arg");
    assertThat(this.context).has(runTestRunnerBean("runnerA"));
    assertThat(this.context).has(runTestRunnerBean("runnerB"));
    assertThat(this.context).has(runTestRunnerBean("runnerC"));
}
// getAllSources must combine primary sources (classes) and extra sources
// (the string "a") into a single set.
@Test
public void loadSources() throws Exception {
    Class<?>[] sources = { ExampleConfig.class, TestCommandLineRunner.class };
    TestSpringApplication application = new TestSpringApplication(sources);
    application.getSources().add("a");
    application.setWebApplicationType(WebApplicationType.NONE);
    application.setUseMockLoader(true);
    this.context = application.run();
    Set<Object> allSources = application.getAllSources();
    assertThat(allSources).contains(ExampleConfig.class, TestCommandLineRunner.class,
            "a");
}
// A source location containing a property placeholder must be resolvable;
// success is simply that run() does not throw.
@Test
public void wildcardSources() {
    TestSpringApplication application = new TestSpringApplication();
    application.getSources().add(
            "classpath:org/springframework/boot/sample-${sample.app.test.prop}.xml");
    application.setWebApplicationType(WebApplicationType.NONE);
    this.context = application.run();
}
// Smoke test for the static SpringApplication.run(Class) entry point.
@Test
public void run() throws Exception {
    this.context = SpringApplication.run(ExampleWebConfig.class);
    assertThat(this.context).isNotNull();
}
// Smoke test for the static run(Class[], String[]) overload.
@Test
public void runComponents() throws Exception {
    this.context = SpringApplication.run(
            new Class<?>[] { ExampleWebConfig.class, Object.class }, new String[0]);
    assertThat(this.context).isNotNull();
}
// SpringApplication.exit with no generators must report exit code 0.
@Test
public void exit() throws Exception {
    SpringApplication application = new SpringApplication(ExampleConfig.class);
    application.setWebApplicationType(WebApplicationType.NONE);
    this.context = application.run();
    assertThat(this.context).isNotNull();
    assertThat(SpringApplication.exit(this.context)).isEqualTo(0);
}
// An explicit ExitCodeGenerator must drive both the returned exit code and
// the ExitCodeEvent observed by listeners.
@Test
public void exitWithExplicitCode() throws Exception {
    SpringApplication application = new SpringApplication(ExampleConfig.class);
    ExitCodeListener listener = new ExitCodeListener();
    application.addListeners(listener);
    application.setWebApplicationType(WebApplicationType.NONE);
    this.context = application.run();
    assertThat(this.context).isNotNull();
    assertThat(SpringApplication.exit(this.context, (ExitCodeGenerator) () -> 2))
            .isEqualTo(2);
    assertThat(listener.getExitCode()).isEqualTo(2);
}
// A runner failure whose cause implements ExitCodeGenerator (exit code 11,
// see ExitStatusException) must be registered with the exception handler and
// published as an ExitCodeEvent.
@Test
public void exitWithExplicitCodeFromException() throws Exception {
    final SpringBootExceptionHandler handler = mock(SpringBootExceptionHandler.class);
    // Subclass to inject the mocked exception handler.
    SpringApplication application = new SpringApplication(
            ExitCodeCommandLineRunConfig.class) {
        @Override
        SpringBootExceptionHandler getSpringBootExceptionHandler() {
            return handler;
        }
    };
    ExitCodeListener listener = new ExitCodeListener();
    application.addListeners(listener);
    application.setWebApplicationType(WebApplicationType.NONE);
    try {
        application.run();
        fail("Did not throw");
    }
    catch (IllegalStateException ex) {
        // expected: the runner throws; the interesting behavior is verified below
    }
    verify(handler).registerExitCode(11);
    assertThat(listener.getExitCode()).isEqualTo(11);
}
// Same as above, but the code 11 comes from an ExitCodeExceptionMapper bean
// (see MappedExitCodeCommandLineRunConfig) rather than the exception itself.
@Test
public void exitWithExplicitCodeFromMappedException() throws Exception {
    final SpringBootExceptionHandler handler = mock(SpringBootExceptionHandler.class);
    SpringApplication application = new SpringApplication(
            MappedExitCodeCommandLineRunConfig.class) {
        @Override
        SpringBootExceptionHandler getSpringBootExceptionHandler() {
            return handler;
        }
    };
    ExitCodeListener listener = new ExitCodeListener();
    application.addListeners(listener);
    application.setWebApplicationType(WebApplicationType.NONE);
    try {
        application.run();
        fail("Did not throw");
    }
    catch (IllegalStateException ex) {
        // expected: runner failure; assertions below check the mapped code
    }
    verify(handler).registerExitCode(11);
    assertThat(listener.getExitCode()).isEqualTo(11);
}
// A refresh-time failure must be logged once via the exception handler with
// its original cause preserved, and must not degenerate into an NPE.
@Test
public void exceptionFromRefreshIsHandledGracefully() throws Exception {
    final SpringBootExceptionHandler handler = mock(SpringBootExceptionHandler.class);
    SpringApplication application = new SpringApplication(
            RefreshFailureConfig.class) {
        @Override
        SpringBootExceptionHandler getSpringBootExceptionHandler() {
            return handler;
        }
    };
    ExitCodeListener listener = new ExitCodeListener();
    application.addListeners(listener);
    application.setWebApplicationType(WebApplicationType.NONE);
    try {
        application.run();
        fail("Did not throw");
    }
    catch (RuntimeException ex) {
        // expected: refresh fails; verification happens below
    }
    ArgumentCaptor<RuntimeException> exceptionCaptor = ArgumentCaptor
            .forClass(RuntimeException.class);
    verify(handler).registerLoggedException(exceptionCaptor.capture());
    assertThat(exceptionCaptor.getValue())
            .hasCauseInstanceOf(RefreshFailureException.class);
    assertThat(this.output.toString()).doesNotContain("NullPointerException");
}
// Default properties must back-fill values ("baz") but lose to real
// command-line arguments ("bar").
@Test
public void defaultCommandLineArgs() throws Exception {
    SpringApplication application = new SpringApplication(ExampleConfig.class);
    application.setDefaultProperties(StringUtils.splitArrayElementsIntoProperties(
            new String[] { "baz=", "bar=spam" }, "="));
    application.setWebApplicationType(WebApplicationType.NONE);
    this.context = application.run("--bar=foo", "bucket", "crap");
    assertThat(this.context).isInstanceOf(AnnotationConfigApplicationContext.class);
    assertThat(getEnvironment().getProperty("bar")).isEqualTo("foo");
    assertThat(getEnvironment().getProperty("baz")).isEqualTo("");
}
// "spring.main.*" command-line properties must configure SpringApplication
// itself (here: banner mode).
@Test
public void commandLineArgsApplyToSpringApplication() throws Exception {
    TestSpringApplication application = new TestSpringApplication(
            ExampleConfig.class);
    application.setWebApplicationType(WebApplicationType.NONE);
    this.context = application.run("--spring.main.banner-mode=OFF");
    assertThat(application.getBannerMode()).isEqualTo(Banner.Mode.OFF);
}
// By default a JVM shutdown hook must be registered on the context.
@Test
public void registerShutdownHook() throws Exception {
    SpringApplication application = new SpringApplication(ExampleConfig.class);
    application.setApplicationContextClass(SpyApplicationContext.class);
    this.context = application.run();
    SpyApplicationContext applicationContext = (SpyApplicationContext) this.context;
    verify(applicationContext.getApplicationContext()).registerShutdownHook();
}
// Listeners added on the application must see both preparation and refresh
// events; listener beans declared in the context get the late events only.
@Test
public void registerListener() throws Exception {
    SpringApplication application = new SpringApplication(ExampleConfig.class,
            ListenerConfig.class);
    application.setApplicationContextClass(SpyApplicationContext.class);
    final LinkedHashSet<ApplicationEvent> events = new LinkedHashSet<>();
    application.addListeners((ApplicationListener<ApplicationEvent>) events::add);
    this.context = application.run();
    assertThat(events).hasAtLeastOneElementOfType(ApplicationPreparedEvent.class);
    assertThat(events).hasAtLeastOneElementOfType(ContextRefreshedEvent.class);
    verifyTestListenerEvents();
}
// Same as registerListener, but event delivery goes through a user-defined
// ApplicationEventMulticaster bean (see Multicaster config).
@Test
public void registerListenerWithCustomMulticaster() throws Exception {
    SpringApplication application = new SpringApplication(ExampleConfig.class,
            ListenerConfig.class, Multicaster.class);
    application.setApplicationContextClass(SpyApplicationContext.class);
    final LinkedHashSet<ApplicationEvent> events = new LinkedHashSet<>();
    application.addListeners((ApplicationListener<ApplicationEvent>) events::add);
    this.context = application.run();
    assertThat(events).hasAtLeastOneElementOfType(ApplicationPreparedEvent.class);
    assertThat(events).hasAtLeastOneElementOfType(ContextRefreshedEvent.class);
    verifyTestListenerEvents();
}
// Asserts the mock listener bean from ListenerConfig received exactly the
// context-refresh and application-ready events.
@SuppressWarnings("unchecked")
private void verifyTestListenerEvents() {
    ApplicationListener<ApplicationEvent> listener = this.context
            .getBean("testApplicationListener", ApplicationListener.class);
    verifyListenerEvents(listener, ContextRefreshedEvent.class,
            ApplicationReadyEvent.class);
}
// Verifies the mock listener saw one event of each given type and nothing
// else (verifyNoMoreInteractions makes the list exhaustive).
@SuppressWarnings("unchecked")
private void verifyListenerEvents(ApplicationListener<ApplicationEvent> listener,
        Class<? extends ApplicationEvent>... eventTypes) {
    for (Class<? extends ApplicationEvent> eventType : eventTypes) {
        verify(listener).onApplicationEvent(isA(eventType));
    }
    verifyNoMoreInteractions(listener);
}
// Application-level listeners must receive the full event sequence up to
// ApplicationFailedEvent when refresh fails (here: web config without a
// servlet environment raises ApplicationContextException).
@SuppressWarnings("unchecked")
@Test
public void applicationListenerFromApplicationIsCalledWhenContextFailsRefreshBeforeListenerRegistration() {
    ApplicationListener<ApplicationEvent> listener = mock(ApplicationListener.class);
    SpringApplication application = new SpringApplication(ExampleConfig.class);
    application.addListeners(listener);
    try {
        application.run();
        fail("Run should have failed with an ApplicationContextException");
    }
    catch (ApplicationContextException ex) {
        verifyListenerEvents(listener, ApplicationStartingEvent.class,
                ApplicationEnvironmentPreparedEvent.class,
                ApplicationPreparedEvent.class, ApplicationFailedEvent.class);
    }
}
// Same guarantee as above, but the refresh failure happens after listener
// registration (a @PostConstruct that throws -> BeanCreationException).
@SuppressWarnings("unchecked")
@Test
public void applicationListenerFromApplicationIsCalledWhenContextFailsRefreshAfterListenerRegistration() {
    ApplicationListener<ApplicationEvent> listener = mock(ApplicationListener.class);
    SpringApplication application = new SpringApplication(
            BrokenPostConstructConfig.class);
    application.setWebApplicationType(WebApplicationType.NONE);
    application.addListeners(listener);
    try {
        application.run();
        fail("Run should have failed with a BeanCreationException");
    }
    catch (BeanCreationException ex) {
        verifyListenerEvents(listener, ApplicationStartingEvent.class,
                ApplicationEnvironmentPreparedEvent.class,
                ApplicationPreparedEvent.class, ApplicationFailedEvent.class);
    }
}
// A listener registered directly on the context (via an initializer) only
// receives the failure event when refresh fails before registration completes.
@SuppressWarnings("unchecked")
@Test
public void applicationListenerFromContextIsCalledWhenContextFailsRefreshBeforeListenerRegistration() {
    final ApplicationListener<ApplicationEvent> listener = mock(
            ApplicationListener.class);
    SpringApplication application = new SpringApplication(ExampleConfig.class);
    application.addInitializers((applicationContext) -> applicationContext
            .addApplicationListener(listener));
    try {
        application.run();
        fail("Run should have failed with an ApplicationContextException");
    }
    catch (ApplicationContextException ex) {
        verifyListenerEvents(listener, ApplicationFailedEvent.class);
    }
}
// As above, for a refresh failure after listener registration: the
// context-registered listener still only sees ApplicationFailedEvent.
@SuppressWarnings("unchecked")
@Test
public void applicationListenerFromContextIsCalledWhenContextFailsRefreshAfterListenerRegistration() {
    ApplicationListener<ApplicationEvent> listener = mock(ApplicationListener.class);
    SpringApplication application = new SpringApplication(
            BrokenPostConstructConfig.class);
    application.setWebApplicationType(WebApplicationType.NONE);
    application.addInitializers((applicationContext) -> applicationContext
            .addApplicationListener(listener));
    try {
        application.run();
        fail("Run should have failed with a BeanCreationException");
    }
    catch (BeanCreationException ex) {
        verifyListenerEvents(listener, ApplicationFailedEvent.class);
    }
}
// setRegisterShutdownHook(false) must suppress the shutdown hook.
@Test
public void registerShutdownHookOff() throws Exception {
    SpringApplication application = new SpringApplication(ExampleConfig.class);
    application.setApplicationContextClass(SpyApplicationContext.class);
    application.setRegisterShutdownHook(false);
    this.context = application.run();
    SpyApplicationContext applicationContext = (SpyApplicationContext) this.context;
    verify(applicationContext.getApplicationContext(), never())
            .registerShutdownHook();
}
// java.awt.headless defaults to "true" when run() configures the JVM.
@Test
public void headless() throws Exception {
    TestSpringApplication application = new TestSpringApplication(
            ExampleConfig.class);
    application.setWebApplicationType(WebApplicationType.NONE);
    this.context = application.run();
    assertThat(System.getProperty("java.awt.headless")).isEqualTo("true");
}
// setHeadless(false) must be reflected in the system property.
@Test
public void headlessFalse() throws Exception {
    TestSpringApplication application = new TestSpringApplication(
            ExampleConfig.class);
    application.setWebApplicationType(WebApplicationType.NONE);
    application.setHeadless(false);
    this.context = application.run();
    assertThat(System.getProperty("java.awt.headless")).isEqualTo("false");
}
// A pre-set java.awt.headless system property must not be overridden.
// NOTE(review): the property is set but never restored here — presumably a
// test-wide rule/teardown resets system properties; confirm.
@Test
public void headlessSystemPropertyTakesPrecedence() throws Exception {
    System.setProperty("java.awt.headless", "false");
    TestSpringApplication application = new TestSpringApplication(
            ExampleConfig.class);
    application.setWebApplicationType(WebApplicationType.NONE);
    this.context = application.run();
    assertThat(System.getProperty("java.awt.headless")).isEqualTo("false");
}
// An ApplicationArguments bean exposing parsed option/non-option args must
// be registered in the context.
@Test
public void getApplicationArgumentsBean() throws Exception {
    TestSpringApplication application = new TestSpringApplication(
            ExampleConfig.class);
    application.setWebApplicationType(WebApplicationType.NONE);
    this.context = application.run("--debug", "spring", "boot");
    ApplicationArguments args = this.context.getBean(ApplicationArguments.class);
    assertThat(args.getNonOptionArgs()).containsExactly("spring", "boot");
    assertThat(args.containsOption("debug")).isTrue();
}
// A listener may switch the application to non-web during environment
// preparation; the servlet environment must then be converted, keeping the
// inlined property and the expected property-source ordering.
@Test
public void webApplicationSwitchedOffInListener() throws Exception {
    TestSpringApplication application = new TestSpringApplication(
            ExampleConfig.class);
    application.addListeners(
            (ApplicationListener<ApplicationEnvironmentPreparedEvent>) (event) -> {
                // The environment starts out servlet-based...
                Assertions.assertThat(event.getEnvironment())
                        .isInstanceOf(StandardServletEnvironment.class);
                TestPropertySourceUtils.addInlinedPropertiesToEnvironment(
                        event.getEnvironment(), "foo=bar");
                // ...then the listener downgrades the application to non-web.
                event.getSpringApplication()
                        .setWebApplicationType(WebApplicationType.NONE);
            });
    this.context = application.run();
    assertThat(this.context.getEnvironment())
            .isNotInstanceOf(StandardServletEnvironment.class);
    assertThat(this.context.getEnvironment().getProperty("foo")).isEqualTo("bar");
    Iterator<PropertySource<?>> iterator = this.context.getEnvironment()
            .getPropertySources().iterator();
    assertThat(iterator.next().getName()).isEqualTo("configurationProperties");
    assertThat(iterator.next().getName()).isEqualTo(
            TestPropertySourceUtils.INLINED_PROPERTIES_PROPERTY_SOURCE_NAME);
}
// "--spring.main.web-application-type=NONE" must produce a plain (non-web,
// non-reactive) context and environment.
@Test
public void nonWebApplicationConfiguredViaAPropertyHasTheCorrectTypeOfContextAndEnvironment() {
    // Assign to the test field (instead of a local) so the shared teardown
    // closes this context too; the original local variable leaked it.
    this.context = new SpringApplication(ExampleConfig.class)
            .run("--spring.main.web-application-type=NONE");
    assertThat(this.context).isNotInstanceOfAny(WebApplicationContext.class,
            ReactiveWebApplicationContext.class);
    assertThat(this.context.getEnvironment())
            .isNotInstanceOfAny(ConfigurableWebEnvironment.class);
}
// A startup failure must log its root cause exactly once — duplicate stack
// traces would indicate double error reporting.
@Test
public void failureResultsInSingleStackTrace() throws Exception {
    // Run on a thread named/grouped "main" so failure reporting matches a
    // real application launch.
    ThreadGroup group = new ThreadGroup("main");
    Thread thread = new Thread(group, "main") {
        @Override
        public void run() {
            SpringApplication application = new SpringApplication(
                    FailingConfig.class);
            application.setWebApplicationType(WebApplicationType.NONE);
            application.run();
        }
        // NOTE: removed a stray ';' that followed this method body.
    };
    thread.start();
    thread.join(6000);
    int occurrences = StringUtils.countOccurrencesOf(this.output.toString(),
            "Caused by: java.lang.RuntimeException: ExpectedError");
    assertThat(occurrences).as("Expected single stacktrace").isEqualTo(1);
}
/**
 * Builds an AssertJ condition matching an environment that contains a
 * property source of the given type and (optionally) name.
 * @param propertySourceClass required property-source type
 * @param name required source name, or {@code null} to match any name
 */
private Condition<ConfigurableEnvironment> matchingPropertySource(
        final Class<?> propertySourceClass, final String name) {
    return new Condition<ConfigurableEnvironment>("has property source") {
        @Override
        public boolean matches(ConfigurableEnvironment value) {
            for (PropertySource<?> source : value.getPropertySources()) {
                if (propertySourceClass.isInstance(source)
                        && (name == null || name.equals(source.getName()))) {
                    return true;
                }
            }
            return false;
        }
    };
}
/**
 * Builds an AssertJ condition matching a context whose named
 * {@link AbstractTestRunner} bean has been executed.
 */
private Condition<ConfigurableApplicationContext> runTestRunnerBean(
        final String name) {
    return new Condition<ConfigurableApplicationContext>("run testrunner bean") {
        @Override
        public boolean matches(ConfigurableApplicationContext value) {
            return value.getBean(name, AbstractTestRunner.class).hasRun();
        }
    };
}
// Configuration with a private constructor, used to check that such classes
// can still be processed.
@Configuration
protected static class InaccessibleConfiguration {
    private InaccessibleConfiguration() {
    }
}
// Context that delegates shutdown-hook registration and close() to a Mockito
// spy, so tests can verify those interactions.
public static class SpyApplicationContext extends AnnotationConfigApplicationContext {
    ConfigurableApplicationContext applicationContext = spy(
            new AnnotationConfigApplicationContext());
    @Override
    public void registerShutdownHook() {
        this.applicationContext.registerShutdownHook();
    }
    public ConfigurableApplicationContext getApplicationContext() {
        return this.applicationContext;
    }
    @Override
    public void close() {
        this.applicationContext.close();
    }
}
// SpringApplication subclass exposing test hooks: a (mockable or spied)
// BeanDefinitionLoader and the last banner mode that was applied.
private static class TestSpringApplication extends SpringApplication {
    private BeanDefinitionLoader loader;
    private boolean useMockLoader;
    private Banner.Mode bannerMode;
    TestSpringApplication(Class<?>... primarySources) {
        super(primarySources);
    }
    TestSpringApplication(ResourceLoader resourceLoader, Class<?>... primarySources) {
        super(resourceLoader, primarySources);
    }
    // When true, createBeanDefinitionLoader returns a pure mock instead of a
    // spy wrapping the real loader.
    public void setUseMockLoader(boolean useMockLoader) {
        this.useMockLoader = useMockLoader;
    }
    @Override
    protected BeanDefinitionLoader createBeanDefinitionLoader(
            BeanDefinitionRegistry registry, Object[] sources) {
        if (this.useMockLoader) {
            this.loader = mock(BeanDefinitionLoader.class);
        }
        else {
            this.loader = spy(super.createBeanDefinitionLoader(registry, sources));
        }
        return this.loader;
    }
    public BeanDefinitionLoader getLoader() {
        return this.loader;
    }
    @Override
    public void setBannerMode(Banner.Mode bannerMode) {
        super.setBannerMode(bannerMode);
        this.bannerMode = bannerMode;
    }
    public Banner.Mode getBannerMode() {
        return this.bannerMode;
    }
}
// Minimal empty configuration for non-web tests.
@Configuration
static class ExampleConfig {
}
// Declares a bean whose @PostConstruct throws, forcing refresh to fail with
// a BeanCreationException after listener registration.
@Configuration
static class BrokenPostConstructConfig {
    @Bean
    public Thing thing() {
        return new Thing();
    }
    static class Thing {
        @PostConstruct
        public void boom() {
            throw new IllegalStateException();
        }
    }
}
// Exposes a mock ApplicationListener bean so tests can verify which events
// reach listeners declared inside the context.
@Configuration
static class ListenerConfig {
    @Bean
    public ApplicationListener<?> testApplicationListener() {
        return mock(ApplicationListener.class);
    }
}
// Registers a spied custom event multicaster under the well-known bean name
// so event delivery goes through it.
@Configuration
static class Multicaster {
    @Bean(name = AbstractApplicationContext.APPLICATION_EVENT_MULTICASTER_BEAN_NAME)
    public ApplicationEventMulticaster applicationEventMulticaster() {
        return spy(new SimpleApplicationEventMulticaster());
    }
}
// Servlet web configuration; port 0 lets Tomcat pick a free port.
@Configuration
static class ExampleWebConfig {
    @Bean
    public TomcatServletWebServerFactory webServer() {
        return new TomcatServletWebServerFactory(0);
    }
}
// Reactive web configuration; port 0 picks a free port, handler is a no-op.
@Configuration
static class ExampleReactiveWebConfig {
    @Bean
    public NettyReactiveWebServerFactory webServerFactory() {
        return new NettyReactiveWebServerFactory(0);
    }
    @Bean
    public HttpHandler httpHandler() {
        return (serverHttpRequest, serverHttpResponse) -> Mono.empty();
    }
}
// Bean creation always fails with a recognizable message, used by the
// single-stack-trace test.
@Configuration
static class FailingConfig {
    @Bean
    public Object fail() {
        throw new RuntimeException("ExpectedError");
    }
}
// Three runners with explicit ordering: runnerA (highest precedence) runs
// first, then runnerB, then runnerC; each runner asserts its predecessors
// have already run (see AbstractTestRunner.markAsRan).
@Configuration
static class CommandLineRunConfig {
    @Bean
    public TestCommandLineRunner runnerC() {
        return new TestCommandLineRunner(Ordered.LOWEST_PRECEDENCE, "runnerB",
                "runnerA");
    }
    @Bean
    public TestApplicationRunner runnerB() {
        return new TestApplicationRunner(Ordered.LOWEST_PRECEDENCE - 1, "runnerA");
    }
    @Bean
    public TestCommandLineRunner runnerA() {
        return new TestCommandLineRunner(Ordered.HIGHEST_PRECEDENCE);
    }
}
// Runner that fails with a cause implementing ExitCodeGenerator (code 11,
// see ExitStatusException).
@Configuration
static class ExitCodeCommandLineRunConfig {
    @Bean
    public CommandLineRunner runner() {
        return (args) -> {
            throw new IllegalStateException(new ExitStatusException());
        };
    }
}
// Runner that fails with a plain IllegalStateException, plus a mapper bean
// that translates that exception type to exit code 11 (anything else -> 0).
@Configuration
static class MappedExitCodeCommandLineRunConfig {
    @Bean
    public CommandLineRunner runner() {
        return (args) -> {
            throw new IllegalStateException();
        };
    }
    @Bean
    public ExitCodeExceptionMapper exceptionMapper() {
        return (thrown) -> (thrown instanceof IllegalStateException) ? 11 : 0;
    }
}
// Fails during refresh via @PostConstruct with a dedicated exception type so
// tests can assert the cause chain.
@Configuration
static class RefreshFailureConfig {
    @PostConstruct
    public void fail() {
        throw new RefreshFailureException();
    }
}
// Exception that carries its own exit code (11) via ExitCodeGenerator.
static class ExitStatusException extends RuntimeException
        implements ExitCodeGenerator {
    @Override
    public int getExitCode() {
        return 11;
    }
}
// Marker exception thrown by RefreshFailureConfig during refresh.
static class RefreshFailureException extends RuntimeException {
}
// Base runner that records whether it ran and, when marked as ran, asserts
// that every runner bean it expects to precede it has already run.
static class AbstractTestRunner implements ApplicationContextAware, Ordered {
    // Bean names of runners that must have executed before this one.
    private final String[] expectedBefore;
    private ApplicationContext applicationContext;
    private final int order;
    private boolean run;
    AbstractTestRunner(int order, String... expectedBefore) {
        this.expectedBefore = expectedBefore;
        this.order = order;
    }
    @Override
    public void setApplicationContext(ApplicationContext applicationContext)
            throws BeansException {
        this.applicationContext = applicationContext;
    }
    @Override
    public int getOrder() {
        return this.order;
    }
    // Called from the concrete runner's run(); verifies ordering invariants.
    public void markAsRan() {
        this.run = true;
        for (String name : this.expectedBefore) {
            AbstractTestRunner bean = this.applicationContext.getBean(name,
                    AbstractTestRunner.class);
            assertThat(bean.hasRun()).isTrue();
        }
    }
    public boolean hasRun() {
        return this.run;
    }
}
// CommandLineRunner flavor of AbstractTestRunner.
private static class TestCommandLineRunner extends AbstractTestRunner
        implements CommandLineRunner {
    TestCommandLineRunner(int order, String... expectedBefore) {
        super(order, expectedBefore);
    }
    @Override
    public void run(String... args) {
        markAsRan();
    }
}
// ApplicationRunner flavor of AbstractTestRunner.
private static class TestApplicationRunner extends AbstractTestRunner
        implements ApplicationRunner {
    TestApplicationRunner(int order, String... expectedBefore) {
        super(order, expectedBefore);
    }
    @Override
    public void run(ApplicationArguments args) {
        markAsRan();
    }
}
// Captures the exit code published via ExitCodeEvent for later assertion.
private static class ExitCodeListener implements ApplicationListener<ExitCodeEvent> {
    private int exitCode;
    @Override
    public void onApplicationEvent(ExitCodeEvent event) {
        this.exitCode = event.getExitCode();
    }
    public int getExitCode() {
        return this.exitCode;
    }
}
// ResourceLoader backed by an explicit path -> resource map; unknown paths
// resolve to a classpath resource that does not exist.
private static class MockResourceLoader implements ResourceLoader {
    private final Map<String, Resource> resources = new HashMap<>();
    public void addResource(String source, String path) {
        this.resources.put(source, new ClassPathResource(path, getClass()));
    }
    @Override
    public Resource getResource(String path) {
        Resource fallback = new ClassPathResource("doesnotexist");
        return this.resources.getOrDefault(path, fallback);
    }
    @Override
    public ClassLoader getClassLoader() {
        return getClass().getClassLoader();
    }
}
}
| |
package org.apache.lucene.codecs.memory;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.DocValuesProducer;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.RandomAccessOrds;
import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.RamUsageEstimator;
/**
* Reader for {@link DirectDocValuesFormat}
*/
class DirectDocValuesProducer extends DocValuesProducer {
  // metadata maps (just file pointers and minimal stuff)
  private final Map<String,NumericEntry> numerics = new HashMap<>();
  private final Map<String,BinaryEntry> binaries = new HashMap<>();
  private final Map<String,SortedEntry> sorteds = new HashMap<>();
  private final Map<String,SortedSetEntry> sortedSets = new HashMap<>();
  private final Map<String,SortedNumericEntry> sortedNumerics = new HashMap<>();
  private final IndexInput data;
  // ram instances we have already loaded
  private final Map<String,NumericRawValues> numericInstances = new HashMap<>();
  private final Map<String,BinaryRawValues> binaryInstances = new HashMap<>();
  private final Map<String,SortedRawValues> sortedInstances = new HashMap<>();
  private final Map<String,SortedSetRawValues> sortedSetInstances = new HashMap<>();
  private final Map<String,SortedNumericRawValues> sortedNumericInstances = new HashMap<>();
  private final Map<String,FixedBitSet> docsWithFieldInstances = new HashMap<>();
  private final int numEntries;
  private final int maxDoc;
  // tracks heap usage of the loaded instances for Accountable reporting
  private final AtomicLong ramBytesUsed;
  private final int version;
  // true for merge-time clones; such clones never cache new instances
  private final boolean merging;
  // type tags written in the metadata stream, one per field entry
  static final byte NUMBER = 0;
  static final byte BYTES = 1;
  static final byte SORTED = 2;
  static final byte SORTED_SET = 3;
  static final byte SORTED_SET_SINGLETON = 4;
  static final byte SORTED_NUMERIC = 5;
  static final byte SORTED_NUMERIC_SINGLETON = 6;
  // supported on-disk format version range
  static final int VERSION_START = 3;
  static final int VERSION_CURRENT = VERSION_START;
  // clone for merge: when merging we don't do any instances.put()s
  DirectDocValuesProducer(DirectDocValuesProducer original) throws IOException {
    // Caller must hold the original's lock while its maps are copied.
    assert Thread.holdsLock(original);
    numerics.putAll(original.numerics);
    binaries.putAll(original.binaries);
    sorteds.putAll(original.sorteds);
    sortedSets.putAll(original.sortedSets);
    sortedNumerics.putAll(original.sortedNumerics);
    // Clone the data input so this producer has an independent file pointer.
    data = original.data.clone();
    numericInstances.putAll(original.numericInstances);
    binaryInstances.putAll(original.binaryInstances);
    sortedInstances.putAll(original.sortedInstances);
    sortedSetInstances.putAll(original.sortedSetInstances);
    sortedNumericInstances.putAll(original.sortedNumericInstances);
    docsWithFieldInstances.putAll(original.docsWithFieldInstances);
    numEntries = original.numEntries;
    maxDoc = original.maxDoc;
    ramBytesUsed = new AtomicLong(original.ramBytesUsed.get());
    version = original.version;
    merging = true;
  }
  // Opens the metadata and data files for a segment: reads and checksums the
  // whole metadata file, then opens the data file and verifies its header
  // version matches and its checksum footer is structurally sound.
  DirectDocValuesProducer(SegmentReadState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) throws IOException {
    maxDoc = state.segmentInfo.getDocCount();
    merging = false;
    String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
    // read in the entries from the metadata file.
    ChecksumIndexInput in = state.directory.openChecksumInput(metaName, state.context);
    ramBytesUsed = new AtomicLong(RamUsageEstimator.shallowSizeOfInstance(getClass()));
    boolean success = false;
    try {
      version = CodecUtil.checkIndexHeader(in, metaCodec, VERSION_START, VERSION_CURRENT,
                                           state.segmentInfo.getId(), state.segmentSuffix);
      numEntries = readFields(in, state.fieldInfos);
      CodecUtil.checkFooter(in);
      success = true;
    } finally {
      // Metadata input is fully consumed either way; suppress secondary
      // exceptions only on the failure path.
      if (success) {
        IOUtils.close(in);
      } else {
        IOUtils.closeWhileHandlingException(in);
      }
    }
    String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
    this.data = state.directory.openInput(dataName, state.context);
    success = false;
    try {
      final int version2 = CodecUtil.checkIndexHeader(data, dataCodec, VERSION_START, VERSION_CURRENT,
                                                      state.segmentInfo.getId(), state.segmentSuffix);
      if (version != version2) {
        throw new CorruptIndexException("Format versions mismatch: meta=" + version + ", data=" + version2, data);
      }
      // NOTE: data file is too costly to verify checksum against all the bytes on open,
      // but for now we at least verify proper structure of the checksum footer: which looks
      // for FOOTER_MAGIC + algorithmID. This is cheap and can detect some forms of corruption
      // such as file truncation.
      CodecUtil.retrieveChecksum(data);
      success = true;
    } finally {
      if (!success) {
        IOUtils.closeWhileHandlingException(this.data);
      }
    }
  }
/** Decodes one numeric-field entry from the metadata stream (fixed field order). */
private NumericEntry readNumericEntry(IndexInput meta) throws IOException {
  final NumericEntry e = new NumericEntry();
  e.offset = meta.readLong();
  e.count = meta.readInt();
  e.missingOffset = meta.readLong();
  // A sentinel offset of -1 means no missing-docs bitset was written for this field.
  e.missingBytes = e.missingOffset == -1 ? 0 : meta.readLong();
  e.byteWidth = meta.readByte();
  return e;
}
/** Decodes one binary-field entry from the metadata stream (fixed field order). */
private BinaryEntry readBinaryEntry(IndexInput meta) throws IOException {
  final BinaryEntry e = new BinaryEntry();
  e.offset = meta.readLong();
  e.numBytes = meta.readInt();
  e.count = meta.readInt();
  e.missingOffset = meta.readLong();
  // A sentinel offset of -1 means no missing-docs bitset was written for this field.
  e.missingBytes = e.missingOffset == -1 ? 0 : meta.readLong();
  return e;
}
/** Decodes a sorted-field entry: doc-to-ordinal numerics followed by the term dictionary. */
private SortedEntry readSortedEntry(IndexInput meta) throws IOException {
  final SortedEntry e = new SortedEntry();
  e.docToOrd = readNumericEntry(meta);
  e.values = readBinaryEntry(meta);
  return e;
}
/** Decodes a sorted-set entry; singleton fields carry no per-document address table. */
private SortedSetEntry readSortedSetEntry(IndexInput meta, boolean singleton) throws IOException {
  final SortedSetEntry e = new SortedSetEntry();
  if (!singleton) {
    e.docToOrdAddress = readNumericEntry(meta);
  }
  e.ords = readNumericEntry(meta);
  e.values = readBinaryEntry(meta);
  return e;
}
/** Decodes a sorted-numeric entry; singleton fields carry no per-document address table. */
private SortedNumericEntry readSortedNumericEntry(IndexInput meta, boolean singleton) throws IOException {
  final SortedNumericEntry e = new SortedNumericEntry();
  if (!singleton) {
    e.docToAddress = readNumericEntry(meta);
  }
  e.values = readNumericEntry(meta);
  return e;
}
/**
 * Reads all per-field metadata entries until the terminating -1 field number,
 * populating the per-type entry maps.
 *
 * @return the number of entries read
 * @throws CorruptIndexException on an unknown field number or entry type
 */
private int readFields(IndexInput meta, FieldInfos infos) throws IOException {
  int numEntries = 0;
  for (int fieldNumber = meta.readVInt(); fieldNumber != -1; fieldNumber = meta.readVInt()) {
    numEntries++;
    FieldInfo info = infos.fieldInfo(fieldNumber);
    if (info == null) {
      // Previously this would NPE below; a stale/corrupt meta file should surface
      // as index corruption, not a programming error.
      throw new CorruptIndexException("invalid field number: " + fieldNumber, meta);
    }
    int fieldType = meta.readByte();
    if (fieldType == NUMBER) {
      numerics.put(info.name, readNumericEntry(meta));
    } else if (fieldType == BYTES) {
      binaries.put(info.name, readBinaryEntry(meta));
    } else if (fieldType == SORTED) {
      SortedEntry entry = readSortedEntry(meta);
      sorteds.put(info.name, entry);
      // The term dictionary is also registered as a binary entry so getBinary works.
      binaries.put(info.name, entry.values);
    } else if (fieldType == SORTED_SET || fieldType == SORTED_SET_SINGLETON) {
      SortedSetEntry entry = readSortedSetEntry(meta, fieldType == SORTED_SET_SINGLETON);
      sortedSets.put(info.name, entry);
      binaries.put(info.name, entry.values);
    } else if (fieldType == SORTED_NUMERIC || fieldType == SORTED_NUMERIC_SINGLETON) {
      sortedNumerics.put(info.name, readSortedNumericEntry(meta, fieldType == SORTED_NUMERIC_SINGLETON));
    } else {
      throw new CorruptIndexException("invalid entry type: " + fieldType + ", field= " + info.name, meta);
    }
  }
  return numEntries;
}
/** Total heap used by this producer; grows incrementally as fields are lazily loaded. */
@Override
public long ramBytesUsed() {
  return ramBytesUsed.get();
}
/**
 * Lists every lazily-loaded per-field instance (and missing-docs bitset) as a
 * named accountable, for memory diagnostics.
 */
@Override
public synchronized Collection<Accountable> getChildResources() {
  List<Accountable> resources = new ArrayList<>();
  resources.addAll(Accountables.namedAccountables("numeric field", numericInstances));
  resources.addAll(Accountables.namedAccountables("binary field", binaryInstances));
  resources.addAll(Accountables.namedAccountables("sorted field", sortedInstances));
  resources.addAll(Accountables.namedAccountables("sorted set field", sortedSetInstances));
  resources.addAll(Accountables.namedAccountables("sorted numeric field", sortedNumericInstances));
  resources.addAll(Accountables.namedAccountables("missing bitset field", docsWithFieldInstances));
  return Collections.unmodifiableList(resources);
}
/** Debug string: simple class name plus the number of metadata entries. */
@Override
public String toString() {
  return new StringBuilder(getClass().getSimpleName())
      .append("(entries=")
      .append(numEntries)
      .append(')')
      .toString();
}
/** Verifies the checksum of the entire data file (reads every byte; expensive). */
@Override
public void checkIntegrity() throws IOException {
  CodecUtil.checksumEntireFile(data);
}
/**
 * Returns numeric doc values for {@code field}, loading them on first access.
 * Loaded values are cached and accounted for, except while merging.
 */
@Override
public synchronized NumericDocValues getNumeric(FieldInfo field) throws IOException {
  NumericRawValues raw = numericInstances.get(field.name);
  if (raw == null) {
    raw = loadNumeric(numerics.get(field.name));
    if (merging == false) {
      numericInstances.put(field.name, raw);
      ramBytesUsed.addAndGet(raw.ramBytesUsed());
    }
  }
  return raw.numerics;
}
/**
 * Loads an entire numeric field into a heap array sized by {@code entry.byteWidth}
 * (1, 2, 4 or 8 bytes per value) and wraps it in a {@link NumericDocValues} view.
 */
private NumericRawValues loadNumeric(NumericEntry entry) throws IOException {
  NumericRawValues ret = new NumericRawValues();
  // The values follow the missing-docs bitset in the data file; skip past it.
  data.seek(entry.offset + entry.missingBytes);
  switch (entry.byteWidth) {
  case 1:
    {
      final byte[] values = new byte[entry.count];
      // Byte-wide values can be read with one bulk call.
      data.readBytes(values, 0, entry.count);
      ret.bytesUsed = RamUsageEstimator.sizeOf(values);
      ret.numerics = new NumericDocValues() {
        @Override
        public long get(int idx) {
          return values[idx];
        }
      };
      return ret;
    }
  case 2:
    {
      final short[] values = new short[entry.count];
      for(int i=0;i<entry.count;i++) {
        values[i] = data.readShort();
      }
      ret.bytesUsed = RamUsageEstimator.sizeOf(values);
      ret.numerics = new NumericDocValues() {
        @Override
        public long get(int idx) {
          return values[idx];
        }
      };
      return ret;
    }
  case 4:
    {
      final int[] values = new int[entry.count];
      for(int i=0;i<entry.count;i++) {
        values[i] = data.readInt();
      }
      ret.bytesUsed = RamUsageEstimator.sizeOf(values);
      ret.numerics = new NumericDocValues() {
        @Override
        public long get(int idx) {
          return values[idx];
        }
      };
      return ret;
    }
  case 8:
    {
      final long[] values = new long[entry.count];
      for(int i=0;i<entry.count;i++) {
        values[i] = data.readLong();
      }
      ret.bytesUsed = RamUsageEstimator.sizeOf(values);
      ret.numerics = new NumericDocValues() {
        @Override
        public long get(int idx) {
          return values[idx];
        }
      };
      return ret;
    }
  default:
    // byteWidth is written by our own format; anything else indicates a bug upstream.
    throw new AssertionError();
  }
}
/**
 * Returns binary doc values for {@code field}, loading the raw bytes and the
 * per-document address table on first access (cached unless merging).
 */
@Override
public synchronized BinaryDocValues getBinary(FieldInfo field) throws IOException {
  BinaryRawValues instance = binaryInstances.get(field.name);
  if (instance == null) {
    // Lazy load
    instance = loadBinary(binaries.get(field.name));
    if (!merging) {
      binaryInstances.put(field.name, instance);
      ramBytesUsed.addAndGet(instance.ramBytesUsed());
    }
  }
  final byte[] bytes = instance.bytes;
  final int[] address = instance.address;
  // Return a fresh view each call: the BytesRef below is mutated in place on every get().
  return new BinaryDocValues() {
    final BytesRef term = new BytesRef();
    @Override
    public BytesRef get(int docID) {
      term.bytes = bytes;
      // Document docID's value occupies bytes[address[docID] .. address[docID+1]).
      term.offset = address[docID];
      term.length = address[docID+1] - term.offset;
      return term;
    }
  };
}
/**
 * Loads a binary field: the concatenated value bytes, then (after skipping the
 * missing-docs bitset) the count+1 offsets delimiting each document's slice.
 */
private BinaryRawValues loadBinary(BinaryEntry entry) throws IOException {
  data.seek(entry.offset);
  final byte[] bytes = new byte[entry.numBytes];
  data.readBytes(bytes, 0, entry.numBytes);
  data.seek(entry.offset + entry.numBytes + entry.missingBytes);
  // count+1 offsets: document i's bytes are [address[i], address[i+1]).
  final int[] address = new int[entry.count + 1];
  for (int i = 0; i < address.length; i++) {
    address[i] = data.readInt();
  }
  final BinaryRawValues raw = new BinaryRawValues();
  raw.bytes = bytes;
  raw.address = address;
  return raw;
}
/**
 * Returns sorted doc values for {@code field}. The doc-to-ordinal map is loaded
 * and cached under the lock (unless merging); the shared term dictionary is
 * obtained via {@link #getBinary}.
 */
@Override
public SortedDocValues getSorted(FieldInfo field) throws IOException {
  final SortedEntry entry = sorteds.get(field.name);
  SortedRawValues instance;
  synchronized (this) {
    instance = sortedInstances.get(field.name);
    if (instance == null) {
      // Lazy load
      instance = loadSorted(field);
      if (!merging) {
        sortedInstances.put(field.name, instance);
        ramBytesUsed.addAndGet(instance.ramBytesUsed());
      }
    }
  }
  return newSortedInstance(instance.docToOrd.numerics, getBinary(field), entry.values.count);
}
/**
 * Wraps a doc-to-ordinal map and an ordinal-to-bytes dictionary as a
 * {@link SortedDocValues} view with {@code count} distinct values.
 */
private SortedDocValues newSortedInstance(final NumericDocValues docToOrd, final BinaryDocValues values, final int count) {
  return new SortedDocValues() {
    @Override
    public int getOrd(int docID) {
      return (int) docToOrd.get(docID);
    }
    @Override
    public BytesRef lookupOrd(int ord) {
      return values.get(ord);
    }
    @Override
    public int getValueCount() {
      return count;
    }
    // Leave lookupTerm to super's binary search
    // Leave termsEnum to super
  };
}
/** Loads only the doc-to-ordinal numerics for a sorted field; the dictionary is loaded via getBinary. */
private SortedRawValues loadSorted(FieldInfo field) throws IOException {
  final SortedRawValues raw = new SortedRawValues();
  raw.docToOrd = loadNumeric(sorteds.get(field.name).docToOrd);
  return raw;
}
/**
 * Returns sorted-numeric doc values for {@code field}. Singleton fields (no
 * address table) are exposed through DocValues.singleton plus the missing-docs
 * bitset; multi-valued fields use the address table to delimit each document's
 * run of values.
 */
@Override
public synchronized SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException {
  SortedNumericRawValues instance = sortedNumericInstances.get(field.name);
  final SortedNumericEntry entry = sortedNumerics.get(field.name);
  if (instance == null) {
    // Lazy load
    instance = loadSortedNumeric(entry);
    if (!merging) {
      sortedNumericInstances.put(field.name, instance);
      ramBytesUsed.addAndGet(instance.ramBytesUsed());
    }
  }
  if (entry.docToAddress == null) {
    final NumericDocValues single = instance.values.numerics;
    final Bits docsWithField = getMissingBits(field, entry.values.missingOffset, entry.values.missingBytes);
    return DocValues.singleton(single, docsWithField);
  } else {
    final NumericDocValues docToAddress = instance.docToAddress.numerics;
    final NumericDocValues values = instance.values.numerics;
    // A new view per call: setDocument/valueAt/count are stateful.
    return new SortedNumericDocValues() {
      int valueStart;
      int valueLimit;
      @Override
      public void setDocument(int doc) {
        // Document doc's values occupy [docToAddress[doc], docToAddress[doc+1]).
        valueStart = (int) docToAddress.get(doc);
        valueLimit = (int) docToAddress.get(doc+1);
      }
      @Override
      public long valueAt(int index) {
        return values.get(valueStart + index);
      }
      @Override
      public int count() {
        return valueLimit - valueStart;
      }
    };
  }
}
/** Loads the per-doc address numerics (multi-valued case only) followed by the value numerics. */
private SortedNumericRawValues loadSortedNumeric(SortedNumericEntry entry) throws IOException {
  final SortedNumericRawValues raw = new SortedNumericRawValues();
  if (entry.docToAddress != null) {
    raw.docToAddress = loadNumeric(entry.docToAddress);
  }
  raw.values = loadNumeric(entry.values);
  return raw;
}
/**
 * Returns sorted-set doc values for {@code field}. Singleton fields are wrapped
 * as a single-valued sorted view; multi-valued fields get a stateful
 * RandomAccessOrds over the ordinal and address tables.
 */
@Override
public synchronized SortedSetDocValues getSortedSet(FieldInfo field) throws IOException {
  SortedSetRawValues instance = sortedSetInstances.get(field.name);
  final SortedSetEntry entry = sortedSets.get(field.name);
  if (instance == null) {
    // Lazy load
    instance = loadSortedSet(entry);
    if (!merging) {
      sortedSetInstances.put(field.name, instance);
      ramBytesUsed.addAndGet(instance.ramBytesUsed());
    }
  }
  if (instance.docToOrdAddress == null) {
    SortedDocValues sorted = newSortedInstance(instance.ords.numerics, getBinary(field), entry.values.count);
    return DocValues.singleton(sorted);
  } else {
    final NumericDocValues docToOrdAddress = instance.docToOrdAddress.numerics;
    final NumericDocValues ords = instance.ords.numerics;
    final BinaryDocValues values = getBinary(field);
    // Must make a new instance since the iterator has state:
    return new RandomAccessOrds() {
      int ordStart;
      int ordUpto;
      int ordLimit;
      @Override
      public long nextOrd() {
        if (ordUpto == ordLimit) {
          return NO_MORE_ORDS;
        } else {
          return ords.get(ordUpto++);
        }
      }
      @Override
      public void setDocument(int docID) {
        // Document docID's ordinals occupy [docToOrdAddress[docID], docToOrdAddress[docID+1]).
        ordStart = ordUpto = (int) docToOrdAddress.get(docID);
        ordLimit = (int) docToOrdAddress.get(docID+1);
      }
      @Override
      public BytesRef lookupOrd(long ord) {
        return values.get((int) ord);
      }
      @Override
      public long getValueCount() {
        return entry.values.count;
      }
      @Override
      public long ordAt(int index) {
        return ords.get(ordStart + index);
      }
      @Override
      public int cardinality() {
        return ordLimit - ordStart;
      }
      // Leave lookupTerm to super's binary search
      // Leave termsEnum to super
    };
  }
}
/** Loads the per-doc ordinal-address numerics (multi-valued case only) followed by the ordinals. */
private SortedSetRawValues loadSortedSet(SortedSetEntry entry) throws IOException {
  final SortedSetRawValues raw = new SortedSetRawValues();
  if (entry.docToOrdAddress != null) {
    raw.docToOrdAddress = loadNumeric(entry.docToOrdAddress);
  }
  raw.ords = loadNumeric(entry.ords);
  return raw;
}
/**
 * Returns the docs-with-value bitset for a field. An offset of -1 means every
 * document has a value; otherwise the bitset is read from the data file once
 * and cached (unless merging).
 */
private Bits getMissingBits(FieldInfo field, final long offset, final long length) throws IOException {
  if (offset == -1) {
    return new Bits.MatchAllBits(maxDoc);
  } else {
    FixedBitSet instance;
    synchronized(this) {
      instance = docsWithFieldInstances.get(field.name);
      if (instance == null) {
        // Clone the input so this read doesn't disturb the shared file pointer.
        IndexInput data = this.data.clone();
        data.seek(offset);
        assert length % 8 == 0; // the bitset is stored as whole 8-byte longs
        long bits[] = new long[(int) length >> 3];
        for (int i = 0; i < bits.length; i++) {
          bits[i] = data.readLong();
        }
        instance = new FixedBitSet(bits, maxDoc);
        if (!merging) {
          docsWithFieldInstances.put(field.name, instance);
          ramBytesUsed.addAndGet(instance.ramBytesUsed());
        }
      }
    }
    return instance;
  }
}
/**
 * Returns a bitset of documents that have a value for {@code field}: sorted
 * variants derive it from the loaded values, while numeric/binary fields read
 * their stored missing-docs bitset directly.
 */
@Override
public Bits getDocsWithField(FieldInfo field) throws IOException {
  switch(field.getDocValuesType()) {
    case SORTED_SET:
      return DocValues.docsWithValue(getSortedSet(field), maxDoc);
    case SORTED_NUMERIC:
      return DocValues.docsWithValue(getSortedNumeric(field), maxDoc);
    case SORTED:
      return DocValues.docsWithValue(getSorted(field), maxDoc);
    case BINARY:
      BinaryEntry be = binaries.get(field.name);
      return getMissingBits(field, be.missingOffset, be.missingBytes);
    case NUMERIC:
      NumericEntry ne = numerics.get(field.name);
      return getMissingBits(field, ne.missingOffset, ne.missingBytes);
    default:
      // Callers only ask for fields that actually have doc values.
      throw new AssertionError();
  }
}
/**
 * Returns a copy for merging that shares state already loaded by this producer
 * but does not cache anything it loads itself (the copy constructor sets
 * merging=true, which disables caching in the getters).
 */
@Override
public synchronized DocValuesProducer getMergeInstance() throws IOException {
  return new DirectDocValuesProducer(this);
}
/** Closes the data file; the metadata file was fully read and closed at construction. */
@Override
public void close() throws IOException {
  data.close();
}
/** Raw bytes plus the optional per-document offset table of a loaded binary field. */
static class BinaryRawValues implements Accountable {
  byte[] bytes;
  int[] address;

  @Override
  public long ramBytesUsed() {
    final long total = RamUsageEstimator.sizeOf(bytes);
    return address == null ? total : total + RamUsageEstimator.sizeOf(address);
  }

  @Override
  public Collection<Accountable> getChildResources() {
    final List<Accountable> children = new ArrayList<>();
    if (address != null) {
      children.add(Accountables.namedAccountable("addresses", RamUsageEstimator.sizeOf(address)));
    }
    children.add(Accountables.namedAccountable("bytes", RamUsageEstimator.sizeOf(bytes)));
    return Collections.unmodifiableList(children);
  }

  @Override
  public String toString() {
    return getClass().getSimpleName();
  }
}
/** A loaded numeric field: the values view plus its measured heap footprint. */
static class NumericRawValues implements Accountable {
  NumericDocValues numerics; // random-access view over the loaded array
  long bytesUsed;            // heap bytes of the backing array, set at load time
  @Override
  public long ramBytesUsed() {
    return bytesUsed;
  }
  @Override
  public Collection<Accountable> getChildResources() {
    return Collections.emptyList();
  }
  @Override
  public String toString() {
    return getClass().getSimpleName();
  }
}
/** A loaded sorted field: only the doc-to-ordinal numerics (the dictionary is cached separately). */
static class SortedRawValues implements Accountable {
  NumericRawValues docToOrd;
  @Override
  public long ramBytesUsed() {
    return docToOrd.ramBytesUsed();
  }
  @Override
  public Collection<Accountable> getChildResources() {
    return docToOrd.getChildResources();
  }
  @Override
  public String toString() {
    return getClass().getSimpleName();
  }
}
/** Loaded sorted-numeric field: value numerics plus an optional per-doc address table. */
static class SortedNumericRawValues implements Accountable {
  NumericRawValues docToAddress;
  NumericRawValues values;

  @Override
  public long ramBytesUsed() {
    final long base = values.ramBytesUsed();
    return docToAddress == null ? base : base + docToAddress.ramBytesUsed();
  }

  @Override
  public Collection<Accountable> getChildResources() {
    final List<Accountable> children = new ArrayList<>();
    if (docToAddress != null) {
      children.add(Accountables.namedAccountable("addresses", docToAddress));
    }
    children.add(Accountables.namedAccountable("values", values));
    return Collections.unmodifiableList(children);
  }

  @Override
  public String toString() {
    return getClass().getSimpleName();
  }
}
/** Loaded sorted-set field: ordinal numerics plus an optional per-doc address table. */
static class SortedSetRawValues implements Accountable {
  NumericRawValues docToOrdAddress;
  NumericRawValues ords;

  @Override
  public long ramBytesUsed() {
    final long base = ords.ramBytesUsed();
    return docToOrdAddress == null ? base : base + docToOrdAddress.ramBytesUsed();
  }

  @Override
  public Collection<Accountable> getChildResources() {
    final List<Accountable> children = new ArrayList<>();
    if (docToOrdAddress != null) {
      children.add(Accountables.namedAccountable("addresses", docToOrdAddress));
    }
    children.add(Accountables.namedAccountable("ordinals", ords));
    return Collections.unmodifiableList(children);
  }

  @Override
  public String toString() {
    return getClass().getSimpleName();
  }
}
/** Metadata for a numeric field: where its data lives in the data file and how it is encoded. */
static class NumericEntry {
  long offset;            // start of this field's region in the data file
  int count;              // number of values
  long missingOffset;     // offset of the missing-docs bitset, or -1 if every doc has a value
  long missingBytes;      // byte length of the missing bitset (0 when missingOffset == -1)
  byte byteWidth;         // bytes per value: 1, 2, 4 or 8 (see loadNumeric)
  int packedIntsVersion;  // NOTE(review): not assigned anywhere in the visible code
}
/** Metadata for a binary field: concatenated bytes, missing bitset, then count+1 addresses. */
static class BinaryEntry {
  long offset;
  long missingOffset;     // -1 when every doc has a value
  long missingBytes;
  int count;
  int numBytes;           // total length of all concatenated values
  int minLength;          // NOTE(review): not populated by readBinaryEntry in the visible code
  int maxLength;
  int packedIntsVersion;
  int blockSize;
}
/** Metadata for a sorted field: per-doc ordinals plus the binary term dictionary. */
static class SortedEntry {
  NumericEntry docToOrd;
  BinaryEntry values;
}
/** Metadata for a sorted-set field; docToOrdAddress is null for singleton fields. */
static class SortedSetEntry {
  NumericEntry docToOrdAddress;
  NumericEntry ords;
  BinaryEntry values;
}
/** Metadata for a sorted-numeric field; docToAddress is null for singleton fields. */
static class SortedNumericEntry {
  NumericEntry docToAddress;
  NumericEntry values;
}
/** NOTE(review): not referenced anywhere in the visible code — possibly vestigial. */
static class FSTEntry {
  long offset;
  long numOrds;
}
}
| |
/* Generic definitions */
/* Assertions (useful to generate conditional code) */
/* Current type and class (and size, if applicable) */
/* Value methods */
/* Interfaces (keys) */
/* Interfaces (values) */
/* Abstract implementations (keys) */
/* Abstract implementations (values) */
/* Static containers (keys) */
/* Static containers (values) */
/* Implementations */
/* Synchronized wrappers */
/* Unmodifiable wrappers */
/* Other wrappers */
/* Methods (keys) */
/* Methods (values) */
/* Methods (keys/values) */
/* Methods that have special names depending on keys (but the special names depend on values) */
/* Equality */
/* Object/Reference-only definitions (keys) */
/* Primitive-type-only definitions (keys) */
/* Object/Reference-only definitions (values) */
/*
* Copyright (C) 2007-2013 Sebastiano Vigna
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.unimi.dsi.fastutil.longs;
import java.util.Map;
import java.util.NoSuchElementException;
import it.unimi.dsi.fastutil.objects.AbstractObjectIterator;
import it.unimi.dsi.fastutil.objects.AbstractObjectSet;
import it.unimi.dsi.fastutil.objects.ObjectIterator;
import it.unimi.dsi.fastutil.objects.ObjectCollection;
import it.unimi.dsi.fastutil.objects.ObjectCollections;
import it.unimi.dsi.fastutil.objects.ObjectArraySet;
import it.unimi.dsi.fastutil.objects.ObjectArrays;
/** A simple, brute-force implementation of a map based on two parallel backing arrays.
*
* <p>The main purpose of this
* implementation is that of wrapping cleanly the brute-force approach to the storage of a very
* small number of pairs: just put them into two parallel arrays and scan linearly to find an item.
*/
public class Long2ObjectArrayMap <V> extends AbstractLong2ObjectMap <V> implements java.io.Serializable, Cloneable {
private static final long serialVersionUID = 1L;
/** The keys (valid up to {@link #size}, excluded). */
private transient long[] key;
/** The values (parallel to {@link #key}). */
private transient Object[] value;
/** The number of valid entries in {@link #key} and {@link #value}. */
private int size;
/** Creates a new empty array map with given key and value backing arrays. The resulting map will have as many entries as the given arrays.
*
* <p>It is responsibility of the caller that the elements of <code>key</code> are distinct.
*
* @param key the key array.
* @param value the value array (it <em>must</em> have the same length as <code>key</code>).
*/
public Long2ObjectArrayMap( final long[] key, final Object[] value ) {
this.key = key;
this.value = value;
size = key.length;
if( key.length != value.length ) throw new IllegalArgumentException( "Keys and values have different lengths (" + key.length + ", " + value.length + ")" );
}
/** Creates a new empty array map.
*/
public Long2ObjectArrayMap() {
this.key = LongArrays.EMPTY_ARRAY;
this.value = ObjectArrays.EMPTY_ARRAY;
}
/** Creates a new empty array map of given capacity.
*
* @param capacity the initial capacity.
*/
public Long2ObjectArrayMap( final int capacity ) {
this.key = new long[ capacity ];
this.value = new Object[ capacity ];
}
/** Creates a new empty array map copying the entries of a given map.
*
* @param m a map.
*/
public Long2ObjectArrayMap( final Long2ObjectMap <V> m ) {
this( m.size() );
putAll( m );
}
/** Creates a new empty array map copying the entries of a given map.
*
* @param m a map.
*/
public Long2ObjectArrayMap( final Map<? extends Long, ? extends V> m ) {
this( m.size() );
putAll( m );
}
/** Creates a new array map with given key and value backing arrays, using the given number of elements.
*
* <p>It is responsibility of the caller that the first <code>size</code> elements of <code>key</code> are distinct.
*
* @param key the key array.
* @param value the value array (it <em>must</em> have the same length as <code>key</code>).
* @param size the number of valid elements in <code>key</code> and <code>value</code>.
*/
public Long2ObjectArrayMap( final long[] key, final Object[] value, final int size ) {
this.key = key;
this.value = value;
this.size = size;
if( key.length != value.length ) throw new IllegalArgumentException( "Keys and values have different lengths (" + key.length + ", " + value.length + ")" );
if ( size > key.length ) throw new IllegalArgumentException( "The provided size (" + size + ") is larger than or equal to the backing-arrays size (" + key.length + ")" );
}
private final class EntrySet extends AbstractObjectSet<Long2ObjectMap.Entry <V> > implements FastEntrySet <V> {
@Override
public ObjectIterator<Long2ObjectMap.Entry <V> > iterator() {
return new AbstractObjectIterator<Long2ObjectMap.Entry <V> >() {
int next = 0;
public boolean hasNext() {
return next < size;
}
@SuppressWarnings("unchecked")
public Entry <V> next() {
if ( ! hasNext() ) throw new NoSuchElementException();
return new AbstractLong2ObjectMap.BasicEntry <V>( key[ next ], (V) value[ next++ ] );
}
};
}
public ObjectIterator<Long2ObjectMap.Entry <V> > fastIterator() {
return new AbstractObjectIterator<Long2ObjectMap.Entry <V> >() {
int next = 0;
final BasicEntry <V> entry = new BasicEntry <V> ( ((long)0), (null) );
public boolean hasNext() {
return next < size;
}
@SuppressWarnings("unchecked")
public Entry <V> next() {
if ( ! hasNext() ) throw new NoSuchElementException();
entry.key = key[ next ];
entry.value = (V) value[ next++ ];
return entry;
}
};
}
public int size() {
return size;
}
@SuppressWarnings("unchecked")
public boolean contains( Object o ) {
if ( ! ( o instanceof Map.Entry ) ) return false;
final Map.Entry<Long, V> e = (Map.Entry<Long, V>)o;
final long k = ((e.getKey()).longValue());
return Long2ObjectArrayMap.this.containsKey( k ) && ( (Long2ObjectArrayMap.this.get( k )) == null ? ((e.getValue())) == null : (Long2ObjectArrayMap.this.get( k )).equals((e.getValue())) );
}
}
public FastEntrySet <V> long2ObjectEntrySet() {
return new EntrySet();
}
private int findKey( final long k ) {
final long[] key = this.key;
for( int i = size; i-- != 0; ) if ( ( (key[ i ]) == (k) ) ) return i;
return -1;
}
@SuppressWarnings("unchecked")
public V get( final long k ) {
final long[] key = this.key;
for( int i = size; i-- != 0; ) if ( ( (key[ i ]) == (k) ) ) return (V) value[ i ];
return defRetValue;
}
public int size() {
return size;
}
@Override
public void clear() {
for( int i = size; i-- != 0; ) {
value[ i ] = null;
}
size = 0;
}
@Override
public boolean containsKey( final long k ) {
return findKey( k ) != -1;
}
@Override
@SuppressWarnings("unchecked")
public boolean containsValue( Object v ) {
for( int i = size; i-- != 0; ) if ( ( (value[ i ]) == null ? (v) == null : (value[ i ]).equals(v) ) ) return true;
return false;
}
@Override
public boolean isEmpty() {
return size == 0;
}
@Override
@SuppressWarnings("unchecked")
public V put( long k, V v ) {
final int oldKey = findKey( k );
if ( oldKey != -1 ) {
final V oldValue = (V) value[ oldKey ];
value[ oldKey ] = v;
return oldValue;
}
if ( size == key.length ) {
final long[] newKey = new long[ size == 0 ? 2 : size * 2 ];
final Object[] newValue = new Object[ size == 0 ? 2 : size * 2 ];
for( int i = size; i-- != 0; ) {
newKey[ i ] = key[ i ];
newValue[ i ] = value[ i ];
}
key = newKey;
value = newValue;
}
key[ size ] = k;
value[ size ] = v;
size++;
return defRetValue;
}
@Override
@SuppressWarnings("unchecked")
public V remove( final long k ) {
final int oldPos = findKey( k );
if ( oldPos == -1 ) return defRetValue;
final V oldValue = (V) value[ oldPos ];
final int tail = size - oldPos - 1;
for( int i = 0; i < tail; i++ ) {
key[ oldPos + i ] = key[ oldPos + i + 1 ];
value[ oldPos + i ] = value[ oldPos + i + 1 ];
}
size--;
value[ size ] = null;
return oldValue;
}
@Override
@SuppressWarnings("unchecked")
public LongSet keySet() {
return new LongArraySet ( key, size );
}
@Override
public ObjectCollection <V> values() {
return ObjectCollections.unmodifiable( new ObjectArraySet <V>( value, size ) );
}
/** Returns a deep copy of this map.
*
* <P>This method performs a deep copy of this hash map; the data stored in the
* map, however, is not cloned. Note that this makes a difference only for object keys.
*
* @return a deep copy of this map.
*/
@SuppressWarnings("unchecked")
public Long2ObjectArrayMap <V> clone() {
Long2ObjectArrayMap <V> c;
try {
c = (Long2ObjectArrayMap <V>)super.clone();
}
catch(CloneNotSupportedException cantHappen) {
throw new InternalError();
}
c.key = key.clone();
c.value = value.clone();
return c;
}
private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException {
s.defaultWriteObject();
for( int i = 0; i < size; i++ ) {
s.writeLong( key[ i ] );
s.writeObject( value[ i ] );
}
}
@SuppressWarnings("unchecked")
private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException {
s.defaultReadObject();
key = new long[ size ];
value = new Object[ size ];
for( int i = 0; i < size; i++ ) {
key[ i ] = s.readLong();
value[ i ] = s.readObject();
}
}
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.network.v2020_06_01;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* Description of metrics specification.
*/
public class MetricSpecification {
/** The name of the metric. */
@JsonProperty(value = "name")
private String name;
/** The display name of the metric. */
@JsonProperty(value = "displayName")
private String displayName;
/** The description of the metric. */
@JsonProperty(value = "displayDescription")
private String displayDescription;
/** Units the metric is displayed in. */
@JsonProperty(value = "unit")
private String unit;
/** The aggregation type. */
@JsonProperty(value = "aggregationType")
private String aggregationType;
/** List of availability. */
@JsonProperty(value = "availabilities")
private List<Availability> availabilities;
/** Whether the regional MDM account is enabled. */
@JsonProperty(value = "enableRegionalMdmAccount")
private Boolean enableRegionalMdmAccount;
/** Whether gaps would be filled with zeros. */
@JsonProperty(value = "fillGapWithZero")
private Boolean fillGapWithZero;
/** Pattern for the filter of the metric. */
@JsonProperty(value = "metricFilterPattern")
private String metricFilterPattern;
/** List of dimensions. */
@JsonProperty(value = "dimensions")
private List<Dimension> dimensions;
/** Whether the metric is internal. */
@JsonProperty(value = "isInternal")
private Boolean isInternal;
/** The source MDM account. */
@JsonProperty(value = "sourceMdmAccount")
private String sourceMdmAccount;
/** The source MDM namespace. */
@JsonProperty(value = "sourceMdmNamespace")
private String sourceMdmNamespace;
/** The resource Id dimension name override. */
@JsonProperty(value = "resourceIdDimensionNameOverride")
private String resourceIdDimensionNameOverride;
/**
 * Gets the name of the metric.
 *
 * @return the name value
 */
public String name() {
    return name;
}

/**
 * Sets the name of the metric.
 *
 * @param name the name value to set
 * @return the MetricSpecification object itself.
 */
public MetricSpecification withName(String name) {
    this.name = name;
    return this;
}
/**
 * Gets the display name of the metric.
 *
 * @return the displayName value
 */
public String displayName() {
    return displayName;
}

/**
 * Sets the display name of the metric.
 *
 * @param displayName the displayName value to set
 * @return the MetricSpecification object itself.
 */
public MetricSpecification withDisplayName(String displayName) {
    this.displayName = displayName;
    return this;
}
/**
 * Gets the description of the metric.
 *
 * @return the displayDescription value
 */
public String displayDescription() {
    return displayDescription;
}

/**
 * Sets the description of the metric.
 *
 * @param displayDescription the displayDescription value to set
 * @return the MetricSpecification object itself.
 */
public MetricSpecification withDisplayDescription(String displayDescription) {
    this.displayDescription = displayDescription;
    return this;
}
/**
 * Gets the units the metric is displayed in.
 *
 * @return the unit value
 */
public String unit() {
    return unit;
}

/**
 * Sets the units the metric is displayed in.
 *
 * @param unit the unit value to set
 * @return the MetricSpecification object itself.
 */
public MetricSpecification withUnit(String unit) {
    this.unit = unit;
    return this;
}
/**
 * Gets the aggregation type.
 *
 * @return the aggregationType value
 */
public String aggregationType() {
    return aggregationType;
}

/**
 * Sets the aggregation type.
 *
 * @param aggregationType the aggregationType value to set
 * @return the MetricSpecification object itself.
 */
public MetricSpecification withAggregationType(String aggregationType) {
    this.aggregationType = aggregationType;
    return this;
}
/**
 * Gets the list of availability.
 *
 * @return the availabilities value
 */
public List<Availability> availabilities() {
    return availabilities;
}

/**
 * Sets the list of availability.
 *
 * @param availabilities the availabilities value to set
 * @return the MetricSpecification object itself.
 */
public MetricSpecification withAvailabilities(List<Availability> availabilities) {
    this.availabilities = availabilities;
    return this;
}
/**
 * Gets whether the regional MDM account is enabled.
 *
 * @return the enableRegionalMdmAccount value
 */
public Boolean enableRegionalMdmAccount() {
    return enableRegionalMdmAccount;
}

/**
 * Sets whether the regional MDM account is enabled.
 *
 * @param enableRegionalMdmAccount the enableRegionalMdmAccount value to set
 * @return the MetricSpecification object itself.
 */
public MetricSpecification withEnableRegionalMdmAccount(Boolean enableRegionalMdmAccount) {
    this.enableRegionalMdmAccount = enableRegionalMdmAccount;
    return this;
}
/**
 * Gets whether gaps would be filled with zeros.
 *
 * @return the fillGapWithZero value
 */
public Boolean fillGapWithZero() {
    return fillGapWithZero;
}

/**
 * Sets whether gaps would be filled with zeros.
 *
 * @param fillGapWithZero the fillGapWithZero value to set
 * @return the MetricSpecification object itself.
 */
public MetricSpecification withFillGapWithZero(Boolean fillGapWithZero) {
    this.fillGapWithZero = fillGapWithZero;
    return this;
}
/**
 * Gets the pattern for the filter of the metric.
 *
 * @return the metricFilterPattern value
 */
public String metricFilterPattern() {
    return metricFilterPattern;
}

/**
 * Sets the pattern for the filter of the metric.
 *
 * @param metricFilterPattern the metricFilterPattern value to set
 * @return the MetricSpecification object itself.
 */
public MetricSpecification withMetricFilterPattern(String metricFilterPattern) {
    this.metricFilterPattern = metricFilterPattern;
    return this;
}
/**
 * Get the list of dimensions of the metric.
 *
 * @return the dimensions value (as stored; possibly {@code null} if never set)
 */
public List<Dimension> dimensions() {
    return this.dimensions;
}
/**
 * Set the list of dimensions of the metric.
 *
 * @param dimensions the dimensions value to set
 * @return this {@code MetricSpecification} object, to allow fluent call chaining
 */
public MetricSpecification withDimensions(List<Dimension> dimensions) {
    this.dimensions = dimensions;
    return this;
}
/**
 * Get whether the metric is internal.
 *
 * @return the isInternal value (possibly {@code null} if never set)
 */
public Boolean isInternal() {
    return this.isInternal;
}
/**
 * Set whether the metric is internal.
 *
 * @param isInternal the isInternal value to set
 * @return this {@code MetricSpecification} object, to allow fluent call chaining
 */
public MetricSpecification withIsInternal(Boolean isInternal) {
    this.isInternal = isInternal;
    return this;
}
/**
 * Get the source MDM account.
 *
 * @return the sourceMdmAccount value (possibly {@code null} if never set)
 */
public String sourceMdmAccount() {
    return this.sourceMdmAccount;
}
/**
 * Set the source MDM account.
 *
 * @param sourceMdmAccount the sourceMdmAccount value to set
 * @return this {@code MetricSpecification} object, to allow fluent call chaining
 */
public MetricSpecification withSourceMdmAccount(String sourceMdmAccount) {
    this.sourceMdmAccount = sourceMdmAccount;
    return this;
}
/**
 * Get the source MDM namespace.
 *
 * @return the sourceMdmNamespace value (possibly {@code null} if never set)
 */
public String sourceMdmNamespace() {
    return this.sourceMdmNamespace;
}
/**
 * Set the source MDM namespace.
 *
 * @param sourceMdmNamespace the sourceMdmNamespace value to set
 * @return this {@code MetricSpecification} object, to allow fluent call chaining
 */
public MetricSpecification withSourceMdmNamespace(String sourceMdmNamespace) {
    this.sourceMdmNamespace = sourceMdmNamespace;
    return this;
}
/**
 * Get the resource Id dimension name override.
 *
 * @return the resourceIdDimensionNameOverride value (possibly {@code null} if never set)
 */
public String resourceIdDimensionNameOverride() {
    return this.resourceIdDimensionNameOverride;
}
/**
 * Set the resource Id dimension name override.
 *
 * @param resourceIdDimensionNameOverride the resourceIdDimensionNameOverride value to set
 * @return this {@code MetricSpecification} object, to allow fluent call chaining
 */
public MetricSpecification withResourceIdDimensionNameOverride(String resourceIdDimensionNameOverride) {
    this.resourceIdDimensionNameOverride = resourceIdDimensionNameOverride;
    return this;
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gateway;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.settings.IndexSettings;
import java.util.*;
/**
 * The primary shard allocator allocates primary shards that were not created as
 * a result of an API call to a node that held them last, so that they can be
 * recovered from the data already present on that node.
 */
public abstract class PrimaryShardAllocator extends AbstractComponent {

    /** Per-index setting overriding how many started copies must be found before allocating a primary. */
    public static final String INDEX_RECOVERY_INITIAL_SHARDS = "index.recovery.initial_shards";

    // Node-level default for the initial_shards policy, resolved once at construction time.
    private final String initialShards;

    public PrimaryShardAllocator(Settings settings) {
        super(settings);
        // "gateway.initial_shards" supersedes the legacy "gateway.local.initial_shards"; defaults to "quorum".
        this.initialShards = settings.get("gateway.initial_shards", settings.get("gateway.local.initial_shards", "quorum"));
        logger.debug("using initial_shards [{}]", initialShards);
    }

    /**
     * Walks all unassigned shards and, for each primary that needs an existing copy,
     * tries to allocate it to the node holding the highest version of its data.
     *
     * @param allocation the ongoing routing allocation round
     * @return {@code true} if any shard was assigned (i.e. the routing table changed)
     */
    public boolean allocateUnassigned(RoutingAllocation allocation) {
        boolean changed = false;
        final RoutingNodes routingNodes = allocation.routingNodes();
        final MetaData metaData = routingNodes.metaData();
        final RoutingNodes.UnassignedShards.UnassignedIterator unassignedIterator = routingNodes.unassigned().iterator();
        while (unassignedIterator.hasNext()) {
            ShardRouting shard = unassignedIterator.next();
            if (needToFindPrimaryCopy(shard, routingNodes.routingTable().index(shard.index()).shard(shard.id())) == false) {
                continue;
            }
            // Fetching per-node started-shard state is asynchronous; skip this shard until the data arrives.
            AsyncShardFetch.FetchResult<TransportNodesListGatewayStartedShards.NodeGatewayStartedShards> shardState = fetchData(shard, allocation);
            if (shardState.hasData() == false) {
                logger.trace("{}: ignoring allocation, still fetching shard started state", shard);
                unassignedIterator.removeAndIgnore();
                continue;
            }
            IndexMetaData indexMetaData = metaData.index(shard.getIndex());
            NodesAndVersions nodesAndVersions = buildNodesAndVersions(shard, recoverOnAnyNode(indexMetaData.settings()), allocation.getIgnoreNodes(shard.shardId()), shardState);
            logger.debug("[{}][{}] found {} allocations of {}, highest version: [{}]", shard.index(), shard.id(), nodesAndVersions.allocationsFound, shard, nodesAndVersions.highestVersion);
            if (isEnoughAllocationsFound(shard, indexMetaData, nodesAndVersions) == false) {
                // if we are restoring this shard we still can allocate
                if (shard.restoreSource() == null) {
                    // we can't really allocate, so ignore it and continue
                    unassignedIterator.removeAndIgnore();
                    logger.debug("[{}][{}]: not allocating, number_of_allocated_shards_found [{}]", shard.index(), shard.id(), nodesAndVersions.allocationsFound);
                } else {
                    logger.debug("[{}][{}]: missing local data, will restore from [{}]", shard.index(), shard.id(), shard.restoreSource());
                }
                continue;
            }
            NodesToAllocate nodesToAllocate = buildNodesToAllocate(shard, allocation, nodesAndVersions);
            if (nodesToAllocate.yesNodes.isEmpty() == false) {
                DiscoveryNode node = nodesToAllocate.yesNodes.get(0);
                logger.debug("[{}][{}]: allocating [{}] to [{}] on primary allocation", shard.index(), shard.id(), shard, node);
                changed = true;
                unassignedIterator.initialize(node.id(), nodesAndVersions.highestVersion);
            } else if (nodesToAllocate.throttleNodes.isEmpty() && nodesToAllocate.noNodes.isEmpty() == false) {
                // No node said YES or THROTTLE, but some said NO: force-allocate to the first NO node,
                // because a primary has to live somewhere before the shard can become available.
                DiscoveryNode node = nodesToAllocate.noNodes.get(0);
                logger.debug("[{}][{}]: forcing allocating [{}] to [{}] on primary allocation", shard.index(), shard.id(), shard, node);
                changed = true;
                unassignedIterator.initialize(node.id(), nodesAndVersions.highestVersion);
            } else {
                // we are throttling this, but we have enough to allocate to this node, ignore it for now
                logger.debug("[{}][{}]: throttling allocation [{}] to [{}] on primary allocation", shard.index(), shard.id(), shard, nodesToAllocate.throttleNodes);
                unassignedIterator.removeAndIgnore();
            }
        }
        return changed;
    }

    /**
     * Does the shard need to find a primary copy?
     *
     * @return {@code true} only for primary shards that were allocated at least once
     *         after being created via an API (otherwise there is no data to find)
     */
    boolean needToFindPrimaryCopy(ShardRouting shard, IndexShardRoutingTable indexShardRoutingTable) {
        if (shard.primary() == false) {
            return false;
        }
        // this is an API allocation, ignore since we know there is no data...
        if (indexShardRoutingTable.primaryAllocatedPostApi() == false) {
            return false;
        }
        return true;
    }

    /**
     * Checks whether enough copies of the shard were found to satisfy the configured
     * {@code initial_shards} policy: "quorum", "quorum-1"/"half", "one", "full"/"all",
     * "full-1"/"all-1", or an explicit number. Unparseable values are logged and the
     * default of one required copy is used.
     */
    private boolean isEnoughAllocationsFound(ShardRouting shard, IndexMetaData indexMetaData, NodesAndVersions nodesAndVersions) {
        // check if the counts meets the minimum set
        int requiredAllocation = 1;
        // if we restore from a repository one copy is more than enough
        if (shard.restoreSource() == null) {
            try {
                // index-level setting wins over the node-level setting, which wins over the constructor default
                String initialShards = indexMetaData.settings().get(INDEX_RECOVERY_INITIAL_SHARDS, settings.get(INDEX_RECOVERY_INITIAL_SHARDS, this.initialShards));
                if ("quorum".equals(initialShards)) {
                    if (indexMetaData.numberOfReplicas() > 1) {
                        requiredAllocation = ((1 + indexMetaData.numberOfReplicas()) / 2) + 1;
                    }
                } else if ("quorum-1".equals(initialShards) || "half".equals(initialShards)) {
                    if (indexMetaData.numberOfReplicas() > 2) {
                        requiredAllocation = ((1 + indexMetaData.numberOfReplicas()) / 2);
                    }
                } else if ("one".equals(initialShards)) {
                    requiredAllocation = 1;
                } else if ("full".equals(initialShards) || "all".equals(initialShards)) {
                    requiredAllocation = indexMetaData.numberOfReplicas() + 1;
                } else if ("full-1".equals(initialShards) || "all-1".equals(initialShards)) {
                    if (indexMetaData.numberOfReplicas() > 1) {
                        requiredAllocation = indexMetaData.numberOfReplicas();
                    }
                } else {
                    requiredAllocation = Integer.parseInt(initialShards);
                }
            } catch (Exception e) {
                // note: the field (not the try-local) is logged here, since the local is out of scope
                logger.warn("[{}][{}] failed to derive initial_shards from value {}, ignore allocation for {}", shard.index(), shard.id(), initialShards, shard);
            }
        }
        return nodesAndVersions.allocationsFound >= requiredAllocation;
    }

    /**
     * Based on the nodes and versions, build the list of yes/no/throttle nodes that the shard applies to.
     * Nodes no longer present in the routing table are silently skipped.
     */
    private NodesToAllocate buildNodesToAllocate(ShardRouting shard, RoutingAllocation allocation, NodesAndVersions nodesAndVersions) {
        List<DiscoveryNode> yesNodes = new ArrayList<>();
        List<DiscoveryNode> throttledNodes = new ArrayList<>();
        List<DiscoveryNode> noNodes = new ArrayList<>();
        for (DiscoveryNode discoNode : nodesAndVersions.nodes) {
            RoutingNode node = allocation.routingNodes().node(discoNode.id());
            if (node == null) {
                continue;
            }
            Decision decision = allocation.deciders().canAllocate(shard, node, allocation);
            if (decision.type() == Decision.Type.THROTTLE) {
                throttledNodes.add(discoNode);
            } else if (decision.type() == Decision.Type.NO) {
                noNodes.add(discoNode);
            } else {
                yesNodes.add(discoNode);
            }
        }
        return new NodesToAllocate(Collections.unmodifiableList(yesNodes), Collections.unmodifiableList(throttledNodes), Collections.unmodifiableList(noNodes));
    }

    /**
     * Builds the list of nodes that hold a copy of the shard, ordered by copy version
     * (highest first), along with the number of usable copies found (ignored nodes
     * excluded) and the highest version seen.
     */
    private NodesAndVersions buildNodesAndVersions(ShardRouting shard, boolean recoveryOnAnyNode, Set<String> ignoreNodes,
                                                   AsyncShardFetch.FetchResult<TransportNodesListGatewayStartedShards.NodeGatewayStartedShards> shardState) {
        final Map<DiscoveryNode, Long> nodesWithVersion = Maps.newHashMap();
        int numberOfAllocationsFound = 0;
        long highestVersion = -1;
        for (TransportNodesListGatewayStartedShards.NodeGatewayStartedShards nodeShardState : shardState.getData().values()) {
            long version = nodeShardState.version();
            DiscoveryNode node = nodeShardState.getNode();
            if (ignoreNodes.contains(node.id())) {
                continue;
            }
            // -1 version means it does not exists, which is what the API returns, and what we expect to
            if (nodeShardState.storeException() == null) {
                logger.trace("[{}] on node [{}] has version [{}] of shard", shard, nodeShardState.getNode(), version);
            } else {
                // when there is an store exception, we disregard the reported version and assign it as -1 (same as shard does not exist)
                logger.trace("[{}] on node [{}] has version [{}] but the store can not be opened, treating as version -1", nodeShardState.storeException(), shard, nodeShardState.getNode(), version);
                version = -1;
            }
            if (recoveryOnAnyNode) {
                numberOfAllocationsFound++;
                if (version > highestVersion) {
                    highestVersion = version;
                }
                // We always put the node without clearing the map
                nodesWithVersion.put(node, version);
            } else if (version != -1) {
                numberOfAllocationsFound++;
                // If we've found a new "best" candidate, clear the
                // current candidates and add it
                if (version > highestVersion) {
                    highestVersion = version;
                    nodesWithVersion.clear();
                    nodesWithVersion.put(node, version);
                } else if (version == highestVersion) {
                    // If the candidate is the same, add it to the
                    // list, but keep the current candidate
                    nodesWithVersion.put(node, version);
                }
            }
        }
        // Now that we have a map of nodes to versions along with the
        // number of allocations found (and not ignored), we need to sort
        // it so the node with the highest version is at the beginning
        List<DiscoveryNode> nodesWithHighestVersion = Lists.newArrayList();
        nodesWithHighestVersion.addAll(nodesWithVersion.keySet());
        CollectionUtil.timSort(nodesWithHighestVersion, new Comparator<DiscoveryNode>() {
            @Override
            public int compare(DiscoveryNode o1, DiscoveryNode o2) {
                return Long.compare(nodesWithVersion.get(o2), nodesWithVersion.get(o1));
            }
        });
        if (logger.isTraceEnabled()) {
            StringBuilder sb = new StringBuilder("[");
            for (DiscoveryNode n : nodesWithVersion.keySet()) {
                sb.append("[").append(n.getName()).append("]").append(" -> ").append(nodesWithVersion.get(n)).append(", ");
            }
            sb.append("]");
            logger.trace("{} candidates for allocation: {}", shard, sb.toString());
        }
        return new NodesAndVersions(Collections.unmodifiableList(nodesWithHighestVersion), numberOfAllocationsFound, highestVersion);
    }

    /**
     * Return {@code true} if the index is configured to allow shards to be
     * recovered on any node
     */
    private boolean recoverOnAnyNode(@IndexSettings Settings idxSettings) {
        return IndexMetaData.isOnSharedFilesystem(idxSettings) &&
                idxSettings.getAsBoolean(IndexMetaData.SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE, false);
    }

    /** Fetches the per-node started-shard state for the given shard (implemented by subclasses). */
    protected abstract AsyncShardFetch.FetchResult<TransportNodesListGatewayStartedShards.NodeGatewayStartedShards> fetchData(ShardRouting shard, RoutingAllocation allocation);

    /** Immutable result of {@link #buildNodesAndVersions}: candidate nodes sorted by version, plus counts. */
    static class NodesAndVersions {
        public final List<DiscoveryNode> nodes;
        public final int allocationsFound;
        public final long highestVersion;

        public NodesAndVersions(List<DiscoveryNode> nodes, int allocationsFound, long highestVersion) {
            this.nodes = nodes;
            this.allocationsFound = allocationsFound;
            this.highestVersion = highestVersion;
        }
    }

    /** Immutable triage of candidate nodes by allocation-decider verdict (YES / THROTTLE / NO). */
    static class NodesToAllocate {
        final List<DiscoveryNode> yesNodes;
        final List<DiscoveryNode> throttleNodes;
        final List<DiscoveryNode> noNodes;

        public NodesToAllocate(List<DiscoveryNode> yesNodes, List<DiscoveryNode> throttleNodes, List<DiscoveryNode> noNodes) {
            this.yesNodes = yesNodes;
            this.throttleNodes = throttleNodes;
            this.noNodes = noNodes;
        }
    }
}
| |
/*
* Copyright (c) 2010-2017 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.model.intest;
import static com.evolveum.midpoint.test.IntegrationTestTools.display;
import static org.testng.AssertJUnit.assertNotNull;
import com.evolveum.midpoint.model.api.context.EvaluatedAssignmentTarget;
import com.evolveum.midpoint.model.test.AbstractModelIntegrationTest;
import com.evolveum.midpoint.prism.PrismContainer;
import com.evolveum.midpoint.prism.PrismObject;
import com.evolveum.midpoint.prism.PrismProperty;
import com.evolveum.midpoint.prism.PrismReferenceValue;
import com.evolveum.midpoint.prism.delta.ReferenceDelta;
import com.evolveum.midpoint.prism.path.ItemPath;
import com.evolveum.midpoint.prism.schema.PrismSchema;
import com.evolveum.midpoint.provisioning.ucf.impl.builtin.ManualConnectorInstance;
import com.evolveum.midpoint.schema.constants.MidPointConstants;
import com.evolveum.midpoint.schema.constants.SchemaConstants;
import com.evolveum.midpoint.schema.internals.InternalsConfig;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.task.api.Task;
import com.evolveum.midpoint.test.DummyResourceContoller;
import com.evolveum.midpoint.test.IntegrationTestTools;
import com.evolveum.midpoint.test.util.TestUtil;
import com.evolveum.midpoint.util.MiscUtil;
import com.evolveum.midpoint.util.QNameUtil;
import com.evolveum.midpoint.util.exception.CommunicationException;
import com.evolveum.midpoint.util.exception.ConfigurationException;
import com.evolveum.midpoint.util.exception.ExpressionEvaluationException;
import com.evolveum.midpoint.util.exception.ObjectAlreadyExistsException;
import com.evolveum.midpoint.util.exception.ObjectNotFoundException;
import com.evolveum.midpoint.util.exception.SchemaException;
import com.evolveum.midpoint.util.exception.SecurityViolationException;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
import com.evolveum.midpoint.xml.ns._public.common.common_3.RoleType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.SystemObjectsType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.TaskType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.UserType;
import org.testng.AssertJUnit;
import org.testng.IHookCallBack;
import org.testng.ITestResult;
import org.testng.annotations.AfterClass;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import java.io.File;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import javax.xml.datatype.XMLGregorianCalendar;
import javax.xml.namespace.QName;
/**
* @author semancik
*
*/
public class AbstractConfiguredModelIntegrationTest extends AbstractModelIntegrationTest {
public static final File SYSTEM_CONFIGURATION_FILE = new File(COMMON_DIR, "system-configuration.xml");
public static final String SYSTEM_CONFIGURATION_OID = SystemObjectsType.SYSTEM_CONFIGURATION.value();
protected static final int NUMBER_OF_GLOBAL_POLICY_RULES = 3;
public static final File USER_ADMINISTRATOR_FILE = new File(COMMON_DIR, "user-administrator.xml");
protected static final String USER_ADMINISTRATOR_OID = "00000000-0000-0000-0000-000000000002";
protected static final String USER_ADMINISTRATOR_USERNAME = "administrator";
protected static final String USER_TEMPLATE_FILENAME = COMMON_DIR + "/user-template.xml";
protected static final String USER_TEMPLATE_OID = "10000000-0000-0000-0000-000000000002";
protected static final File USER_TEMPLATE_COMPLEX_FILE = new File(COMMON_DIR, "user-template-complex.xml");
protected static final String USER_TEMPLATE_COMPLEX_OID = "10000000-0000-0000-0000-000000000222";
protected static final String USER_TEMPLATE_INBOUNDS_FILENAME = COMMON_DIR + "/user-template-inbounds.xml";
protected static final String USER_TEMPLATE_INBOUNDS_OID = "10000000-0000-0000-0000-000000000555";
protected static final String USER_TEMPLATE_COMPLEX_INCLUDE_FILENAME = COMMON_DIR + "/user-template-complex-include.xml";
protected static final String USER_TEMPLATE_COMPLEX_INCLUDE_OID = "10000000-0000-0000-0000-000000000223";
protected static final String USER_TEMPLATE_SYNC_FILENAME = COMMON_DIR + "/user-template-sync.xml";
protected static final String USER_TEMPLATE_SYNC_OID = "10000000-0000-0000-0000-000000000333";
protected static final String USER_TEMPLATE_ORG_ASSIGNMENT_FILENAME = COMMON_DIR + "/user-template-org-assignment.xml";
protected static final String USER_TEMPLATE_ORG_ASSIGNMENT_OID = "10000000-0000-0000-0000-000000000444";
protected static final File OBJECT_TEMPLATE_PERSONA_ADMIN_FILE = new File(COMMON_DIR, "object-template-persona-admin.xml");
protected static final String OBJECT_TEMPLATE_PERSONA_ADMIN_OID = "894ea1a8-2c0a-11e7-a950-ff2047b0c053";
protected static final String CONNECTOR_LDAP_FILENAME = COMMON_DIR + "/connector-ldap.xml";
protected static final String CONNECTOR_DBTABLE_FILENAME = COMMON_DIR + "/connector-dbtable.xml";
protected static final String CONNECTOR_DUMMY_FILENAME = COMMON_DIR + "/connector-dummy.xml";
protected static final File RESOURCE_DUMMY_FILE = new File(COMMON_DIR, "resource-dummy.xml");
protected static final File RESOURCE_DUMMY_DEPRECATED_FILE = new File(COMMON_DIR, "resource-dummy-deprecated.xml");
protected static final File RESOURCE_DUMMY_CACHING_FILE = new File(COMMON_DIR, "resource-dummy-caching.xml");
protected static final String RESOURCE_DUMMY_OID = "10000000-0000-0000-0000-000000000004";
protected static final String RESOURCE_DUMMY_NAMESPACE = "http://midpoint.evolveum.com/xml/ns/public/resource/instance/10000000-0000-0000-0000-000000000004";
protected static final String RESOURCE_DUMMY_DRINK = "rum";
protected static final String RESOURCE_DUMMY_QUOTE = "Arr!";
protected static final String RESOURCE_DUMMY_USELESS_STRING = "USEless";
// RED resource has STRONG mappings
protected static final File RESOURCE_DUMMY_RED_FILE = new File(COMMON_DIR, "resource-dummy-red.xml");
protected static final String RESOURCE_DUMMY_RED_OID = "10000000-0000-0000-0000-000000000104";
protected static final String RESOURCE_DUMMY_RED_NAME = "red";
protected static final String RESOURCE_DUMMY_RED_NAMESPACE = MidPointConstants.NS_RI;
protected static final String RESOURCE_DUMMY_RED_USELESS_STRING = IntegrationTestTools.CONST_USELESS;
// BLUE resource has WEAK mappings, outbound/inbound
protected static final File RESOURCE_DUMMY_BLUE_FILE = new File(COMMON_DIR, "resource-dummy-blue.xml");
protected static final File RESOURCE_DUMMY_BLUE_DEPRECATED_FILE = new File(COMMON_DIR, "resource-dummy-blue-deprecated.xml");
protected static final File RESOURCE_DUMMY_BLUE_CACHING_FILE = new File(COMMON_DIR, "resource-dummy-blue-caching.xml");
protected static final String RESOURCE_DUMMY_BLUE_OID = "10000000-0000-0000-0000-000000000204";
protected static final String RESOURCE_DUMMY_BLUE_NAME = "blue";
protected static final String RESOURCE_DUMMY_BLUE_NAMESPACE = MidPointConstants.NS_RI;
// CYAN has WEAK mappings, outbound only
protected static final File RESOURCE_DUMMY_CYAN_FILE = new File(COMMON_DIR, "resource-dummy-cyan.xml");
protected static final String RESOURCE_DUMMY_CYAN_OID = "10000000-0000-0000-0000-00000000c204";
protected static final String RESOURCE_DUMMY_CYAN_NAME = "cyan";
protected static final String RESOURCE_DUMMY_CYAN_NAMESPACE = MidPointConstants.NS_RI;
// WHITE dummy resource has almost no configuration: no schema, no schemahandling, no synchronization, ...
protected static final String RESOURCE_DUMMY_WHITE_FILENAME = COMMON_DIR + "/resource-dummy-white.xml";
protected static final String RESOURCE_DUMMY_WHITE_OID = "10000000-0000-0000-0000-000000000304";
protected static final String RESOURCE_DUMMY_WHITE_NAME = "white";
protected static final String RESOURCE_DUMMY_WHITE_NAMESPACE = MidPointConstants.NS_RI;
// YELLOW dummy resource is almost the same as default one but with strong asIs administrativeStatus mapping
// it also has minimal password length
protected static final File RESOURCE_DUMMY_YELLOW_FILE = new File(COMMON_DIR, "resource-dummy-yellow.xml");
protected static final String RESOURCE_DUMMY_YELLOW_OID = "10000000-0000-0000-0000-000000000704";
protected static final String RESOURCE_DUMMY_YELLOW_NAME = "yellow";
protected static final String RESOURCE_DUMMY_YELLOW_NAMESPACE = MidPointConstants.NS_RI;
// Green dummy resource is authoritative
protected static final File RESOURCE_DUMMY_GREEN_FILE = new File(COMMON_DIR, "resource-dummy-green.xml");
protected static final File RESOURCE_DUMMY_GREEN_DEPRECATED_FILE = new File(COMMON_DIR, "resource-dummy-green-deprecated.xml");
protected static final File RESOURCE_DUMMY_GREEN_CACHING_FILE = new File(COMMON_DIR, "resource-dummy-green-caching.xml");
protected static final String RESOURCE_DUMMY_GREEN_OID = "10000000-0000-0000-0000-000000000404";
protected static final String RESOURCE_DUMMY_GREEN_NAME = "green";
protected static final String RESOURCE_DUMMY_GREEN_NAMESPACE = MidPointConstants.NS_RI;
// This is authoritative resource similar to green resource but it has a bit wilder inbound mappings.
protected static final File RESOURCE_DUMMY_EMERALD_FILE = new File(COMMON_DIR, "resource-dummy-emerald.xml");
protected static final File RESOURCE_DUMMY_EMERALD_DEPRECATED_FILE = new File(COMMON_DIR, "resource-dummy-emerald-deprecated.xml");
protected static final String RESOURCE_DUMMY_EMERALD_OID = "10000000-0000-0000-0000-00000000e404";
protected static final String RESOURCE_DUMMY_EMERALD_NAME = "emerald";
protected static final String RESOURCE_DUMMY_EMERALD_NAMESPACE = MidPointConstants.NS_RI;
// Black dummy resource for testing tolerant attributes
protected static final File RESOURCE_DUMMY_BLACK_FILE = new File(COMMON_DIR, "resource-dummy-black.xml");
protected static final String RESOURCE_DUMMY_BLACK_OID = "10000000-0000-0000-0000-000000000305";
protected static final String RESOURCE_DUMMY_BLACK_NAME = "black";
protected static final String RESOURCE_DUMMY_BLACK_NAMESPACE = MidPointConstants.NS_RI;
// Relative dummy resource — NOTE(review): previous comment was copy-pasted from the black resource above; confirm this resource's actual purpose
protected static final File RESOURCE_DUMMY_RELATIVE_FILE = new File(COMMON_DIR, "resource-dummy-relative.xml");
protected static final String RESOURCE_DUMMY_RELATIVE_OID = "adcd4654-0f15-11e7-8337-0bdf60ad7bcd";
protected static final String RESOURCE_DUMMY_RELATIVE_NAME = "relative";
protected static final String RESOURCE_DUMMY_RELATIVE_NAMESPACE = MidPointConstants.NS_RI;
// Orange dummy resource for testing associations with resource-provided referential integrity
// It also has very few outbound expressions and some unusual inbound expressions.
protected static final File RESOURCE_DUMMY_ORANGE_FILE = new File(COMMON_DIR, "resource-dummy-orange.xml");
protected static final String RESOURCE_DUMMY_ORANGE_OID = "10000000-0000-0000-0000-000000001104";
protected static final String RESOURCE_DUMMY_ORANGE_NAME = "orange";
protected static final String RESOURCE_DUMMY_ORANGE_NAMESPACE = MidPointConstants.NS_RI;
protected static final QName RESOURCE_DUMMY_ORANGE_ASSOCIATION_CREW_QNAME = new QName(RESOURCE_DUMMY_ORANGE_NAMESPACE, "crew");
protected static final String RESOURCE_DUMMY_SCHEMALESS_FILENAME = COMMON_DIR + "/resource-dummy-schemaless-no-schema.xml";
protected static final String RESOURCE_DUMMY_SCHEMALESS_OID = "ef2bc95b-76e0-59e2-86d6-9999dddd0000";
protected static final String RESOURCE_DUMMY_SCHEMALESS_NAME = "schemaless";
protected static final String RESOURCE_DUMMY_SCHEMALESS_NAMESPACE = MidPointConstants.NS_RI;
// Upcase resource turns all names to upper case. It is also caseInsensitive resource
protected static final File RESOURCE_DUMMY_UPCASE_FILE = new File(COMMON_DIR, "resource-dummy-upcase.xml");
protected static final String RESOURCE_DUMMY_UPCASE_OID = "10000000-0000-0000-0000-000000001204";
protected static final String RESOURCE_DUMMY_UPCASE_NAME = "upcase";
protected static final String RESOURCE_DUMMY_UPCASE_NAMESPACE = MidPointConstants.NS_RI;
protected static final QName RESOURCE_DUMMY_UPCASE_ASSOCIATION_GROUP_QNAME = new QName(RESOURCE_DUMMY_UPCASE_NAMESPACE, "group");
protected static final String RESOURCE_DUMMY_FAKE_FILENAME = COMMON_DIR + "/resource-dummy-fake.xml";
protected static final String RESOURCE_DUMMY_FAKE_OID = "10000000-0000-0000-0000-00000000000f";
public static final File ROLE_SUPERUSER_FILE = new File(COMMON_DIR, "role-superuser.xml");
protected static final String ROLE_SUPERUSER_OID = "00000000-0000-0000-0000-000000000004";
protected static final File ROLE_PIRATE_FILE = new File(COMMON_DIR, "role-pirate.xml");
protected static final String ROLE_PIRATE_OID = "12345678-d34d-b33f-f00d-555555556666";
protected static final String ROLE_PIRATE_NAME = "Pirate";
protected static final String ROLE_PIRATE_DESCRIPTION = "Scurvy Pirates";
protected static final String ROLE_PIRATE_TITLE = "Bloody Pirate";
protected static final String ROLE_PIRATE_WEAPON = "cutlass";
protected static final File ROLE_CARIBBEAN_PIRATE_FILE = new File(COMMON_DIR, "role-caribbean-pirate.xml");
protected static final String ROLE_CARIBBEAN_PIRATE_OID = "0719ec66-edd9-11e6-bd70-03a74157ff9e";
protected static final File ROLE_PIRATE_GREEN_FILE = new File(COMMON_DIR, "role-pirate-green.xml");
protected static final String ROLE_PIRATE_GREEN_OID = "12345678-d34d-b33f-f00d-555555557777";
protected static final String ROLE_PIRATE_GREEN_NAME = "Pirate Green";
protected static final String ROLE_PIRATE_GREEN_DESCRIPTION = "Scurvy Pirates";
protected static final File ROLE_PIRATE_RELATIVE_FILE = new File(COMMON_DIR, "role-pirate-relative.xml");
protected static final String ROLE_PIRATE_RELATIVE_OID = "4a579cd0-0f17-11e7-967c-130ecd6fb7dc";
protected static final String ROLE_PIRAT_RELATIVEE_NAME = "Relative Pirate";
protected static final File ROLE_BUCCANEER_GREEN_FILE = new File(COMMON_DIR, "role-buccaneer-green.xml");
protected static final String ROLE_BUCCANEER_GREEN_OID = "12345678-d34d-b33f-f00d-555555558888";
protected static final String ROLE_BUCCANEER_GREEN_NAME = "Bucaneers Green";
protected static final String ROLE_BUCCANEER_GREEN_DESCRIPTION = "Scurvy Bucaneers";
protected static final String ROLE_NICE_PIRATE_FILENAME = COMMON_DIR + "/role-nice-pirate.xml";
protected static final String ROLE_NICE_PIRATE_OID = "12345678-d34d-b33f-f00d-555555556677";
protected static final String ROLE_CAPTAIN_FILENAME = COMMON_DIR + "/role-captain.xml";
protected static final String ROLE_CAPTAIN_OID = "12345678-d34d-b33f-f00d-55555555cccc";
// Excludes role "pirate"
protected static final File ROLE_JUDGE_FILE = new File(COMMON_DIR, "role-judge.xml");
protected static final String ROLE_JUDGE_OID = "12345111-1111-2222-1111-121212111111";
protected static final String ROLE_JUDGE_TITLE = "Honorable Justice";
protected static final String ROLE_JUDGE_DRINK = "tea";
protected static final String ROLE_JUDGE_DESCRIPTION = "Role with role exclusions";
protected static final String ROLE_JUDGE_POLICY_RULE_EXCLUSION_PREFIX = "criminal exclusion: ";
protected static final File ROLE_JUDGE_DEPRECATED_FILE = new File(COMMON_DIR, "role-judge-deprecated.xml");
protected static final String ROLE_JUDGE_DEPRECATED_OID = "12345111-1111-2222-1111-d21212111111";
// ----- Roles -----
protected static final File ROLE_THIEF_FILE = new File(COMMON_DIR, "role-thief.xml");
protected static final String ROLE_THIEF_OID = "b189fcb8-1ff9-11e5-8912-001e8c717e5b";
protected static final File ROLE_EMPTY_FILE = new File(COMMON_DIR, "role-empty.xml");
protected static final String ROLE_EMPTY_OID = "12345111-1111-2222-1111-121212111112";
protected static final File ROLE_SAILOR_FILE = new File(COMMON_DIR, "role-sailor.xml");
protected static final String ROLE_SAILOR_OID = "12345111-1111-2222-1111-121212111113";
protected static final String ROLE_SAILOR_DRINK = "grog";
protected static final File ROLE_RED_SAILOR_FILE = new File(COMMON_DIR, "role-red-sailor.xml");
protected static final String ROLE_RED_SAILOR_OID = "12345111-1111-2222-1111-121212111223";
protected static final File ROLE_CYAN_SAILOR_FILE = new File(COMMON_DIR, "role-cyan-sailor.xml");
protected static final String ROLE_CYAN_SAILOR_OID = "d3abd794-9c30-11e6-bb5a-af14bf2cc29b";
protected static final File ROLE_STRONG_SAILOR_FILE = new File(COMMON_DIR, "role-strong-sailor.xml");
protected static final String ROLE_STRONG_SAILOR_OID = "0bf7532e-7d15-11e7-8594-7bff6e0adc6e";
protected static final File ROLE_DRINKER_FILE = new File(COMMON_DIR, "role-drinker.xml");
protected static final String ROLE_DRINKER_OID = "0abbde4c-ab3f-11e6-910d-d7dabf5f09f0";
protected static final File ROLE_PERSONA_ADMIN_FILE = new File(COMMON_DIR, "role-persona-admin.xml");
protected static final String ROLE_PERSONA_ADMIN_OID = "16813ae6-2c0a-11e7-91fc-8333c244329e";
// ----- Users -----
protected static final File USER_JACK_FILE = new File(COMMON_DIR, "user-jack.xml");
protected static final String USER_JACK_OID = "c0c010c0-d34d-b33f-f00d-111111111111";
protected static final String USER_JACK_USERNAME = "jack";
protected static final String USER_JACK_FULL_NAME = "Jack Sparrow";
protected static final String USER_JACK_GIVEN_NAME = "Jack";
protected static final String USER_JACK_FAMILY_NAME = "Sparrow";
protected static final String USER_JACK_ADDITIONAL_NAME = "Jackie";
protected static final String USER_JACK_EMPLOYEE_TYPE = "CAPTAIN";
protected static final String USER_JACK_EMPLOYEE_NUMBER = "emp1234";
protected static final String USER_JACK_LOCALITY = "Caribbean";
protected static final String USER_JACK_PASSWORD = "deadmentellnotales";
protected static final File USER_BARBOSSA_FILE = new File(COMMON_DIR, "user-barbossa.xml");
protected static final String USER_BARBOSSA_OID = "c0c010c0-d34d-b33f-f00d-111111111112";
protected static final String USER_BARBOSSA_USERNAME = "barbossa";
protected static final String USER_BARBOSSA_FULL_NAME = "Hector Barbossa";
protected static final File USER_GUYBRUSH_FILE = new File (COMMON_DIR, "user-guybrush.xml");
protected static final String USER_GUYBRUSH_OID = "c0c010c0-d34d-b33f-f00d-111111111116";
protected static final String USER_GUYBRUSH_USERNAME = "guybrush";
protected static final String USER_GUYBRUSH_FULL_NAME = "Guybrush Threepwood";
protected static final String USER_GUYBRUSH_GIVEN_NAME = "Guybrush";
protected static final String USER_GUYBRUSH_FAMILY_NAME = "Threepwood";
protected static final String USER_GUYBRUSH_LOCALITY = "Melee Island";
// Largo does not have a full name set, employeeType=PIRATE
protected static final File USER_LARGO_FILE = new File(COMMON_DIR, "user-largo.xml");
protected static final String USER_LARGO_OID = "c0c010c0-d34d-b33f-f00d-111111111118";
protected static final String USER_LARGO_USERNAME = "largo";
// Rapp does not have a full name set, employeeType=COOK
protected static final File USER_RAPP_FILE = new File(COMMON_DIR, "user-rapp.xml");
protected static final String USER_RAPP_OID = "c0c010c0-d34d-b33f-f00d-11111111c008";
protected static final String USER_RAPP_USERNAME = "rapp";
protected static final String USER_RAPP_FULLNAME = "Rapp Scallion";
// Herman has validity dates set in the activation part
protected static final File USER_HERMAN_FILE = new File(COMMON_DIR, "user-herman.xml");
protected static final String USER_HERMAN_OID = "c0c010c0-d34d-b33f-f00d-111111111122";
protected static final String USER_HERMAN_USERNAME = "herman";
protected static final String USER_HERMAN_GIVEN_NAME = "Herman";
protected static final String USER_HERMAN_FAMILY_NAME = "Toothrot";
protected static final String USER_HERMAN_FULL_NAME = "Herman Toothrot";
protected static final String USER_HERMAN_PASSWORD = "m0nk3y";
protected static final Date USER_HERMAN_VALID_FROM_DATE = MiscUtil.asDate(1700, 5, 30, 11, 00, 00);
protected static final Date USER_HERMAN_VALID_TO_DATE = MiscUtil.asDate(2233, 3, 23, 18, 30, 00);
// Has null name, does not have given name, no employeeType
protected static final String USER_THREE_HEADED_MONKEY_FILENAME = COMMON_DIR + "/user-three-headed-monkey.xml";
protected static final String USER_THREE_HEADED_MONKEY_OID = "c0c010c0-d34d-b33f-f00d-110011001133";
// Elaine has account on the dummy resources (default, red, blue)
// The accounts are also assigned
static final File USER_ELAINE_FILE = new File (COMMON_DIR, "user-elaine.xml");
protected static final String USER_ELAINE_OID = "c0c010c0-d34d-b33f-f00d-11111111111e";
protected static final String USER_ELAINE_USERNAME = "elaine";
// Captain Kate Capsize does not exist in the repo. This user is designed to be added.
// She has account on dummy resources (default, red, blue)
// The accounts are also assigned
static final File USER_CAPSIZE_FILE = new File(COMMON_DIR, "user-capsize.xml");
protected static final String USER_CAPSIZE_OID = "c0c010c0-d34d-b33f-f00d-11c1c1c1c11c";
protected static final String USER_CAPSIZE_USERNAME = "capsize";
protected static final File USER_DRAKE_FILE = new File(COMMON_DIR, "user-drake.xml");
protected static final String USER_DRAKE_OID = "c0c010c0-d34d-b33f-f00d-11d1d1d1d1d1";
protected static final String USER_DRAKE_USERNAME = "drake";
// ----- Accounts and shadows -----
public static final File ACCOUNT_JACK_DUMMY_FILE = new File(COMMON_DIR, "account-jack-dummy.xml");
public static final File ACCOUNT_JACK_DUMMY_RED_FILE = new File(COMMON_DIR, "account-jack-dummy-red.xml");
public static final String ACCOUNT_JACK_DUMMY_USERNAME = "jack";
public static final String ACCOUNT_JACK_DUMMY_FULLNAME = "Jack Sparrow";
public static final File ACCOUNT_HERMAN_DUMMY_FILE = new File(COMMON_DIR, "account-herman-dummy.xml");
public static final String ACCOUNT_HERMAN_DUMMY_OID = "22220000-2200-0000-0000-444400004444";
public static final String ACCOUNT_HERMAN_DUMMY_USERNAME = "ht";
public static final String ACCOUNT_HERMAN_OPENDJ_FILENAME = COMMON_DIR + "/account-herman-opendj.xml";
public static final String ACCOUNT_HERMAN_OPENDJ_OID = "22220000-2200-0000-0000-333300003333";
public static final File ACCOUNT_SHADOW_GUYBRUSH_DUMMY_FILE = new File(COMMON_DIR, "account-shadow-guybrush-dummy.xml");
public static final String ACCOUNT_SHADOW_GUYBRUSH_OID = "22226666-2200-6666-6666-444400004444";
public static final String ACCOUNT_GUYBRUSH_DUMMY_USERNAME = "guybrush";
public static final String ACCOUNT_GUYBRUSH_DUMMY_FULLNAME = "Guybrush Threepwood";
public static final String ACCOUNT_GUYBRUSH_DUMMY_LOCATION = "Melee Island";
public static final File ACCOUNT_GUYBRUSH_DUMMY_FILE = new File (COMMON_DIR, "account-guybrush-dummy.xml");
public static final File ACCOUNT_GUYBRUSH_DUMMY_RED_FILE = new File(COMMON_DIR, "account-guybrush-dummy-red.xml");
public static final String ACCOUNT_SHADOW_JACK_DUMMY_FILENAME = COMMON_DIR + "/account-shadow-jack-dummy.xml";
public static final String ACCOUNT_DAVIEJONES_DUMMY_USERNAME = "daviejones";
public static final String ACCOUNT_CALYPSO_DUMMY_USERNAME = "calypso";
public static final File ACCOUNT_SHADOW_ELAINE_DUMMY_FILE = new File(COMMON_DIR, "account-elaine-dummy.xml");
public static final String ACCOUNT_SHADOW_ELAINE_DUMMY_OID = "c0c010c0-d34d-b33f-f00d-22220004000e";
public static final String ACCOUNT_ELAINE_DUMMY_USERNAME = USER_ELAINE_USERNAME;
public static final String ACCOUNT_ELAINE_DUMMY_FULLNAME = "Elaine Marley";
public static final File ACCOUNT_SHADOW_ELAINE_DUMMY_RED_FILE = new File(COMMON_DIR, "account-elaine-dummy-red.xml");
public static final String ACCOUNT_SHADOW_ELAINE_DUMMY_RED_OID = "c0c010c0-d34d-b33f-f00d-22220104000e";
public static final String ACCOUNT_ELAINE_DUMMY_RED_USERNAME = USER_ELAINE_USERNAME;
public static final File ACCOUNT_SHADOW_ELAINE_DUMMY_BLUE_FILE = new File(COMMON_DIR, "account-elaine-dummy-blue.xml");
public static final String ACCOUNT_SHADOW_ELAINE_DUMMY_BLUE_OID = "c0c010c0-d34d-b33f-f00d-22220204000e";
public static final String ACCOUNT_ELAINE_DUMMY_BLUE_USERNAME = USER_ELAINE_USERNAME;
// ----- Groups -----
public static final File GROUP_PIRATE_DUMMY_FILE = new File(COMMON_DIR, "group-pirate-dummy.xml");
public static final String GROUP_PIRATE_DUMMY_NAME = "pirate";
public static final String GROUP_PIRATE_DUMMY_DESCRIPTION = "Scurvy pirates";
public static final File SHADOW_GROUP_DUMMY_TESTERS_FILE = new File(COMMON_DIR, "group-testers-dummy.xml");
public static final String SHADOW_GROUP_DUMMY_TESTERS_OID = "20000000-0000-0000-3333-000000000002";
public static final String GROUP_DUMMY_TESTERS_NAME = "testers";
public static final String GROUP_DUMMY_TESTERS_DESCRIPTION = "To boldly go where no pirate has gone before";
public static final File GROUP_SHADOW_JOKER_DUMMY_UPCASE_FILE = new File(COMMON_DIR, "group-shadow-dummy-upcase-joker.xml");
public static final String GROUP_SHADOW_JOKER_DUMMY_UPCASE_OID = "bc2a1d98-9ca4-11e4-a600-001e8c717e5b";
public static final String GROUP_SHADOW_JOKER_DUMMY_UPCASE_NAME = "joker";
public static final String GROUP_JOKER_DUMMY_UPCASE_NAME = "JOKER";
public static final String DUMMY_ORG_TOP_NAME = DummyResourceContoller.ORG_TOP_NAME;
// ----- Password policies -----
protected static final File PASSWORD_POLICY_GLOBAL_FILE = new File(COMMON_DIR, "password-policy-global.xml");
protected static final String PASSWORD_POLICY_GLOBAL_OID = "12344321-0000-0000-0000-000000000003";
protected static final File PASSWORD_POLICY_BENEVOLENT_FILE = new File(COMMON_DIR, "password-policy-benevolent.xml");
protected static final String PASSWORD_POLICY_BENEVOLENT_OID = "ed8026dc-569a-11e7-abdf-4fce56706755";
// ----- Organizational structure (functional and project trees) -----
protected static final File ORG_MONKEY_ISLAND_FILE = new File(COMMON_DIR, "org-monkey-island.xml");
protected static final String ORG_GOVERNOR_OFFICE_OID = "00000000-8888-6666-0000-100000000001";
protected static final String ORG_SCUMM_BAR_OID = "00000000-8888-6666-0000-100000000006";
protected static final String ORG_SCUMM_BAR_NAME = "F0006";
protected static final String ORG_SCUMM_BAR_DISPLAY_NAME = "Scumm Bar";
protected static final String ORG_MINISTRY_OF_OFFENSE_OID = "00000000-8888-6666-0000-100000000003";
protected static final String ORG_MINISTRY_OF_DEFENSE_OID = "00000000-8888-6666-0000-100000000002";
protected static final String ORG_MINISTRY_OF_RUM_OID = "00000000-8888-6666-0000-100000000004";
protected static final String ORG_MINISTRY_OF_RUM_NAME = "F0004";
protected static final String ORG_SWASHBUCKLER_SECTION_OID = "00000000-8888-6666-0000-100000000005";
protected static final String ORG_PROJECT_ROOT_OID = "00000000-8888-6666-0000-200000000000";
protected static final String ORG_SAVE_ELAINE_OID = "00000000-8888-6666-0000-200000000001";
protected static final String ORG_KIDNAP_AND_MARRY_ELAINE_OID = "00000000-8888-6666-0000-200000000002";
protected static final String ORG_TYPE_FUNCTIONAL = "functional";
protected static final String ORG_TYPE_PROJECT = "project";
// ----- Services -----
protected static final File SERVICE_SHIP_SEA_MONKEY_FILE = new File(COMMON_DIR, "service-ship-sea-monkey.xml");
protected static final String SERVICE_SHIP_SEA_MONKEY_OID = "914b94be-1901-11e6-9269-972ee32cd8db";
// ----- Tasks (reconciliation, live sync, scanners) -----
protected static final String TASK_RECONCILE_DUMMY_FILENAME = COMMON_DIR + "/task-reconcile-dummy.xml";
protected static final String TASK_RECONCILE_DUMMY_OID = "10000000-0000-0000-5656-565600000004";
protected static final String TASK_RECONCILE_DUMMY_BLUE_FILENAME = COMMON_DIR + "/task-reconcile-dummy-blue.xml";
protected static final String TASK_RECONCILE_DUMMY_BLUE_OID = "10000000-0000-0000-5656-565600000204";
protected static final String TASK_RECONCILE_DUMMY_GREEN_FILENAME = COMMON_DIR + "/task-reconcile-dummy-green.xml";
protected static final String TASK_RECONCILE_DUMMY_GREEN_OID = "10000000-0000-0000-5656-565600000404";
protected static final String TASK_LIVE_SYNC_DUMMY_FILENAME = COMMON_DIR + "/task-dumy-livesync.xml";
protected static final String TASK_LIVE_SYNC_DUMMY_OID = "10000000-0000-0000-5555-555500000004";
protected static final String TASK_LIVE_SYNC_DUMMY_BLUE_FILENAME = COMMON_DIR + "/task-dumy-blue-livesync.xml";
protected static final String TASK_LIVE_SYNC_DUMMY_BLUE_OID = "10000000-0000-0000-5555-555500000204";
protected static final String TASK_LIVE_SYNC_DUMMY_GREEN_FILENAME = COMMON_DIR + "/task-dumy-green-livesync.xml";
protected static final String TASK_LIVE_SYNC_DUMMY_GREEN_OID = "10000000-0000-0000-5555-555500000404";
protected static final String TASK_VALIDITY_SCANNER_FILENAME = COMMON_DIR + "/task-validity-scanner.xml";
protected static final String TASK_VALIDITY_SCANNER_OID = "10000000-0000-0000-5555-555505060400";
protected static final File TASK_TRIGGER_SCANNER_FILE = new File(COMMON_DIR, "task-trigger-scanner.xml");
protected static final String TASK_TRIGGER_SCANNER_OID = "00000000-0000-0000-0000-000000000007";
protected static final File TASK_MOCK_JACK_FILE = new File(COMMON_DIR, "task-mock-jack.xml");
protected static final String TASK_MOCK_JACK_OID = "10000000-0000-0000-5656-565674633311";
// ----- Lookup tables -----
public static final File LOOKUP_LANGUAGES_FILE = new File(COMMON_DIR, "lookup-languages.xml");
public static final String LOOKUP_LANGUAGES_OID = "70000000-0000-0000-1111-000000000001";
public static final String LOOKUP_LANGUAGES_NAME = "Languages";
// ----- Security policy -----
protected static final File SECURITY_POLICY_FILE = new File(COMMON_DIR, "security-policy.xml");
protected static final String SECURITY_POLICY_OID = "28bf845a-b107-11e3-85bc-001e8c717e5b";
// ----- "Piracy" sample extension schema item names -----
protected static final String NS_PIRACY = "http://midpoint.evolveum.com/xml/ns/samples/piracy";
protected static final QName PIRACY_SHIP = new QName(NS_PIRACY, "ship");
protected static final QName PIRACY_SHIP_BROKEN = new QName(NS_PIRACY, "ship-broken");
protected static final QName PIRACY_TALES = new QName(NS_PIRACY, "tales");
protected static final QName PIRACY_WEAPON = new QName(NS_PIRACY, "weapon");
protected static final QName PIRACY_LOOT = new QName(NS_PIRACY, "loot");
protected static final QName PIRACY_BAD_LUCK = new QName(NS_PIRACY, "badLuck");
protected static final QName PIRACY_FUNERAL_TIMESTAMP = new QName(NS_PIRACY, "funeralTimestamp");
protected static final QName PIRACY_SEA_QNAME = new QName(NS_PIRACY, "sea");
protected static final QName PIRACY_COLORS = new QName(NS_PIRACY, "colors");
protected static final QName PIRACY_MARK = new QName(NS_PIRACY, "mark");
protected static final QName PIRACY_KEY = new QName(NS_PIRACY, "key");
protected static final QName PIRACY_BINARY_ID = new QName(NS_PIRACY, "binaryId");
protected static final ItemPath ROLE_EXTENSION_COST_CENTER_PATH = new ItemPath(RoleType.F_EXTENSION, new QName(NS_PIRACY, "costCenter"));
// ----- Dummy resource attribute names and kind/intent values -----
protected static final String DUMMY_ACCOUNT_ATTRIBUTE_SEA_NAME = "sea";
protected static final String DUMMY_ACCOUNT_ATTRIBUTE_MATE_NAME = "mate";
protected static final String INTENT_TEST = "test";
protected static final String INTENT_DUMMY_GROUP = "group";
protected static final String INTENT_DUMMY_PRIVILEGE = "privilege";
// Authorizations (test-only authorization action QNames and their URI forms)
protected static final String NS_TEST_AUTZ = "http://midpoint.evolveum.com/xml/ns/test/authorization";
protected static final QName AUTZ_LOOT_QNAME = new QName(NS_TEST_AUTZ, "loot");
protected static final String AUTZ_LOOT_URL = QNameUtil.qNameToUri(AUTZ_LOOT_QNAME);
protected static final QName AUTZ_COMMAND_QNAME = new QName(NS_TEST_AUTZ, "command");
protected static final String AUTZ_COMMAND_URL = QNameUtil.qNameToUri(AUTZ_COMMAND_QNAME);
protected static final QName AUTZ_PUNISH_QNAME = new QName(NS_TEST_AUTZ, "punish");
protected static final String AUTZ_PUNISH_URL = QNameUtil.qNameToUri(AUTZ_PUNISH_QNAME);
protected static final QName AUTZ_CAPSIZE_QNAME = new QName(NS_TEST_AUTZ, "capsize");
protected static final String AUTZ_CAPSIZE_URL = QNameUtil.qNameToUri(AUTZ_CAPSIZE_QNAME);
protected static final QName AUTZ_SUPERSPECIAL_QNAME = new QName(NS_TEST_AUTZ, "superspecial");
protected static final String AUTZ_SUPERSPECIAL_URL = QNameUtil.qNameToUri(AUTZ_SUPERSPECIAL_QNAME);
protected static final QName AUTZ_NONSENSE_QNAME = new QName(NS_TEST_AUTZ, "nonsense");
protected static final String AUTZ_NONSENSE_URL = QNameUtil.qNameToUri(AUTZ_NONSENSE_QNAME);
protected static final QName AUTZ_SAIL_QNAME = new QName(NS_TEST_AUTZ, "sail");
protected static final String AUTZ_SAIL_URL = QNameUtil.qNameToUri(AUTZ_SAIL_QNAME);
// ----- Notifier (dummy transport) names -----
protected static final String NOTIFIER_ACCOUNT_PASSWORD_NAME = "accountPasswordNotifier";
protected static final String NOTIFIER_ACCOUNT_ACTIVATION_NAME = "accountActivationNotifier";
private static final Trace LOGGER = TraceManager.getTrace(AbstractConfiguredModelIntegrationTest.class);
// Administrator user loaded by initSystem(); tests run logged in as this user.
protected PrismObject<UserType> userAdministrator;
public AbstractConfiguredModelIntegrationTest() {
super();
}
/**
 * Initializes the test system: runs the superclass initialization, post-inits the
 * model service, imports the system configuration, the administrator user and the
 * superuser role, and finally logs in as administrator. The order of these steps
 * matters: the system configuration must exist before users/roles are processed.
 */
@Override
public void initSystem(Task initTask, OperationResult initResult) throws Exception {
    LOGGER.trace("initSystem");
    // We want logging config from logback-test.xml and not from system config object
    InternalsConfig.setAvoidLoggingChange(true);
    super.initSystem(initTask, initResult);
    modelService.postInit(initResult);
    // Zero delay range — presumably disables artificial randomness of the manual
    // connector so tests are deterministic; TODO confirm against ManualConnectorInstance.
    ManualConnectorInstance.setRandomDelayRange(0);
    // System Configuration
    try {
        repoAddObjectFromFile(getSystemConfigurationFile(), initResult);
    } catch (ObjectAlreadyExistsException e) {
        // Re-throw with a diagnostic hint: a leftover config means a previous test leaked state.
        throw new ObjectAlreadyExistsException("System configuration already exists in repository;" +
                "looks like the previous test haven't cleaned it up", e);
    }
    // Users
    userAdministrator = repoAddObjectFromFile(USER_ADMINISTRATOR_FILE, UserType.class, initResult);
    repoAddObjectFromFile(ROLE_SUPERUSER_FILE, initResult);
    login(userAdministrator);
}
/**
 * Number of roles that initSystem() imports into the repository.
 * Subclasses that add roles should override and add to this count.
 */
protected int getNumberOfRoles() {
    return 1; // Superuser role
}
/**
 * System configuration imported by initSystem(); override to use a custom one.
 */
protected File getSystemConfigurationFile() {
    return SYSTEM_CONFIGURATION_FILE;
}
/**
 * Default actor for operations in this test class: the administrator
 * user loaded by initSystem().
 */
protected PrismObject<UserType> getDefaultActor() {
    return userAdministrator;
}
/**
 * Wraps every TestNG invocation with start/end log entries including the
 * elapsed wall-clock time in milliseconds.
 */
@Override
public void run(IHookCallBack callBack, ITestResult testResult) {
    long time = System.currentTimeMillis();
    LOGGER.info("###>>> run start");
    super.run(callBack, testResult);
    // Pass the value directly; wrapping it in new Object[]{...} is redundant for a varargs logger.
    LOGGER.info("###>>> run end ({}ms)", System.currentTimeMillis() - time);
}
/**
 * After-class hook: runs Spring's standard teardown, then nulls all non-static,
 * non-final, non-primitive fields of this instance so the test class can be
 * garbage-collected (see {@link #nullAllFields(Object, Class)}). Timing is logged.
 */
@AfterClass
@Override
protected void springTestContextAfterTestClass() throws Exception {
    long time = System.currentTimeMillis();
    LOGGER.info("###>>> springTestContextAfterTestClass start");
    super.springTestContextAfterTestClass();
    nullAllFields(this, getClass());
    // Pass the value directly; wrapping it in new Object[]{...} is redundant for a varargs logger.
    LOGGER.info("###>>> springTestContextAfterTestClass end ({}ms)", System.currentTimeMillis() - time);
}
/**
 * Sets to null every field of the given object that is not static, final or of
 * a primitive type, walking the class hierarchy from the topmost superclass down.
 *
 * All this is just to make GC work during DirtiesContext after every test class,
 * because memory consumption is too big. Test class instances can't be GCed
 * immediately. If they hold autowired fields like sessionFactory (for example
 * through SqlRepositoryService impl), their memory footprint gets big.
 *
 * @param object the instance whose fields are to be nulled
 * @param forClass the class (or superclass) whose declared fields are processed
 * @throws Exception if reflective access to a field fails
 */
public static void nullAllFields(Object object, Class<?> forClass) throws Exception {
    // Recurse first so superclass fields are nulled as well.
    if (forClass.getSuperclass() != null) {
        nullAllFields(object, forClass.getSuperclass());
    }
    for (Field field : forClass.getDeclaredFields()) {
        int modifiers = field.getModifiers();   // fetch once instead of per-check
        if (Modifier.isFinal(modifiers)
                || Modifier.isStatic(modifiers)
                || field.getType().isPrimitive()) {
            continue;
        }
        nullField(object, field);
    }
}
/**
 * Sets a single field of the given object to null via reflection, temporarily
 * widening accessibility if needed and restoring the original flag afterwards.
 *
 * @param obj the instance whose field is nulled
 * @param field the field to null
 * @throws Exception if the reflective write fails
 */
private static void nullField(Object obj, Field field) throws Exception {
    // Pass values directly; wrapping them in new Object[]{...} is redundant for a varargs logger.
    LOGGER.info("Setting {} to null on {}.", field.getName(), obj.getClass().getSimpleName());
    boolean accessible = field.isAccessible();
    if (!accessible) {
        field.setAccessible(true);
    }
    field.set(obj, null);
    field.setAccessible(accessible);   // restore original accessibility
}
/**
 * After-method hook: delegates to Spring's standard teardown and logs timing.
 */
@AfterMethod
@Override
protected void springTestContextAfterTestMethod(Method testMethod) throws Exception {
    long time = System.currentTimeMillis();
    LOGGER.info("###>>> springTestContextAfterTestMethod start");
    super.springTestContextAfterTestMethod(testMethod);
    // Pass the value directly; wrapping it in new Object[]{...} is redundant for a varargs logger.
    LOGGER.info("###>>> springTestContextAfterTestMethod end ({}ms)", System.currentTimeMillis() - time);
}
/**
 * Before-class hook: delegates to Spring's standard setup and logs timing.
 */
@BeforeClass
@Override
protected void springTestContextBeforeTestClass() throws Exception {
    long time = System.currentTimeMillis();
    LOGGER.info("###>>> springTestContextBeforeTestClass start");
    super.springTestContextBeforeTestClass();
    // Pass the value directly; wrapping it in new Object[]{...} is redundant for a varargs logger.
    LOGGER.info("###>>> springTestContextBeforeTestClass end ({}ms)", System.currentTimeMillis() - time);
}
/**
 * Before-method hook: delegates to Spring's standard setup and logs timing.
 */
@BeforeMethod
@Override
protected void springTestContextBeforeTestMethod(Method testMethod) throws Exception {
    long time = System.currentTimeMillis();
    LOGGER.info("###>>> springTestContextBeforeTestMethod start");
    super.springTestContextBeforeTestMethod(testMethod);
    // Pass the value directly; wrapping it in new Object[]{...} is redundant for a varargs logger.
    LOGGER.info("###>>> springTestContextBeforeTestMethod end ({}ms)", System.currentTimeMillis() - time);
}
/**
 * Test-instance preparation hook: delegates to Spring and logs timing.
 */
@BeforeClass
@Override
protected void springTestContextPrepareTestInstance() throws Exception {
    long time = System.currentTimeMillis();
    LOGGER.info("###>>> springTestContextPrepareTestInstance start");
    super.springTestContextPrepareTestInstance();
    // Pass the value directly; wrapping it in new Object[]{...} is redundant for a varargs logger.
    LOGGER.info("###>>> springTestContextPrepareTestInstance end ({}ms)", System.currentTimeMillis() - time);
}
/**
 * Looks up the "piracy" sample extension schema ({@link #NS_PIRACY})
 * from the prism schema registry.
 */
protected PrismSchema getPiracySchema() {
    return prismContext.getSchemaRegistry().findSchemaByNamespace(NS_PIRACY);
}
/**
 * Asserts that the given task carries a lastScanTimestamp extension property
 * and that its value lies between startCal and endCal (inclusive bounds as
 * implemented by TestUtil.assertBetween).
 *
 * @param taskOid OID of the task to inspect
 * @param startCal lower bound for the recorded timestamp
 * @param endCal upper bound for the recorded timestamp
 */
protected void assertLastRecomputeTimestamp(String taskOid, XMLGregorianCalendar startCal, XMLGregorianCalendar endCal) throws ObjectNotFoundException, SchemaException, SecurityViolationException, CommunicationException, ConfigurationException, ExpressionEvaluationException {
    PrismObject<TaskType> task = getTask(taskOid);
    display("Task", task);
    PrismContainer<?> taskExtension = task.getExtension();
    assertNotNull("No task extension", taskExtension);
    PrismProperty<XMLGregorianCalendar> lastRecomputeTimestampProp = taskExtension.findProperty(SchemaConstants.MODEL_EXTENSION_LAST_SCAN_TIMESTAMP_PROPERTY_NAME);
    assertNotNull("no lastRecomputeTimestamp property", lastRecomputeTimestampProp);
    XMLGregorianCalendar lastRecomputeTimestamp = lastRecomputeTimestampProp.getRealValue();
    assertNotNull("null lastRecomputeTimestamp", lastRecomputeTimestamp);
    TestUtil.assertBetween("lastRecomputeTimestamp", startCal, endCal, lastRecomputeTimestamp);
}
/**
 * Convenience overload: asserts password metadata assuming the change was made
 * by the administrator through the GUI channel.
 */
protected void assertPasswordMetadata(PrismObject<UserType> user, boolean create, XMLGregorianCalendar start, XMLGregorianCalendar end) {
    assertPasswordMetadata(user, create, start, end, USER_ADMINISTRATOR_OID, SchemaConstants.CHANNEL_GUI_USER_URI);
}
/**
 * Replaces (clears) the parentOrgRef and roleMembershipRef values of the given
 * user directly in the repository, asserts the operation succeeded, and displays
 * the resulting user object.
 *
 * @param userOid OID of the user to clean up
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
protected void clearUserOrgAndRoleRefs(String userOid) throws ObjectNotFoundException, SchemaException, ObjectAlreadyExistsException, SecurityViolationException, CommunicationException, ConfigurationException, ExpressionEvaluationException {
    OperationResult result = new OperationResult("clearUserOrgAndRoleRefs");
    // Raw Collection is deliberate here (see @SuppressWarnings): the deltas are of
    // heterogeneous parameterized types.
    Collection modifications = new ArrayList<>();
    // REPLACE with a null value list clears the reference entirely.
    ReferenceDelta parentOrgRefDelta = ReferenceDelta.createModificationReplace(
            UserType.F_PARENT_ORG_REF, getUserDefinition(), (PrismReferenceValue)null);
    modifications.add(parentOrgRefDelta);
    ReferenceDelta roleMembershipRefDelta = ReferenceDelta.createModificationReplace(
            UserType.F_ROLE_MEMBERSHIP_REF, getUserDefinition(), (PrismReferenceValue)null);
    modifications.add(roleMembershipRefDelta);
    repositoryService.modifyObject(UserType.class, userOid, modifications, result);
    result.computeStatus();
    TestUtil.assertSuccess(result);
    PrismObject<UserType> userBefore = getUser(userOid);
    display("User before", userBefore);
}
/**
 * Asserts that the collection of evaluated assignment targets contains a role
 * with the expected OID; fails the test otherwise.
 *
 * @param evaluatedRoles evaluated assignment targets to search
 * @param expectedRoleOid OID that must be present among the targets
 */
protected void assertEvaluatedRole(Collection<? extends EvaluatedAssignmentTarget> evaluatedRoles,
        String expectedRoleOid) {
    for (EvaluatedAssignmentTarget evalRole : evaluatedRoles) {
        if (expectedRoleOid.equals(evalRole.getTarget().getOid())) {
            return;
        }
    }
    // Fixed message wording: was "no present".
    AssertJUnit.fail("Role " + expectedRoleOid + " not present in evaluated roles " + evaluatedRoles);
}
/**
 * Asserts that exactly one password notification was sent and that its body
 * matches the expected text for the given resource, account and password.
 */
protected void assertSinglePasswordNotification(String dummyResourceName, String username,
        String password) {
    assertPasswordNotifications(1);
    String expectedBody = getExpectedPasswordNotificationBody(dummyResourceName, username, password);
    assertSingleDummyTransportMessage(NOTIFIER_ACCOUNT_PASSWORD_NAME, expectedBody);
}
/**
 * Asserts that exactly {@code expected} messages went through the
 * account-password notifier's dummy transport.
 */
protected void assertPasswordNotifications(int expected) {
    checkDummyTransportMessages(NOTIFIER_ACCOUNT_PASSWORD_NAME, expected);
}
/**
 * Asserts that no password notifications were sent.
 */
protected void assertNoPasswordNotifications() {
    // Delegate for consistency with assertPasswordNotifications(int).
    assertPasswordNotifications(0);
}
/**
 * Asserts that at least one of the sent password notifications has the expected
 * body for the given resource, account and password (other messages may exist).
 */
protected void assertHasPasswordNotification(String dummyResourceName, String username,
        String password) {
    assertHasDummyTransportMessage(NOTIFIER_ACCOUNT_PASSWORD_NAME,
            getExpectedPasswordNotificationBody(dummyResourceName, username, password));
}
/**
 * Asserts that exactly one password notification was sent for a generated
 * (unknown-in-advance) password: the body must start with the expected prefix
 * and carry a non-empty password after it.
 */
protected void assertSinglePasswordNotificationGenerated(String dummyResourceName, String username) {
    assertPasswordNotifications(1);
    String body = getDummyTransportMessageBody(NOTIFIER_ACCOUNT_PASSWORD_NAME, 0);
    String expectedPrefix = getExpectedPasswordNotificationBodyPrefix(dummyResourceName, username);
    if (!body.startsWith(expectedPrefix)) {
        fail("Expected that "+dummyResourceName+" dummy password notification message starts with prefix '"+expectedPrefix+"', but it was: "+body);
    }
    // Whatever follows the prefix is the generated password; it must not be empty.
    String suffix = body.substring(expectedPrefix.length());
    if (suffix.isEmpty()) {
        fail("Empty password in "+dummyResourceName+" dummy password notification message");
    }
}
/**
 * Builds the full expected password-notification body: the standard prefix
 * for the resource/account followed by the password itself.
 */
protected String getExpectedPasswordNotificationBody(String dummyResourceName, String username,
        String password) {
    String prefix = getExpectedPasswordNotificationBodyPrefix(dummyResourceName, username);
    return prefix + password;
}
/**
 * Builds the expected prefix of a password-notification body, ending with
 * "is: " so the password can be appended or checked after it.
 */
protected String getExpectedPasswordNotificationBodyPrefix(String dummyResourceName, String username) {
    String resourceName = getDummyResourceType(dummyResourceName).getName().getOrig();
    return String.format("Password for account %s on %s is: ", username, resourceName);
}
/**
 * Dumps all messages sent through the account-password notifier (debug aid).
 */
protected void displayPasswordNotifications() {
    displayNotifications(NOTIFIER_ACCOUNT_PASSWORD_NAME);
}
}
| |
/**
* Copyright 2006 OCLC Online Computer Library Center Licensed under the Apache
* License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or
* agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.oclc.oai.server.catalog.helpers;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;

import org.apache.log4j.Logger;

import org.oclc.oai.util.OAIUtil;
import org.xml.sax.Attributes;
import org.xml.sax.helpers.DefaultHandler;
/**
 * SAX handler that splits an OAI-PMH-style XML stream into per-record entries.
 * For each OAI &lt;record&gt; element it captures the serialized &lt;metadata&gt;
 * payload as a string, the OAI identifier, datestamp and setSpecs, the
 * database-specific &lt;recordid&gt; and the first xsi:schemaLocation seen on the
 * metadata root element, and files them all in a sorted map keyed by the
 * lower-cased recordid.
 *
 * <p>Not thread-safe: a single instance accumulates parser state across callbacks.
 */
public class RecordStringHandler extends DefaultHandler {
    private static final boolean debug = false;
    private static final String OAI_NS = "http://www.openarchives.org/OAI/2.0/";
    private static final String DATABASE_NS = "http://www.oclc.org/pears/";
    private static final String XSI_NS = "http://www.w3.org/2001/XMLSchema-instance";

    private static final Logger log = Logger.getLogger(RecordStringHandler.class);

    // recordid (lower-cased) -> map with keys "recordString", "localIdentifier",
    // "recordid", "schemaLocation", "datestamp" and "setSpecs".
    private final SortedMap<String, Map<String, Object>> nativeRecords = new TreeMap<String, Map<String, Object>>();

    // Element-nesting counters: a value > 0 means we are currently inside that element.
    private int recordFlag = 0;
    private int metadataFlag = 0;
    private int recordidFlag = 0;
    private int identifierFlag = 0;
    private int datestampFlag = 0;
    private int setSpecFlag = 0;

    // Per-record accumulators; all reset to null after each </record>.
    private StringBuilder metadata = null;
    private StringBuilder recordid = null;
    private StringBuilder identifier = null;
    private StringBuilder datestamp = null;
    private StringBuilder setSpec = null;
    private String schemaLocation = null;
    private List<String> setSpecs = null;

    /**
     * @return the parsed records keyed by lower-cased recordid.
     *         Raw return type kept for backward compatibility with existing callers;
     *         the values are HashMap instances at runtime, as before.
     */
    public SortedMap getNativeRecords() {
        return nativeRecords;
    }

    @Override
    public void startElement(String uri, String localName, String qName,
            Attributes attrs) {
        if (debug) {
            log.info("startElement: " + uri + ", " + localName + ", "+ qName + ", ");
        }
        if (OAI_NS.equals(uri) && "record".equals(localName)) {
            setSpecs = new ArrayList<String>();
            recordFlag++;
        }
        // Inside <metadata>: re-serialize the element into the metadata buffer.
        if (metadataFlag > 0) {
            metadata.append("<").append(getName(localName, qName));
            if (attrs != null) {
                for (int i = 0; i < attrs.getLength(); ++i) {
                    String attributeName = getName(attrs.getLocalName(i),
                            attrs.getQName(i));
                    // xmlEncode the attribute value so ", &, < etc. stay valid XML.
                    metadata.append(" ").append(attributeName).append("=\"")
                            .append(OAIUtil.xmlEncode(attrs.getValue(i)))
                            .append("\"");
                }
            }
            metadata.append(">");
        }
        // Capture the xsi:schemaLocation of the first direct child of <metadata>
        // (metadataFlag is still 1 at that point; it is bumped below only for the
        // <metadata> element itself).
        if (schemaLocation == null
                && metadataFlag == 1) {
            schemaLocation = attrs.getValue(XSI_NS, "schemaLocation");
        }
        if (OAI_NS.equals(uri) && "metadata".equals(localName)) {
            if (metadata == null) {
                metadata = new StringBuilder();
            }
            metadataFlag++;
        }
        if (OAI_NS.equals(uri) && "identifier".equals(localName)) {
            if (identifier == null) {
                identifier = new StringBuilder();
            }
            identifierFlag++;
        }
        if (DATABASE_NS.equals(uri) && "recordid".equals(localName)) {
            if (recordid == null) {
                recordid = new StringBuilder();
            }
            recordidFlag++;
        }
        if (OAI_NS.equals(uri) && "datestamp".equals(localName)) {
            if (datestamp == null) {
                datestamp = new StringBuilder();
            }
            datestampFlag++;
        }
        if (OAI_NS.equals(uri) && "setSpec".equals(localName)) {
            if (setSpec == null) {
                setSpec = new StringBuilder();
            }
            setSpecFlag++;
        }
    }

    @Override
    public void endElement(String uri, String localName, String qName) {
        if (OAI_NS.equals(uri) && "identifier".equals(localName)) {
            identifierFlag--;
        }
        if (DATABASE_NS.equals(uri) && "recordid".equals(localName)) {
            recordidFlag--;
        }
        if (OAI_NS.equals(uri) && "datestamp".equals(localName)) {
            datestampFlag--;
        }
        if (OAI_NS.equals(uri) && "setSpec".equals(localName)) {
            setSpecs.add(setSpec.toString());
            setSpec = null;
            setSpecFlag--;
        }
        if (OAI_NS.equals(uri) && "record".equals(localName)) {
            recordFlag--;
            if (recordFlag == 0) {
                // Record complete: assemble the entry and reset all accumulators.
                // NOTE(review): this assumes every record carried metadata, identifier,
                // recordid and datestamp elements; a record missing one would NPE here
                // (same behavior as the original implementation) — confirm inputs.
                Map<String, Object> nativeRecord = new HashMap<String, Object>();
                nativeRecord.put("recordString", metadata.toString());
                if (debug) {
                    log.info("metadata: " + metadata.toString());
                }
                nativeRecord.put("localIdentifier", identifier.toString());
                if (debug) {
                    log.info("localIdentifier=" + identifier.toString());
                }
                nativeRecord.put("recordid", recordid.toString());
                if (debug) {
                    log.info("recordid=" + recordid.toString());
                }
                nativeRecord.put("schemaLocation", schemaLocation);
                if (debug) {
                    log.info("schemaLocation=" + schemaLocation);
                }
                nativeRecord.put("datestamp", datestamp.toString());
                if (debug) {
                    log.info("datestamp=" + datestamp.toString());
                }
                nativeRecord.put("setSpecs", setSpecs);
                // NOTE(review): default-locale toLowerCase(); kept as-is for
                // key compatibility, but Locale.ROOT would be safer.
                nativeRecords.put(recordid.toString().toLowerCase(), nativeRecord);
                setSpecs = null;
                identifier = null;
                metadata = null;
                recordid = null;
                schemaLocation = null;
                datestamp = null;
            }
        }
        // Decrement before the write check so the </metadata> tag itself is not
        // appended to the buffer.
        if (OAI_NS.equals(uri) && "metadata".equals(localName)) {
            metadataFlag--;
        }
        if (metadataFlag > 0) {
            metadata.append("</").append(getName(localName, qName)).append(">");
        }
    }

    @Override
    public void characters(char[] ch, int start, int length) {
        String s = new String(ch, start, length);
        if (metadataFlag > 0) {
            // Text inside <metadata> is re-serialized, so it must be XML-escaped.
            metadata.append(OAIUtil.xmlEncode(s));
        }
        if (identifierFlag > 0) {
            identifier.append(s);
        }
        if (recordidFlag > 0) {
            recordid.append(s);
        }
        if (datestampFlag > 0) {
            datestamp.append(s);
        }
        if (setSpecFlag > 0) {
            setSpec.append(s);
        }
    }

    /** Prefers the qualified name; falls back to the local name when qName is absent. */
    private String getName(String s1, String s2) {
        if (s2 == null || "".equals(s2)) {
            return s1;
        }
        return s2;
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.sql.planner.optimizations;
import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import io.prestosql.Session;
import io.prestosql.SystemSessionProperties;
import io.prestosql.metadata.Metadata;
import io.prestosql.metadata.TableProperties;
import io.prestosql.metadata.TableProperties.TablePartitioning;
import io.prestosql.spi.connector.ColumnHandle;
import io.prestosql.spi.connector.ConstantProperty;
import io.prestosql.spi.connector.GroupingProperty;
import io.prestosql.spi.connector.LocalProperty;
import io.prestosql.spi.connector.SortingProperty;
import io.prestosql.spi.predicate.NullableValue;
import io.prestosql.spi.type.Type;
import io.prestosql.sql.planner.DomainTranslator;
import io.prestosql.sql.planner.ExpressionInterpreter;
import io.prestosql.sql.planner.NoOpSymbolResolver;
import io.prestosql.sql.planner.OrderingScheme;
import io.prestosql.sql.planner.Symbol;
import io.prestosql.sql.planner.TypeAnalyzer;
import io.prestosql.sql.planner.TypeProvider;
import io.prestosql.sql.planner.optimizations.ActualProperties.Global;
import io.prestosql.sql.planner.plan.AggregationNode;
import io.prestosql.sql.planner.plan.ApplyNode;
import io.prestosql.sql.planner.plan.AssignUniqueId;
import io.prestosql.sql.planner.plan.DeleteNode;
import io.prestosql.sql.planner.plan.DistinctLimitNode;
import io.prestosql.sql.planner.plan.EnforceSingleRowNode;
import io.prestosql.sql.planner.plan.ExchangeNode;
import io.prestosql.sql.planner.plan.ExplainAnalyzeNode;
import io.prestosql.sql.planner.plan.FilterNode;
import io.prestosql.sql.planner.plan.GroupIdNode;
import io.prestosql.sql.planner.plan.IndexJoinNode;
import io.prestosql.sql.planner.plan.IndexSourceNode;
import io.prestosql.sql.planner.plan.JoinNode;
import io.prestosql.sql.planner.plan.LateralJoinNode;
import io.prestosql.sql.planner.plan.LimitNode;
import io.prestosql.sql.planner.plan.MarkDistinctNode;
import io.prestosql.sql.planner.plan.OutputNode;
import io.prestosql.sql.planner.plan.PlanNode;
import io.prestosql.sql.planner.plan.PlanVisitor;
import io.prestosql.sql.planner.plan.ProjectNode;
import io.prestosql.sql.planner.plan.RowNumberNode;
import io.prestosql.sql.planner.plan.SampleNode;
import io.prestosql.sql.planner.plan.SemiJoinNode;
import io.prestosql.sql.planner.plan.SortNode;
import io.prestosql.sql.planner.plan.SpatialJoinNode;
import io.prestosql.sql.planner.plan.StatisticsWriterNode;
import io.prestosql.sql.planner.plan.TableFinishNode;
import io.prestosql.sql.planner.plan.TableScanNode;
import io.prestosql.sql.planner.plan.TableWriterNode;
import io.prestosql.sql.planner.plan.TopNNode;
import io.prestosql.sql.planner.plan.TopNRowNumberNode;
import io.prestosql.sql.planner.plan.UnnestNode;
import io.prestosql.sql.planner.plan.ValuesNode;
import io.prestosql.sql.planner.plan.WindowNode;
import io.prestosql.sql.tree.CoalesceExpression;
import io.prestosql.sql.tree.Expression;
import io.prestosql.sql.tree.NodeRef;
import io.prestosql.sql.tree.SymbolReference;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Verify.verify;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
import static io.prestosql.SystemSessionProperties.planWithTableNodePartitioning;
import static io.prestosql.spi.predicate.TupleDomain.extractFixedValues;
import static io.prestosql.sql.planner.SystemPartitioningHandle.ARBITRARY_DISTRIBUTION;
import static io.prestosql.sql.planner.optimizations.ActualProperties.Global.arbitraryPartition;
import static io.prestosql.sql.planner.optimizations.ActualProperties.Global.coordinatorSingleStreamPartition;
import static io.prestosql.sql.planner.optimizations.ActualProperties.Global.partitionedOn;
import static io.prestosql.sql.planner.optimizations.ActualProperties.Global.singleStreamPartition;
import static io.prestosql.sql.planner.optimizations.ActualProperties.Global.streamPartitionedOn;
import static io.prestosql.sql.planner.plan.ExchangeNode.Scope.LOCAL;
import static io.prestosql.sql.planner.plan.ExchangeNode.Scope.REMOTE;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.toMap;
public class PropertyDerivations
{
private PropertyDerivations() {}
public static ActualProperties derivePropertiesRecursively(PlanNode node, Metadata metadata, Session session, TypeProvider types, TypeAnalyzer typeAnalyzer)
{
List<ActualProperties> inputProperties = node.getSources().stream()
.map(source -> derivePropertiesRecursively(source, metadata, session, types, typeAnalyzer))
.collect(toImmutableList());
return deriveProperties(node, inputProperties, metadata, session, types, typeAnalyzer);
}
    /**
     * Derives properties for a single node from its inputs' properties, then verifies
     * that every symbol mentioned in the derived partitioning, constant, and local
     * properties is actually produced by the node's output.
     */
    public static ActualProperties deriveProperties(PlanNode node, List<ActualProperties> inputProperties, Metadata metadata, Session session, TypeProvider types, TypeAnalyzer typeAnalyzer)
    {
        ActualProperties output = node.accept(new Visitor(metadata, session, types, typeAnalyzer), inputProperties);
        output.getNodePartitioning().ifPresent(partitioning ->
                verify(node.getOutputSymbols().containsAll(partitioning.getColumns()), "Node-level partitioning properties contain columns not present in node's output"));
        verify(node.getOutputSymbols().containsAll(output.getConstants().keySet()), "Node-level constant properties contain columns not present in node's output");
        Set<Symbol> localPropertyColumns = output.getLocalProperties().stream()
                .flatMap(property -> property.getColumns().stream())
                .collect(Collectors.toSet());
        verify(node.getOutputSymbols().containsAll(localPropertyColumns), "Node-level local properties contain columns not present in node's output");
        return output;
    }
    /**
     * Same derivation as {@link #deriveProperties} but WITHOUT the output-symbol
     * verification checks — intended for callers that knowingly operate on
     * intermediate states where those invariants may not hold yet.
     */
    public static ActualProperties streamBackdoorDeriveProperties(PlanNode node, List<ActualProperties> inputProperties, Metadata metadata, Session session, TypeProvider types, TypeAnalyzer typeAnalyzer)
    {
        return node.accept(new Visitor(metadata, session, types, typeAnalyzer), inputProperties);
    }
private static class Visitor
extends PlanVisitor<ActualProperties, List<ActualProperties>>
{
private final Metadata metadata;
private final Session session;
private final TypeProvider types;
private final TypeAnalyzer typeAnalyzer;
        // Captures the per-derivation context; fields are read-only after construction.
        public Visitor(Metadata metadata, Session session, TypeProvider types, TypeAnalyzer typeAnalyzer)
        {
            this.metadata = metadata;
            this.session = session;
            this.types = types;
            this.typeAnalyzer = typeAnalyzer;
        }
        @Override
        protected ActualProperties visitPlan(PlanNode node, List<ActualProperties> inputProperties)
        {
            // Fallback: every plan-node type must have an explicit visit method here.
            throw new UnsupportedOperationException("not yet implemented: " + node.getClass().getName());
        }
        @Override
        public ActualProperties visitExplainAnalyze(ExplainAnalyzeNode node, List<ActualProperties> inputProperties)
        {
            // EXPLAIN ANALYZE output is assembled on the coordinator in a single stream.
            return ActualProperties.builder()
                    .global(coordinatorSingleStreamPartition())
                    .build();
        }
        @Override
        public ActualProperties visitOutput(OutputNode node, List<ActualProperties> inputProperties)
        {
            // Pass through the single input's properties, dropping any that refer
            // to symbols not exposed by the output node.
            return Iterables.getOnlyElement(inputProperties)
                    .translate(column -> PropertyDerivations.filterIfMissing(node.getOutputSymbols(), column));
        }
        @Override
        public ActualProperties visitEnforceSingleRow(EnforceSingleRowNode node, List<ActualProperties> inputProperties)
        {
            // Single-row enforcement does not change any derived property.
            return Iterables.getOnlyElement(inputProperties);
        }
        @Override
        public ActualProperties visitAssignUniqueId(AssignUniqueId node, List<ActualProperties> inputProperties)
        {
            ActualProperties properties = Iterables.getOnlyElement(inputProperties);
            // The generated id column groups rows; all pre-existing source columns
            // are constant within such a group.
            ImmutableList.Builder<LocalProperty<Symbol>> newLocalProperties = ImmutableList.builder();
            newLocalProperties.addAll(properties.getLocalProperties());
            newLocalProperties.add(new GroupingProperty<>(ImmutableList.of(node.getIdColumn())));
            node.getSource().getOutputSymbols().stream()
                    .forEach(column -> newLocalProperties.add(new ConstantProperty<>(column)));
            if (properties.getNodePartitioning().isPresent()) {
                // preserve input (possibly preferred) partitioning
                return ActualProperties.builderFrom(properties)
                        .local(newLocalProperties.build())
                        .build();
            }
            // Otherwise the unique id itself defines an arbitrary distribution.
            return ActualProperties.builderFrom(properties)
                    .global(partitionedOn(ARBITRARY_DISTRIBUTION, ImmutableList.of(node.getIdColumn()), Optional.empty()))
                    .local(newLocalProperties.build())
                    .build();
        }
        @Override
        public ActualProperties visitApply(ApplyNode node, List<ActualProperties> inputProperties)
        {
            // ApplyNodes are expected to be rewritten away before property derivation runs.
            throw new IllegalArgumentException("Unexpected node: " + node.getClass().getName());
        }
        @Override
        public ActualProperties visitLateralJoin(LateralJoinNode node, List<ActualProperties> inputProperties)
        {
            // LateralJoinNodes are expected to be rewritten away before property derivation runs.
            throw new IllegalArgumentException("Unexpected node: " + node.getClass().getName());
        }
        @Override
        public ActualProperties visitMarkDistinct(MarkDistinctNode node, List<ActualProperties> inputProperties)
        {
            // MarkDistinct only appends a marker column; input properties are unchanged.
            return Iterables.getOnlyElement(inputProperties);
        }
        @Override
        public ActualProperties visitWindow(WindowNode node, List<ActualProperties> inputProperties)
        {
            ActualProperties properties = Iterables.getOnlyElement(inputProperties);
            // If the input is completely pre-partitioned and sorted, then the original input properties will be respected
            Optional<OrderingScheme> orderingScheme = node.getOrderingScheme();
            if (ImmutableSet.copyOf(node.getPartitionBy()).equals(node.getPrePartitionedInputs())
                    && (!orderingScheme.isPresent() || node.getPreSortedOrderPrefix() == orderingScheme.get().getOrderBy().size())) {
                return properties;
            }
            ImmutableList.Builder<LocalProperty<Symbol>> localProperties = ImmutableList.builder();
            // If the WindowNode has pre-partitioned inputs, then it will not change the order of those inputs at output,
            // so we should just propagate those underlying local properties that guarantee the pre-partitioning.
            // TODO: come up with a more general form of this operation for other streaming operators
            if (!node.getPrePartitionedInputs().isEmpty()) {
                GroupingProperty<Symbol> prePartitionedProperty = new GroupingProperty<>(node.getPrePartitionedInputs());
                // Keep only the longest prefix of input local properties that still
                // guarantees the pre-partitioning grouping.
                for (LocalProperty<Symbol> localProperty : properties.getLocalProperties()) {
                    if (!prePartitionedProperty.isSimplifiedBy(localProperty)) {
                        break;
                    }
                    localProperties.add(localProperty);
                }
            }
            // The window operator itself groups by PARTITION BY and sorts by ORDER BY.
            if (!node.getPartitionBy().isEmpty()) {
                localProperties.add(new GroupingProperty<>(node.getPartitionBy()));
            }
            orderingScheme.ifPresent(scheme ->
                    scheme.getOrderBy().stream()
                            .map(column -> new SortingProperty<>(column, scheme.getOrdering(column)))
                            .forEach(localProperties::add));
            return ActualProperties.builderFrom(properties)
                    .local(LocalProperties.normalizeAndPrune(localProperties.build()))
                    .build();
        }
        @Override
        public ActualProperties visitGroupId(GroupIdNode node, List<ActualProperties> inputProperties)
        {
            // Translate input properties through the grouping-column renames; only
            // common grouping columns (present in every grouping set) are stable
            // enough to preserve properties.
            Map<Symbol, Symbol> inputToOutputMappings = new HashMap<>();
            for (Map.Entry<Symbol, Symbol> setMapping : node.getGroupingColumns().entrySet()) {
                if (node.getCommonGroupingColumns().contains(setMapping.getKey())) {
                    // TODO: Add support for translating a property on a single column to multiple columns
                    // when GroupIdNode is copying a single input grouping column into multiple output grouping columns (i.e. aliases), this is basically picking one arbitrarily
                    inputToOutputMappings.putIfAbsent(setMapping.getValue(), setMapping.getKey());
                }
            }
            // TODO: Add support for translating a property on a single column to multiple columns
            // this is deliberately placed after the grouping columns, because preserving properties has a bigger perf impact
            for (Symbol argument : node.getAggregationArguments()) {
                inputToOutputMappings.putIfAbsent(argument, argument);
            }
            return Iterables.getOnlyElement(inputProperties).translate(column -> Optional.ofNullable(inputToOutputMappings.get(column)));
        }
        @Override
        public ActualProperties visitAggregation(AggregationNode node, List<ActualProperties> inputProperties)
        {
            ActualProperties properties = Iterables.getOnlyElement(inputProperties);
            // Only grouping keys survive aggregation; output rows are grouped on them.
            ActualProperties translated = properties.translate(symbol -> node.getGroupingKeys().contains(symbol) ? Optional.of(symbol) : Optional.empty());
            return ActualProperties.builderFrom(translated)
                    .local(LocalProperties.grouped(node.getGroupingKeys()))
                    .build();
        }
        @Override
        public ActualProperties visitRowNumber(RowNumberNode node, List<ActualProperties> inputProperties)
        {
            // Row numbering appends a column without disturbing derived properties.
            return Iterables.getOnlyElement(inputProperties);
        }
@Override
public ActualProperties visitTopNRowNumber(TopNRowNumberNode node, List<ActualProperties> inputProperties)
{
ActualProperties properties = Iterables.getOnlyElement(inputProperties);
ImmutableList.Builder<LocalProperty<Symbol>> localProperties = ImmutableList.builder();
localProperties.add(new GroupingProperty<>(node.getPartitionBy()));
for (Symbol column : node.getOrderingScheme().getOrderBy()) {
localProperties.add(new SortingProperty<>(column, node.getOrderingScheme().getOrdering(column)));
}
return ActualProperties.builderFrom(properties)
.local(localProperties.build())
.build();
}
        @Override
        public ActualProperties visitTopN(TopNNode node, List<ActualProperties> inputProperties)
        {
            ActualProperties properties = Iterables.getOnlyElement(inputProperties);
            // TopN output is sorted per the node's ordering scheme.
            List<SortingProperty<Symbol>> localProperties = node.getOrderingScheme().getOrderBy().stream()
                    .map(column -> new SortingProperty<>(column, node.getOrderingScheme().getOrdering(column)))
                    .collect(toImmutableList());
            return ActualProperties.builderFrom(properties)
                    .local(localProperties)
                    .build();
        }
        @Override
        public ActualProperties visitSort(SortNode node, List<ActualProperties> inputProperties)
        {
            ActualProperties properties = Iterables.getOnlyElement(inputProperties);
            // Sorted output: local properties become exactly the node's sort order.
            List<SortingProperty<Symbol>> localProperties = node.getOrderingScheme().getOrderBy().stream()
                    .map(column -> new SortingProperty<>(column, node.getOrderingScheme().getOrdering(column)))
                    .collect(toImmutableList());
            return ActualProperties.builderFrom(properties)
                    .local(localProperties)
                    .build();
        }
        @Override
        public ActualProperties visitLimit(LimitNode node, List<ActualProperties> inputProperties)
        {
            // Limiting rows does not change derived properties.
            return Iterables.getOnlyElement(inputProperties);
        }
        @Override
        public ActualProperties visitDistinctLimit(DistinctLimitNode node, List<ActualProperties> inputProperties)
        {
            ActualProperties properties = Iterables.getOnlyElement(inputProperties);
            // Output rows are grouped on the distinct symbols.
            return ActualProperties.builderFrom(properties)
                    .local(LocalProperties.grouped(node.getDistinctSymbols()))
                    .build();
        }
        @Override
        public ActualProperties visitStatisticsWriterNode(StatisticsWriterNode node, List<ActualProperties> context)
        {
            // Statistics are finalized on the coordinator in a single stream.
            return ActualProperties.builder()
                    .global(coordinatorSingleStreamPartition())
                    .build();
        }
        @Override
        public ActualProperties visitTableFinish(TableFinishNode node, List<ActualProperties> inputProperties)
        {
            // Table-finish runs on the coordinator in a single stream.
            return ActualProperties.builder()
                    .global(coordinatorSingleStreamPartition())
                    .build();
        }
        @Override
        public ActualProperties visitDelete(DeleteNode node, List<ActualProperties> inputProperties)
        {
            // drop all symbols in property because delete doesn't pass on any of the columns
            return Iterables.getOnlyElement(inputProperties).translate(symbol -> Optional.empty());
        }
        @Override
        public ActualProperties visitJoin(JoinNode node, List<ActualProperties> inputProperties)
        {
            // Derives properties per join type: inner joins keep probe-side properties
            // (plus merged constants); outer joins progressively weaken the guarantees
            // on the null-producing side.
            ActualProperties probeProperties = inputProperties.get(0);
            ActualProperties buildProperties = inputProperties.get(1);
            // Spilling may reorder rows, so ordering guarantees must be dropped when possible.
            boolean unordered = spillPossible(session, node.getType());
            switch (node.getType()) {
                case INNER:
                    probeProperties = probeProperties.translate(column -> filterOrRewrite(node.getOutputSymbols(), node.getCriteria(), column));
                    buildProperties = buildProperties.translate(column -> filterOrRewrite(node.getOutputSymbols(), node.getCriteria(), column));
                    Map<Symbol, NullableValue> constants = new HashMap<>();
                    constants.putAll(probeProperties.getConstants());
                    constants.putAll(buildProperties.getConstants());
                    if (node.isCrossJoin()) {
                        // Cross join preserves only constants from probe and build sides.
                        // Cross join doesn't preserve sorting or grouping local properties on either side.
                        return ActualProperties.builder()
                                .global(probeProperties)
                                .local(ImmutableList.of())
                                .constants(constants)
                                .build();
                    }
                    return ActualProperties.builderFrom(probeProperties)
                            .constants(constants)
                            .unordered(unordered)
                            .build();
                case LEFT:
                    return ActualProperties.builderFrom(probeProperties.translate(column -> filterIfMissing(node.getOutputSymbols(), column)))
                            .unordered(unordered)
                            .build();
                case RIGHT:
                    // NOTE(review): the translate below is applied twice (once here, once in
                    // builderFrom); looks redundant but appears harmless — confirm before simplifying.
                    buildProperties = buildProperties.translate(column -> filterIfMissing(node.getOutputSymbols(), column));
                    return ActualProperties.builderFrom(buildProperties.translate(column -> filterIfMissing(node.getOutputSymbols(), column)))
                            .local(ImmutableList.of())
                            .unordered(true)
                            .build();
                case FULL:
                    // We can't say anything about the partitioning scheme because any partition of
                    // a hash-partitioned join can produce nulls in case of a lack of matches
                    return ActualProperties.builder()
                            .global(probeProperties.isSingleNode() ? singleStreamPartition() : arbitraryPartition())
                            .build();
                default:
                    throw new UnsupportedOperationException("Unsupported join type: " + node.getType());
            }
        }
        @Override
        public ActualProperties visitSemiJoin(SemiJoinNode node, List<ActualProperties> inputProperties)
        {
            // Semi join preserves the source (probe) side's properties.
            return inputProperties.get(0);
        }
        @Override
        public ActualProperties visitSpatialJoin(SpatialJoinNode node, List<ActualProperties> inputProperties)
        {
            // Like visitJoin, but spatial joins support only INNER and LEFT types.
            ActualProperties probeProperties = inputProperties.get(0);
            ActualProperties buildProperties = inputProperties.get(1);
            switch (node.getType()) {
                case INNER:
                    probeProperties = probeProperties.translate(column -> filterIfMissing(node.getOutputSymbols(), column));
                    buildProperties = buildProperties.translate(column -> filterIfMissing(node.getOutputSymbols(), column));
                    // Constants from both sides survive an inner join.
                    Map<Symbol, NullableValue> constants = new HashMap<>();
                    constants.putAll(probeProperties.getConstants());
                    constants.putAll(buildProperties.getConstants());
                    return ActualProperties.builderFrom(probeProperties)
                            .constants(constants)
                            .build();
                case LEFT:
                    return ActualProperties.builderFrom(probeProperties.translate(column -> filterIfMissing(node.getOutputSymbols(), column)))
                            .build();
                default:
                    throw new IllegalArgumentException("Unsupported spatial join type: " + node.getType());
            }
        }
        @Override
        public ActualProperties visitIndexJoin(IndexJoinNode node, List<ActualProperties> inputProperties)
        {
            // TODO: include all equivalent columns in partitioning properties
            ActualProperties probeProperties = inputProperties.get(0);
            ActualProperties indexProperties = inputProperties.get(1);
            switch (node.getType()) {
                case INNER:
                    // Inner index join keeps probe properties plus constants from both sides.
                    return ActualProperties.builderFrom(probeProperties)
                            .constants(ImmutableMap.<Symbol, NullableValue>builder()
                                    .putAll(probeProperties.getConstants())
                                    .putAll(indexProperties.getConstants())
                                    .build())
                            .build();
                case SOURCE_OUTER:
                    // Outer: index side may produce nulls, so only probe-side constants are kept.
                    return ActualProperties.builderFrom(probeProperties)
                            .constants(probeProperties.getConstants())
                            .build();
                default:
                    throw new UnsupportedOperationException("Unsupported join type: " + node.getType());
            }
        }
        @Override
        public ActualProperties visitIndexSource(IndexSourceNode node, List<ActualProperties> context)
        {
            // An index source produces a single stream.
            return ActualProperties.builder()
                    .global(singleStreamPartition())
                    .build();
        }
        /**
         * Builds the positional mapping from the given source's input symbols to the
         * exchange's output symbols (inputs and outputs correspond index-by-index).
         */
        public static Map<Symbol, Symbol> exchangeInputToOutput(ExchangeNode node, int sourceIndex)
        {
            List<Symbol> inputSymbols = node.getInputs().get(sourceIndex);
            Map<Symbol, Symbol> inputToOutput = new HashMap<>();
            for (int i = 0; i < node.getOutputSymbols().size(); i++) {
                inputToOutput.put(inputSymbols.get(i), node.getOutputSymbols().get(i));
            }
            return inputToOutput;
        }
@Override
public ActualProperties visitExchange(ExchangeNode node, List<ActualProperties> inputProperties)
{
checkArgument(node.getScope() != REMOTE || inputProperties.stream().noneMatch(ActualProperties::isNullsAndAnyReplicated), "Null-and-any replicated inputs should not be remotely exchanged");
Set<Map.Entry<Symbol, NullableValue>> entries = null;
for (int sourceIndex = 0; sourceIndex < node.getSources().size(); sourceIndex++) {
Map<Symbol, Symbol> inputToOutput = exchangeInputToOutput(node, sourceIndex);
ActualProperties translated = inputProperties.get(sourceIndex).translate(symbol -> Optional.ofNullable(inputToOutput.get(symbol)));
entries = (entries == null) ? translated.getConstants().entrySet() : Sets.intersection(entries, translated.getConstants().entrySet());
}
checkState(entries != null);
Map<Symbol, NullableValue> constants = entries.stream()
.collect(toMap(Map.Entry::getKey, Map.Entry::getValue));
ImmutableList.Builder<SortingProperty<Symbol>> localProperties = ImmutableList.builder();
if (node.getOrderingScheme().isPresent()) {
node.getOrderingScheme().get().getOrderBy().stream()
.map(column -> new SortingProperty<>(column, node.getOrderingScheme().get().getOrdering(column)))
.forEach(localProperties::add);
}
// Local exchanges are only created in AddLocalExchanges, at the end of optimization, and
// local exchanges do not produce all global properties as represented by ActualProperties.
// This is acceptable because AddLocalExchanges does not use global properties and is only
// interested in the local properties.
// However, for the purpose of validation, some global properties (single-node vs distributed)
// are computed for local exchanges.
// TODO: implement full properties for local exchanges
if (node.getScope() == LOCAL) {
ActualProperties.Builder builder = ActualProperties.builder();
builder.local(localProperties.build());
builder.constants(constants);
if (inputProperties.stream().anyMatch(ActualProperties::isCoordinatorOnly)) {
builder.global(coordinatorSingleStreamPartition());
}
else if (inputProperties.stream().anyMatch(ActualProperties::isSingleNode)) {
builder.global(coordinatorSingleStreamPartition());
}
return builder.build();
}
switch (node.getType()) {
case GATHER:
boolean coordinatorOnly = node.getPartitioningScheme().getPartitioning().getHandle().isCoordinatorOnly();
return ActualProperties.builder()
.global(coordinatorOnly ? coordinatorSingleStreamPartition() : singleStreamPartition())
.local(localProperties.build())
.constants(constants)
.build();
case REPARTITION:
return ActualProperties.builder()
.global(partitionedOn(
node.getPartitioningScheme().getPartitioning(),
Optional.of(node.getPartitioningScheme().getPartitioning()))
.withReplicatedNulls(node.getPartitioningScheme().isReplicateNullsAndAny()))
.constants(constants)
.build();
case REPLICATE:
// TODO: this should have the same global properties as the stream taking the replicated data
return ActualProperties.builder()
.global(arbitraryPartition())
.constants(constants)
.build();
}
throw new UnsupportedOperationException("not yet implemented");
}
        @Override
        public ActualProperties visitFilter(FilterNode node, List<ActualProperties> inputProperties)
        {
            ActualProperties properties = Iterables.getOnlyElement(inputProperties);
            // Decompose the predicate; equality conjuncts (col = literal) become
            // additional constant properties on the output.
            DomainTranslator.ExtractionResult decomposedPredicate = DomainTranslator.fromPredicate(
                    metadata,
                    session,
                    node.getPredicate(),
                    types);
            Map<Symbol, NullableValue> constants = new HashMap<>(properties.getConstants());
            constants.putAll(extractFixedValues(decomposedPredicate.getTupleDomain()).orElse(ImmutableMap.of()));
            return ActualProperties.builderFrom(properties)
                    .constants(constants)
                    .build();
        }
        @Override
        public ActualProperties visitProject(ProjectNode node, List<ActualProperties> inputProperties)
        {
            ActualProperties properties = Iterables.getOnlyElement(inputProperties);
            // Translate properties through identity assignments (output = input symbol)
            // and through coalesce-based rewrites.
            Map<Symbol, Symbol> identities = computeIdentityTranslations(node.getAssignments().getMap());
            ActualProperties translatedProperties = properties.translate(column -> Optional.ofNullable(identities.get(column)), expression -> rewriteExpression(node.getAssignments().getMap(), expression));
            // Extract additional constants
            Map<Symbol, NullableValue> constants = new HashMap<>();
            for (Map.Entry<Symbol, Expression> assignment : node.getAssignments().entrySet()) {
                Expression expression = assignment.getValue();
                Map<NodeRef<Expression>, Type> expressionTypes = typeAnalyzer.getTypes(session, types, expression);
                Type type = requireNonNull(expressionTypes.get(NodeRef.of(expression)));
                ExpressionInterpreter optimizer = ExpressionInterpreter.expressionOptimizer(expression, metadata, session, expressionTypes);
                // TODO:
                // We want to use a symbol resolver that looks up in the constants from the input subplan
                // to take advantage of constant-folding for complex expressions
                // However, that currently causes errors when those expressions operate on arrays or row types
                // ("ROW comparison not supported for fields with null elements", etc)
                Object value = optimizer.optimize(NoOpSymbolResolver.INSTANCE);
                if (value instanceof SymbolReference) {
                    // The assignment folded to a bare symbol reference: the output is a
                    // constant only when the referenced symbol is itself known constant.
                    Symbol symbol = Symbol.from((SymbolReference) value);
                    NullableValue existingConstantValue = constants.get(symbol);
                    if (existingConstantValue != null) {
                        constants.put(assignment.getKey(), new NullableValue(type, value));
                    }
                }
                else if (!(value instanceof Expression)) {
                    // Fully folded to a literal value.
                    constants.put(assignment.getKey(), new NullableValue(type, value));
                }
            }
            constants.putAll(translatedProperties.getConstants());
            return ActualProperties.builderFrom(translatedProperties)
                    .constants(constants)
                    .build();
        }
        @Override
        public ActualProperties visitTableWriter(TableWriterNode node, List<ActualProperties> inputProperties)
        {
            ActualProperties properties = Iterables.getOnlyElement(inputProperties);
            // The writer inherits only the coarse execution placement of its input.
            if (properties.isCoordinatorOnly()) {
                return ActualProperties.builder()
                        .global(coordinatorSingleStreamPartition())
                        .build();
            }
            return ActualProperties.builder()
                    .global(properties.isSingleNode() ? singleStreamPartition() : arbitraryPartition())
                    .build();
        }
        @Override
        public ActualProperties visitSample(SampleNode node, List<ActualProperties> inputProperties)
        {
            // Sampling drops rows but does not change derived properties.
            return Iterables.getOnlyElement(inputProperties);
        }
@Override
public ActualProperties visitUnnest(UnnestNode node, List<ActualProperties> inputProperties)
{
Set<Symbol> passThroughInputs = ImmutableSet.copyOf(node.getReplicateSymbols());
return Iterables.getOnlyElement(inputProperties).translate(column -> {
if (passThroughInputs.contains(column)) {
return Optional.of(column);
}
return Optional.empty();
});
}
        @Override
        public ActualProperties visitValues(ValuesNode node, List<ActualProperties> context)
        {
            // Inline VALUES are produced as a single stream.
            return ActualProperties.builder()
                    .global(singleStreamPartition())
                    .build();
        }
@Override
public ActualProperties visitTableScan(TableScanNode node, List<ActualProperties> inputProperties)
{
TableProperties layout = metadata.getTableProperties(session, node.getTable());
Map<ColumnHandle, Symbol> assignments = ImmutableBiMap.copyOf(node.getAssignments()).inverse();
ActualProperties.Builder properties = ActualProperties.builder();
// Globally constant assignments
Map<ColumnHandle, NullableValue> globalConstants = new HashMap<>();
extractFixedValues(metadata.getTableProperties(session, node.getTable()).getPredicate())
.orElse(ImmutableMap.of())
.entrySet().stream()
.filter(entry -> !entry.getValue().isNull())
.forEach(entry -> globalConstants.put(entry.getKey(), entry.getValue()));
Map<Symbol, NullableValue> symbolConstants = globalConstants.entrySet().stream()
.filter(entry -> assignments.containsKey(entry.getKey()))
.collect(toMap(entry -> assignments.get(entry.getKey()), Map.Entry::getValue));
properties.constants(symbolConstants);
// Partitioning properties
properties.global(deriveGlobalProperties(layout, assignments, globalConstants));
// Append the global constants onto the local properties to maximize their translation potential
List<LocalProperty<ColumnHandle>> constantAppendedLocalProperties = ImmutableList.<LocalProperty<ColumnHandle>>builder()
.addAll(globalConstants.keySet().stream().map(ConstantProperty::new).iterator())
.addAll(layout.getLocalProperties())
.build();
properties.local(LocalProperties.translate(constantAppendedLocalProperties, column -> Optional.ofNullable(assignments.get(column))));
return properties.build();
}
        /**
         * Derives the global (node-level) partitioning of a table scan: the table's
         * declared partitioning if the session allows planning with it and all
         * partitioning columns are projected; otherwise stream partitioning if
         * known; otherwise an arbitrary distribution.
         */
        private Global deriveGlobalProperties(TableProperties layout, Map<ColumnHandle, Symbol> assignments, Map<ColumnHandle, NullableValue> constants)
        {
            Optional<List<Symbol>> streamPartitioning = layout.getStreamPartitioningColumns()
                    .flatMap(columns -> translateToNonConstantSymbols(columns, assignments, constants));
            if (planWithTableNodePartitioning(session) && layout.getTablePartitioning().isPresent()) {
                TablePartitioning tablePartitioning = layout.getTablePartitioning().get();
                if (assignments.keySet().containsAll(tablePartitioning.getPartitioningColumns())) {
                    List<Symbol> arguments = tablePartitioning.getPartitioningColumns().stream()
                            .map(assignments::get)
                            .collect(toImmutableList());
                    return partitionedOn(tablePartitioning.getPartitioningHandle(), arguments, streamPartitioning);
                }
            }
            if (streamPartitioning.isPresent()) {
                return streamPartitionedOn(streamPartitioning.get());
            }
            return arbitraryPartition();
        }
        /**
         * Translates the given column handles to symbols, first dropping columns that
         * are globally constant (they need no translation). Returns empty if any
         * remaining column has no symbol assignment, meaning translation is impossible.
         */
        private static Optional<List<Symbol>> translateToNonConstantSymbols(
                Set<ColumnHandle> columnHandles,
                Map<ColumnHandle, Symbol> assignments,
                Map<ColumnHandle, NullableValue> globalConstants)
        {
            // Strip off the constants from the partitioning columns (since those are not required for translation)
            Set<ColumnHandle> constantsStrippedColumns = columnHandles.stream()
                    .filter(column -> !globalConstants.containsKey(column))
                    .collect(toImmutableSet());
            ImmutableSet.Builder<Symbol> builder = ImmutableSet.builder();
            for (ColumnHandle column : constantsStrippedColumns) {
                Symbol translated = assignments.get(column);
                if (translated == null) {
                    return Optional.empty();
                }
                builder.add(translated);
            }
            return Optional.of(ImmutableList.copyOf(builder.build()));
        }
private static Map<Symbol, Symbol> computeIdentityTranslations(Map<Symbol, Expression> assignments)
{
Map<Symbol, Symbol> inputToOutput = new HashMap<>();
for (Map.Entry<Symbol, Expression> assignment : assignments.entrySet()) {
if (assignment.getValue() instanceof SymbolReference) {
inputToOutput.put(Symbol.from(assignment.getValue()), assignment.getKey());
}
}
return inputToOutput;
}
}
    /**
     * Returns whether a join of the given type might spill (and thus reorder rows)
     * under the current session settings. Used to decide whether ordering-related
     * properties must be dropped.
     */
    static boolean spillPossible(Session session, JoinNode.Type joinType)
    {
        if (!SystemSessionProperties.isSpillEnabled(session)) {
            return false;
        }
        switch (joinType) {
            case INNER:
            case LEFT:
                // Even though join might not have "spillable" property set yet
                // it might still be set as spillable later on by AddLocalExchanges.
                return true;
            case RIGHT:
            case FULL:
                // Currently there is no spill support for outer on the build side.
                return false;
            default:
                throw new IllegalStateException("Unknown join type: " + joinType);
        }
    }
public static Optional<Symbol> filterIfMissing(Collection<Symbol> columns, Symbol column)
{
if (columns.contains(column)) {
return Optional.of(column);
}
return Optional.empty();
}
    // Used to filter columns that are not exposed by join node
    // Or, if they are part of the equalities, to translate them
    // to the other symbol if that's exposed, instead.
    public static Optional<Symbol> filterOrRewrite(Collection<Symbol> columns, Collection<JoinNode.EquiJoinClause> equalities, Symbol column)
    {
        // symbol is exposed directly, so no translation needed
        if (columns.contains(column)) {
            return Optional.of(column);
        }
        // if the column is part of the equality conditions and its counterpart
        // is exposed, use that, instead
        for (JoinNode.EquiJoinClause equality : equalities) {
            if (equality.getLeft().equals(column) && columns.contains(equality.getRight())) {
                return Optional.of(equality.getRight());
            }
            else if (equality.getRight().equals(column) && columns.contains(equality.getLeft())) {
                return Optional.of(equality.getLeft());
            }
        }
        // Not exposed and no exposed counterpart: drop the property.
        return Optional.empty();
    }
    /**
     * Attempts to find an output symbol whose assignment is a coalesce over the
     * same set of symbols as {@code expression}, enabling property translation
     * through coalesce expressions (e.g. full outer join keys).
     */
    private static Optional<Symbol> rewriteExpression(Map<Symbol, Expression> assignments, Expression expression)
    {
        // Only simple coalesce expressions supported currently
        if (!(expression instanceof CoalesceExpression)) {
            return Optional.empty();
        }
        Set<Expression> arguments = ImmutableSet.copyOf(((CoalesceExpression) expression).getOperands());
        if (!arguments.stream().allMatch(SymbolReference.class::isInstance)) {
            return Optional.empty();
        }
        // We are using the property that the result of coalesce from full outer join keys would not be null despite of the order
        // of the arguments. Thus we extract and compare the symbols of the CoalesceExpression as a set rather than compare the
        // CoalesceExpression directly.
        for (Map.Entry<Symbol, Expression> entry : assignments.entrySet()) {
            if (entry.getValue() instanceof CoalesceExpression) {
                Set<Expression> candidateArguments = ImmutableSet.copyOf(((CoalesceExpression) entry.getValue()).getOperands());
                if (!candidateArguments.stream().allMatch(SymbolReference.class::isInstance)) {
                    return Optional.empty();
                }
                if (candidateArguments.equals(arguments)) {
                    return Optional.of(entry.getKey());
                }
            }
        }
        return Optional.empty();
    }
}
| |
import static org.junit.Assert.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.util.*;
/**
 * Tests for the {@code AtaqueIntercalado} strategy: the attack order must
 * alternate between green and night elves, and must be empty whenever a valid
 * alternating order cannot be produced (unbalanced armies, dead elves, null
 * entries, no dwarves, empty army).
 */
public class AtaqueIntercaladoTest {
    // NOTE(review): annotated @Before, so despite its name this runs BEFORE
    // each test; it only nudges the GC to keep test runs independent.
    @Before
    public void tearDown() {
        System.gc();
        System.runFinalization();
    }

    // Builds a list of `anoes` dwarves with distinct names.
    private ArrayList<Dwarf> factoryDwarfs(int anoes) {
        ArrayList<Dwarf> dwarfs = new ArrayList<>();
        for (int i = 0; i < anoes; i++) {
            dwarfs.add(new Dwarf("Anao" + i));
        }
        return dwarfs;
    }

    // Army with the green elves enlisted before the night elves.
    private ExercitoDeElfos factoryElfosVerdesPrimeiro(int elfosVerdes, int elfosNoturnos) {
        ExercitoDeElfos exercitoDeElfos = new ExercitoDeElfos();
        for (int i = 0; i < elfosVerdes; i++)
            exercitoDeElfos.alistaElfo(new ElfoVerde("ElfoVerde" + i));
        for (int i = 0; i < elfosNoturnos; i++)
            exercitoDeElfos.alistaElfo(new ElfoNoturno("ElfoNortuno" + i));
        return exercitoDeElfos;
    }

    // Army with the night elves enlisted before the green elves.
    private ExercitoDeElfos factoryElfosNoturnosPrimeiro(int elfosNoturnos, int elfosVerdes) {
        ExercitoDeElfos exercitoDeElfos = new ExercitoDeElfos();
        for (int i = 0; i < elfosNoturnos; i++)
            exercitoDeElfos.alistaElfo(new ElfoNoturno("ElfoNoturno" + i));
        for (int i = 0; i < elfosVerdes; i++)
            exercitoDeElfos.alistaElfo(new ElfoVerde("ElfoVerde" + i));
        return exercitoDeElfos;
    }

    // Kills the first `mataQuantos` elves by making each shoot 92 arrows at an
    // immortal dwarf (draining the elf's own life with every shot).
    private void mataElfos(ExercitoDeElfos exercitoDeElfos, int mataQuantos) {
        DataTerceiraEra dateTerceiraEra = new DataTerceiraEra(1, 1, 2000);
        Dwarf dwarf = new Dwarf("Imortal", dateTerceiraEra);
        for (Elfo elfo : exercitoDeElfos.getExercito().values()) {
            if (mataQuantos-- <= 0)
                break;
            for (int i = 0; i < 92; i++)
                elfo.atirarFlecha(dwarf);
        }
    }

    // True when no two CONSECUTIVE elves in the attack order share a class.
    // BUG FIX: the previous version never advanced `previous`, so it compared
    // every elf against a freshly constructed base Elfo rather than against its
    // predecessor — the method did not actually verify interleaving.
    private boolean ataqueFoiIntercalado(ArrayList<Elfo> elfos) {
        Class<?> previousClass = null;
        for (Elfo elfo : elfos) {
            if (elfo.getClass().equals(previousClass))
                return false;
            previousClass = elfo.getClass();
        }
        return true;
    }

    @Test
    public void dezElfosAtacam6Anoes() {
        Estrategia strat = new AtaqueIntercalado();
        ExercitoDeElfos exercitoDeElfos = factoryElfosVerdesPrimeiro(7, 3);
        ArrayList<Dwarf> dwarfs = factoryDwarfs(6);
        exercitoDeElfos.agrupaPorStatus();
        strat.atacar(exercitoDeElfos.getExercitoAgrupado(), dwarfs);
        ArrayList<Elfo> ordemAtaque = strat.getOrdemDoUltimoAtaque();
        assertTrue(ordemAtaque.isEmpty());
    }

    @Test
    public void dezElfosNoturnosAtacam6Anoes() {
        Estrategia strat = new AtaqueIntercalado();
        ExercitoDeElfos exercitoDeElfos = factoryElfosVerdesPrimeiro(0, 10);
        ArrayList<Dwarf> dwarfs = factoryDwarfs(6);
        exercitoDeElfos.agrupaPorStatus();
        strat.atacar(exercitoDeElfos.getExercitoAgrupado(), dwarfs);
        ArrayList<Elfo> ordemAtaque = strat.getOrdemDoUltimoAtaque();
        assertTrue(ordemAtaque.isEmpty());
    }

    @Test
    public void cincoElfosVerdesCincoElfoNoturnoAtacaUmAnao() {
        Estrategia strat = new AtaqueIntercalado();
        ExercitoDeElfos exercitoDeElfos = factoryElfosVerdesPrimeiro(5, 5);
        ArrayList<Dwarf> dwarfs = factoryDwarfs(1);
        exercitoDeElfos.agrupaPorStatus();
        strat.atacar(exercitoDeElfos.getExercitoAgrupado(), dwarfs);
        ArrayList<Elfo> ordemAtaque = strat.getOrdemDoUltimoAtaque();
        assertTrue(ataqueFoiIntercalado(ordemAtaque));
    }

    @Test
    public void passaListaDwarfNulo() {
        Estrategia strat = new AtaqueIntercalado();
        ExercitoDeElfos exercitoDeElfos = factoryElfosVerdesPrimeiro(5, 5);
        ArrayList<Dwarf> dwarfs = null;
        exercitoDeElfos.agrupaPorStatus();
        strat.atacar(exercitoDeElfos.getExercitoAgrupado(), dwarfs);
        ArrayList<Elfo> ordemAtaque = strat.getOrdemDoUltimoAtaque();
        assertTrue(ataqueFoiIntercalado(ordemAtaque));
    }

    @Test
    public void passaExercitoVazio() {
        Estrategia strat = new AtaqueIntercalado();
        ExercitoDeElfos exercitoDeElfos = factoryElfosVerdesPrimeiro(0, 0);
        ArrayList<Dwarf> dwarfs = factoryDwarfs(1);
        exercitoDeElfos.agrupaPorStatus();
        strat.atacar(exercitoDeElfos.getExercitoAgrupado(), dwarfs);
        ArrayList<Elfo> ordemAtaque = strat.getOrdemDoUltimoAtaque();
        assertTrue(ordemAtaque.isEmpty());
    }

    @Test
    public void passa0Anoes() {
        Estrategia strat = new AtaqueIntercalado();
        ExercitoDeElfos exercitoDeElfos = factoryElfosVerdesPrimeiro(10, 10);
        ArrayList<Dwarf> dwarfs = factoryDwarfs(0);
        exercitoDeElfos.agrupaPorStatus();
        strat.atacar(exercitoDeElfos.getExercitoAgrupado(), dwarfs);
        ArrayList<Elfo> ordemAtaque = strat.getOrdemDoUltimoAtaque();
        assertTrue(ordemAtaque.isEmpty());
    }

    @Test
    public void passaUmElfoNulo() {
        Estrategia strat = new AtaqueIntercalado();
        ExercitoDeElfos exercitoDeElfos = factoryElfosVerdesPrimeiro(7, 3);
        exercitoDeElfos.alistaElfo(null);
        ArrayList<Dwarf> dwarfs = factoryDwarfs(5);
        exercitoDeElfos.agrupaPorStatus();
        strat.atacar(exercitoDeElfos.getExercitoAgrupado(), dwarfs);
        ArrayList<Elfo> ordemAtaque = strat.getOrdemDoUltimoAtaque();
        assertTrue(ordemAtaque.isEmpty());
    }

    @Test
    public void duzentosElfosNoturnosTentamAtacar() {
        Estrategia strat = new AtaqueIntercalado();
        ExercitoDeElfos exercitoDeElfos = factoryElfosVerdesPrimeiro(1, 200);
        ArrayList<Dwarf> dwarfs = factoryDwarfs(1);
        exercitoDeElfos.agrupaPorStatus();
        strat.atacar(exercitoDeElfos.getExercitoAgrupado(), dwarfs);
        ArrayList<Elfo> ordemAtaque = strat.getOrdemDoUltimoAtaque();
        assertTrue(ordemAtaque.isEmpty());
    }

    @Test
    public void intercalaElfoNoturnoAtaca() {
        Estrategia strat = new AtaqueIntercalado();
        ExercitoDeElfos exercitoDeElfos = factoryElfosVerdesPrimeiro(1, 1);
        ArrayList<Dwarf> dwarfs = factoryDwarfs(1);
        exercitoDeElfos.agrupaPorStatus();
        strat.atacar(exercitoDeElfos.getExercitoAgrupado(), dwarfs);
        ArrayList<Elfo> ordemAtaque = strat.getOrdemDoUltimoAtaque();
        assertTrue(ataqueFoiIntercalado(ordemAtaque));
    }

    @Test
    public void zeroElfosVerdes() {
        Estrategia strat = new AtaqueIntercalado();
        ExercitoDeElfos exercitoDeElfos = factoryElfosVerdesPrimeiro(0, 5);
        ArrayList<Dwarf> dwarfs = factoryDwarfs(5);
        exercitoDeElfos.agrupaPorStatus();
        strat.atacar(exercitoDeElfos.getExercitoAgrupado(), dwarfs);
        ArrayList<Elfo> ordemAtaque = strat.getOrdemDoUltimoAtaque();
        assertTrue(ordemAtaque.isEmpty());
    }

    @Test
    public void zeroElfosNoturnos() {
        Estrategia strat = new AtaqueIntercalado();
        ExercitoDeElfos exercitoDeElfos = factoryElfosVerdesPrimeiro(5, 0);
        ArrayList<Dwarf> dwarfs = factoryDwarfs(5);
        exercitoDeElfos.agrupaPorStatus();
        strat.atacar(exercitoDeElfos.getExercitoAgrupado(), dwarfs);
        ArrayList<Elfo> ordemAtaque = strat.getOrdemDoUltimoAtaque();
        assertTrue(ordemAtaque.isEmpty());
    }

    @Test
    public void cincoElfosMortos() {
        Estrategia strat = new AtaqueIntercalado();
        ExercitoDeElfos exercitoDeElfos = factoryElfosVerdesPrimeiro(10, 15);
        mataElfos(exercitoDeElfos, 5);
        ArrayList<Dwarf> dwarfs = factoryDwarfs(5);
        exercitoDeElfos.agrupaPorStatus();
        strat.atacar(exercitoDeElfos.getExercitoAgrupado(), dwarfs);
        ArrayList<Elfo> ordemAtaque = strat.getOrdemDoUltimoAtaque();
        assertTrue(ataqueFoiIntercalado(ordemAtaque));
    }

    @Test
    public void todosElfosMortos() {
        Estrategia strat = new AtaqueIntercalado();
        ExercitoDeElfos exercitoDeElfos = factoryElfosVerdesPrimeiro(0, 5);
        mataElfos(exercitoDeElfos, 5);
        ArrayList<Dwarf> dwarfs = factoryDwarfs(5);
        exercitoDeElfos.agrupaPorStatus();
        strat.atacar(exercitoDeElfos.getExercitoAgrupado(), dwarfs);
        ArrayList<Elfo> ordemAtaque = strat.getOrdemDoUltimoAtaque();
        assertTrue(ordemAtaque.isEmpty());
    }

    @Test
    public void dezElfosAtacam6AnoesNoturnosPrimeiro() {
        Estrategia strat = new AtaqueIntercalado();
        ExercitoDeElfos exercitoDeElfos = factoryElfosNoturnosPrimeiro(3, 7);
        ArrayList<Dwarf> dwarfs = factoryDwarfs(6);
        exercitoDeElfos.agrupaPorStatus();
        strat.atacar(exercitoDeElfos.getExercitoAgrupado(), dwarfs);
        ArrayList<Elfo> ordemAtaque = strat.getOrdemDoUltimoAtaque();
        assertTrue(ordemAtaque.isEmpty());
    }

    @Test
    public void dezElfosNoturnosAtacam6AnoesNoturnosPrimeiro() {
        Estrategia strat = new AtaqueIntercalado();
        ExercitoDeElfos exercitoDeElfos = factoryElfosNoturnosPrimeiro(10, 0);
        ArrayList<Dwarf> dwarfs = factoryDwarfs(6);
        exercitoDeElfos.agrupaPorStatus();
        strat.atacar(exercitoDeElfos.getExercitoAgrupado(), dwarfs);
        ArrayList<Elfo> ordemAtaque = strat.getOrdemDoUltimoAtaque();
        assertTrue(ordemAtaque.isEmpty());
    }

    @Test
    public void cincoElfosVerdesCincoElfoNoturnoAtacaUmAnaoNoturnosPrimeiro() {
        Estrategia strat = new AtaqueIntercalado();
        ExercitoDeElfos exercitoDeElfos = factoryElfosNoturnosPrimeiro(5, 5);
        ArrayList<Dwarf> dwarfs = factoryDwarfs(1);
        exercitoDeElfos.agrupaPorStatus();
        strat.atacar(exercitoDeElfos.getExercitoAgrupado(), dwarfs);
        ArrayList<Elfo> ordemAtaque = strat.getOrdemDoUltimoAtaque();
        assertTrue(ataqueFoiIntercalado(ordemAtaque));
    }

    @Test
    public void passaListaDwarfNuloNoturnosPrimeiro() {
        Estrategia strat = new AtaqueIntercalado();
        ExercitoDeElfos exercitoDeElfos = factoryElfosNoturnosPrimeiro(5, 5);
        ArrayList<Dwarf> dwarfs = null;
        exercitoDeElfos.agrupaPorStatus();
        strat.atacar(exercitoDeElfos.getExercitoAgrupado(), dwarfs);
        ArrayList<Elfo> ordemAtaque = strat.getOrdemDoUltimoAtaque();
        assertTrue(ataqueFoiIntercalado(ordemAtaque));
    }

    @Test
    public void passaExercitoVazioNoturnosPrimeiro() {
        Estrategia strat = new AtaqueIntercalado();
        ExercitoDeElfos exercitoDeElfos = factoryElfosNoturnosPrimeiro(0, 0);
        ArrayList<Dwarf> dwarfs = factoryDwarfs(1);
        exercitoDeElfos.agrupaPorStatus();
        strat.atacar(exercitoDeElfos.getExercitoAgrupado(), dwarfs);
        ArrayList<Elfo> ordemAtaque = strat.getOrdemDoUltimoAtaque();
        assertTrue(ordemAtaque.isEmpty());
    }

    @Test
    public void passa0AnoesNoturnosPrimeiro() {
        Estrategia strat = new AtaqueIntercalado();
        ExercitoDeElfos exercitoDeElfos = factoryElfosNoturnosPrimeiro(10, 10);
        ArrayList<Dwarf> dwarfs = factoryDwarfs(0);
        exercitoDeElfos.agrupaPorStatus();
        strat.atacar(exercitoDeElfos.getExercitoAgrupado(), dwarfs);
        ArrayList<Elfo> ordemAtaque = strat.getOrdemDoUltimoAtaque();
        assertTrue(ordemAtaque.isEmpty());
    }

    @Test
    public void passaUmElfoNuloNoturnosPrimeiro() {
        Estrategia strat = new AtaqueIntercalado();
        ExercitoDeElfos exercitoDeElfos = factoryElfosNoturnosPrimeiro(3, 7);
        exercitoDeElfos.alistaElfo(null);
        ArrayList<Dwarf> dwarfs = factoryDwarfs(5);
        exercitoDeElfos.agrupaPorStatus();
        strat.atacar(exercitoDeElfos.getExercitoAgrupado(), dwarfs);
        ArrayList<Elfo> ordemAtaque = strat.getOrdemDoUltimoAtaque();
        assertTrue(ordemAtaque.isEmpty());
    }

    @Test
    public void duzentosElfosNoturnosTentamAtacarNoturnosPrimeiro() {
        Estrategia strat = new AtaqueIntercalado();
        ExercitoDeElfos exercitoDeElfos = factoryElfosNoturnosPrimeiro(200, 1);
        ArrayList<Dwarf> dwarfs = factoryDwarfs(1);
        exercitoDeElfos.agrupaPorStatus();
        strat.atacar(exercitoDeElfos.getExercitoAgrupado(), dwarfs);
        ArrayList<Elfo> ordemAtaque = strat.getOrdemDoUltimoAtaque();
        assertTrue(ordemAtaque.isEmpty());
    }

    @Test
    public void intercalaElfoNoturnoAtacaNoturnosPrimeiro() {
        Estrategia strat = new AtaqueIntercalado();
        ExercitoDeElfos exercitoDeElfos = factoryElfosNoturnosPrimeiro(1, 1);
        ArrayList<Dwarf> dwarfs = factoryDwarfs(1);
        exercitoDeElfos.agrupaPorStatus();
        strat.atacar(exercitoDeElfos.getExercitoAgrupado(), dwarfs);
        ArrayList<Elfo> ordemAtaque = strat.getOrdemDoUltimoAtaque();
        assertTrue(ataqueFoiIntercalado(ordemAtaque));
    }

    @Test
    public void zeroElfosVerdesNoturnosPrimeiro() {
        Estrategia strat = new AtaqueIntercalado();
        ExercitoDeElfos exercitoDeElfos = factoryElfosNoturnosPrimeiro(5, 0);
        ArrayList<Dwarf> dwarfs = factoryDwarfs(5);
        exercitoDeElfos.agrupaPorStatus();
        strat.atacar(exercitoDeElfos.getExercitoAgrupado(), dwarfs);
        ArrayList<Elfo> ordemAtaque = strat.getOrdemDoUltimoAtaque();
        assertTrue(ordemAtaque.isEmpty());
    }

    @Test
    public void zeroElfosNoturnosNoturnosPrimeiro() {
        Estrategia strat = new AtaqueIntercalado();
        ExercitoDeElfos exercitoDeElfos = factoryElfosNoturnosPrimeiro(0, 5);
        ArrayList<Dwarf> dwarfs = factoryDwarfs(5);
        exercitoDeElfos.agrupaPorStatus();
        strat.atacar(exercitoDeElfos.getExercitoAgrupado(), dwarfs);
        ArrayList<Elfo> ordemAtaque = strat.getOrdemDoUltimoAtaque();
        assertTrue(ordemAtaque.isEmpty());
    }

    @Test
    public void cincoElfosMortosNoturnosPrimeiro() {
        Estrategia strat = new AtaqueIntercalado();
        ExercitoDeElfos exercitoDeElfos = factoryElfosNoturnosPrimeiro(15, 10);
        mataElfos(exercitoDeElfos, 5);
        ArrayList<Dwarf> dwarfs = factoryDwarfs(5);
        exercitoDeElfos.agrupaPorStatus();
        strat.atacar(exercitoDeElfos.getExercitoAgrupado(), dwarfs);
        ArrayList<Elfo> ordemAtaque = strat.getOrdemDoUltimoAtaque();
        assertTrue(ataqueFoiIntercalado(ordemAtaque));
    }

    @Test
    public void todosElfosMortosNoturnosPrimeiro() {
        Estrategia strat = new AtaqueIntercalado();
        ExercitoDeElfos exercitoDeElfos = factoryElfosNoturnosPrimeiro(5, 0);
        mataElfos(exercitoDeElfos, 5);
        ArrayList<Dwarf> dwarfs = factoryDwarfs(5);
        exercitoDeElfos.agrupaPorStatus();
        strat.atacar(exercitoDeElfos.getExercitoAgrupado(), dwarfs);
        ArrayList<Elfo> ordemAtaque = strat.getOrdemDoUltimoAtaque();
        assertTrue(ordemAtaque.isEmpty());
    }
}
| |
/*
Copyright (c) 2011+, HL7, Inc
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of HL7 nor the names of its contributors may be used to
endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
package org.hl7.fhir.utilities.xml;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.List;
import java.util.Set;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.utilities.Utilities;
import org.w3c.dom.Attr;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.ls.DOMImplementationLS;
import org.w3c.dom.ls.LSSerializer;
import org.xml.sax.SAXException;
public class XMLUtil {
public static final String SPACE_CHAR = "\u00A0";
public static boolean isNMToken(String name) {
if (name == null)
return false;
for (int i = 0; i < name.length(); i++)
if (!isNMTokenChar(name.charAt(i)))
return false;
return name.length() > 0;
}
public static boolean isNMTokenChar(char c) {
return isLetter(c) || isDigit(c) || c == '.' || c == '-' || c == '_' || c == ':' || isCombiningChar(c) || isExtender(c);
}
private static boolean isDigit(char c) {
return (c >= '\u0030' && c <= '\u0039') || (c >= '\u0660' && c <= '\u0669') || (c >= '\u06F0' && c <= '\u06F9') ||
(c >= '\u0966' && c <= '\u096F') || (c >= '\u09E6' && c <= '\u09EF') || (c >= '\u0A66' && c <= '\u0A6F') ||
(c >= '\u0AE6' && c <= '\u0AEF') || (c >= '\u0B66' && c <= '\u0B6F') || (c >= '\u0BE7' && c <= '\u0BEF') ||
(c >= '\u0C66' && c <= '\u0C6F') || (c >= '\u0CE6' && c <= '\u0CEF') || (c >= '\u0D66' && c <= '\u0D6F') ||
(c >= '\u0E50' && c <= '\u0E59') || (c >= '\u0ED0' && c <= '\u0ED9') || (c >= '\u0F20' && c <= '\u0F29');
}
  /**
   * Tests the XML 1.0 [87] CombiningChar production: combining diacritics and
   * vowel signs that may follow a base character inside a name. The ranges are
   * transcribed directly from the specification; do not reformat or "simplify".
   */
  private static boolean isCombiningChar(char c) {
    return (c >= '\u0300' && c <= '\u0345') || (c >= '\u0360' && c <= '\u0361') || (c >= '\u0483' && c <= '\u0486') ||
        (c >= '\u0591' && c <= '\u05A1') || (c >= '\u05A3' && c <= '\u05B9') || (c >= '\u05BB' && c <= '\u05BD') ||
        c == '\u05BF' || (c >= '\u05C1' && c <= '\u05C2') || c == '\u05C4' || (c >= '\u064B' && c <= '\u0652') ||
        c == '\u0670' || (c >= '\u06D6' && c <= '\u06DC') || (c >= '\u06DD' && c <= '\u06DF') || (c >= '\u06E0' && c <= '\u06E4') ||
        (c >= '\u06E7' && c <= '\u06E8') || (c >= '\u06EA' && c <= '\u06ED') || (c >= '\u0901' && c <= '\u0903') || c == '\u093C' ||
        (c >= '\u093E' && c <= '\u094C') || c == '\u094D' || (c >= '\u0951' && c <= '\u0954') || (c >= '\u0962' && c <= '\u0963') ||
        (c >= '\u0981' && c <= '\u0983') || c == '\u09BC' || c == '\u09BE' || c == '\u09BF' || (c >= '\u09C0' && c <= '\u09C4') ||
        (c >= '\u09C7' && c <= '\u09C8') || (c >= '\u09CB' && c <= '\u09CD') || c == '\u09D7' || (c >= '\u09E2' && c <= '\u09E3') ||
        c == '\u0A02' || c == '\u0A3C' || c == '\u0A3E' || c == '\u0A3F' || (c >= '\u0A40' && c <= '\u0A42') ||
        (c >= '\u0A47' && c <= '\u0A48') || (c >= '\u0A4B' && c <= '\u0A4D') || (c >= '\u0A70' && c <= '\u0A71') ||
        (c >= '\u0A81' && c <= '\u0A83') || c == '\u0ABC' || (c >= '\u0ABE' && c <= '\u0AC5') || (c >= '\u0AC7' && c <= '\u0AC9') ||
        (c >= '\u0ACB' && c <= '\u0ACD') || (c >= '\u0B01' && c <= '\u0B03') || c == '\u0B3C' || (c >= '\u0B3E' && c <= '\u0B43') ||
        (c >= '\u0B47' && c <= '\u0B48') || (c >= '\u0B4B' && c <= '\u0B4D') || (c >= '\u0B56' && c <= '\u0B57') ||
        (c >= '\u0B82' && c <= '\u0B83') || (c >= '\u0BBE' && c <= '\u0BC2') || (c >= '\u0BC6' && c <= '\u0BC8') ||
        (c >= '\u0BCA' && c <= '\u0BCD') || c == '\u0BD7' || (c >= '\u0C01' && c <= '\u0C03') || (c >= '\u0C3E' && c <= '\u0C44') ||
        (c >= '\u0C46' && c <= '\u0C48') || (c >= '\u0C4A' && c <= '\u0C4D') || (c >= '\u0C55' && c <= '\u0C56') ||
        (c >= '\u0C82' && c <= '\u0C83') || (c >= '\u0CBE' && c <= '\u0CC4') || (c >= '\u0CC6' && c <= '\u0CC8') ||
        (c >= '\u0CCA' && c <= '\u0CCD') || (c >= '\u0CD5' && c <= '\u0CD6') || (c >= '\u0D02' && c <= '\u0D03') ||
        (c >= '\u0D3E' && c <= '\u0D43') || (c >= '\u0D46' && c <= '\u0D48') || (c >= '\u0D4A' && c <= '\u0D4D') || c == '\u0D57' ||
        c == '\u0E31' || (c >= '\u0E34' && c <= '\u0E3A') || (c >= '\u0E47' && c <= '\u0E4E') || c == '\u0EB1' ||
        (c >= '\u0EB4' && c <= '\u0EB9') || (c >= '\u0EBB' && c <= '\u0EBC') || (c >= '\u0EC8' && c <= '\u0ECD') ||
        (c >= '\u0F18' && c <= '\u0F19') || c == '\u0F35' || c == '\u0F37' || c == '\u0F39' || c == '\u0F3E' || c == '\u0F3F' ||
        (c >= '\u0F71' && c <= '\u0F84') || (c >= '\u0F86' && c <= '\u0F8B') || (c >= '\u0F90' && c <= '\u0F95') || c == '\u0F97' ||
        (c >= '\u0F99' && c <= '\u0FAD') || (c >= '\u0FB1' && c <= '\u0FB7') || c == '\u0FB9' || (c >= '\u20D0' && c <= '\u20DC') ||
        c == '\u20E1' || (c >= '\u302A' && c <= '\u302F') || c == '\u3099' || c == '\u309A';
  }
private static boolean isExtender(char c) {
return c == '\u00B7' || c == '\u02D0' || c == '\u02D1' || c == '\u0387' || c == '\u0640' || c == '\u0E46' ||
c == '\u0EC6' || c == '\u3005' || (c >= '\u3031' && c <= '\u3035') || (c >= '\u309D' && c <= '\u309E') ||
(c >= '\u30FC' && c <= '\u30FE');
}
private static boolean isLetter(char c) {
return isBaseChar(c) || isIdeographic(c);
}
  /**
   * Tests the XML 1.0 [85] BaseChar production: alphabetic characters across
   * the scripts enumerated by the specification. The ranges are transcribed
   * directly from the spec; do not reformat or "simplify".
   */
  private static boolean isBaseChar(char c) {
    return (c >= '\u0041' && c <= '\u005A') || (c >= '\u0061' && c <= '\u007A') || (c >= '\u00C0' && c <= '\u00D6') ||
        (c >= '\u00D8' && c <= '\u00F6') || (c >= '\u00F8' && c <= '\u00FF') || (c >= '\u0100' && c <= '\u0131') ||
        (c >= '\u0134' && c <= '\u013E') || (c >= '\u0141' && c <= '\u0148') || (c >= '\u014A' && c <= '\u017E') ||
        (c >= '\u0180' && c <= '\u01C3') || (c >= '\u01CD' && c <= '\u01F0') || (c >= '\u01F4' && c <= '\u01F5') ||
        (c >= '\u01FA' && c <= '\u0217') || (c >= '\u0250' && c <= '\u02A8') || (c >= '\u02BB' && c <= '\u02C1') ||
        c == '\u0386' || (c >= '\u0388' && c <= '\u038A') || c == '\u038C' || (c >= '\u038E' && c <= '\u03A1') ||
        (c >= '\u03A3' && c <= '\u03CE') || (c >= '\u03D0' && c <= '\u03D6') || c == '\u03DA' || c == '\u03DC' || c == '\u03DE' ||
        c == '\u03E0' || (c >= '\u03E2' && c <= '\u03F3') || (c >= '\u0401' && c <= '\u040C') || (c >= '\u040E' && c <= '\u044F') ||
        (c >= '\u0451' && c <= '\u045C') || (c >= '\u045E' && c <= '\u0481') || (c >= '\u0490' && c <= '\u04C4') ||
        (c >= '\u04C7' && c <= '\u04C8') || (c >= '\u04CB' && c <= '\u04CC') || (c >= '\u04D0' && c <= '\u04EB') ||
        (c >= '\u04EE' && c <= '\u04F5') || (c >= '\u04F8' && c <= '\u04F9') || (c >= '\u0531' && c <= '\u0556') ||
        c == '\u0559' || (c >= '\u0561' && c <= '\u0586') || (c >= '\u05D0' && c <= '\u05EA') || (c >= '\u05F0' && c <= '\u05F2') ||
        (c >= '\u0621' && c <= '\u063A') || (c >= '\u0641' && c <= '\u064A') || (c >= '\u0671' && c <= '\u06B7') ||
        (c >= '\u06BA' && c <= '\u06BE') || (c >= '\u06C0' && c <= '\u06CE') || (c >= '\u06D0' && c <= '\u06D3') ||
        c == '\u06D5' || (c >= '\u06E5' && c <= '\u06E6') || (c >= '\u0905' && c <= '\u0939') || c == '\u093D' ||
        (c >= '\u0958' && c <= '\u0961') || (c >= '\u0985' && c <= '\u098C') || (c >= '\u098F' && c <= '\u0990') ||
        (c >= '\u0993' && c <= '\u09A8') || (c >= '\u09AA' && c <= '\u09B0') || c == '\u09B2' ||
        (c >= '\u09B6' && c <= '\u09B9') || (c >= '\u09DC' && c <= '\u09DD') || (c >= '\u09DF' && c <= '\u09E1') ||
        (c >= '\u09F0' && c <= '\u09F1') || (c >= '\u0A05' && c <= '\u0A0A') || (c >= '\u0A0F' && c <= '\u0A10') ||
        (c >= '\u0A13' && c <= '\u0A28') || (c >= '\u0A2A' && c <= '\u0A30') || (c >= '\u0A32' && c <= '\u0A33') ||
        (c >= '\u0A35' && c <= '\u0A36') || (c >= '\u0A38' && c <= '\u0A39') || (c >= '\u0A59' && c <= '\u0A5C') ||
        c == '\u0A5E' || (c >= '\u0A72' && c <= '\u0A74') || (c >= '\u0A85' && c <= '\u0A8B') || c == '\u0A8D' ||
        (c >= '\u0A8F' && c <= '\u0A91') || (c >= '\u0A93' && c <= '\u0AA8') || (c >= '\u0AAA' && c <= '\u0AB0') ||
        (c >= '\u0AB2' && c <= '\u0AB3') || (c >= '\u0AB5' && c <= '\u0AB9') || c == '\u0ABD' || c == '\u0AE0' ||
        (c >= '\u0B05' && c <= '\u0B0C') || (c >= '\u0B0F' && c <= '\u0B10') || (c >= '\u0B13' && c <= '\u0B28') ||
        (c >= '\u0B2A' && c <= '\u0B30') || (c >= '\u0B32' && c <= '\u0B33') || (c >= '\u0B36' && c <= '\u0B39') ||
        c == '\u0B3D' || (c >= '\u0B5C' && c <= '\u0B5D') || (c >= '\u0B5F' && c <= '\u0B61') ||
        (c >= '\u0B85' && c <= '\u0B8A') || (c >= '\u0B8E' && c <= '\u0B90') || (c >= '\u0B92' && c <= '\u0B95') ||
        (c >= '\u0B99' && c <= '\u0B9A') || c == '\u0B9C' || (c >= '\u0B9E' && c <= '\u0B9F') ||
        (c >= '\u0BA3' && c <= '\u0BA4') || (c >= '\u0BA8' && c <= '\u0BAA') || (c >= '\u0BAE' && c <= '\u0BB5') ||
        (c >= '\u0BB7' && c <= '\u0BB9') || (c >= '\u0C05' && c <= '\u0C0C') || (c >= '\u0C0E' && c <= '\u0C10') ||
        (c >= '\u0C12' && c <= '\u0C28') || (c >= '\u0C2A' && c <= '\u0C33') || (c >= '\u0C35' && c <= '\u0C39') ||
        (c >= '\u0C60' && c <= '\u0C61') || (c >= '\u0C85' && c <= '\u0C8C') || (c >= '\u0C8E' && c <= '\u0C90') ||
        (c >= '\u0C92' && c <= '\u0CA8') || (c >= '\u0CAA' && c <= '\u0CB3') || (c >= '\u0CB5' && c <= '\u0CB9') ||
        c == '\u0CDE' || (c >= '\u0CE0' && c <= '\u0CE1') || (c >= '\u0D05' && c <= '\u0D0C') ||
        (c >= '\u0D0E' && c <= '\u0D10') || (c >= '\u0D12' && c <= '\u0D28') || (c >= '\u0D2A' && c <= '\u0D39') ||
        (c >= '\u0D60' && c <= '\u0D61') || (c >= '\u0E01' && c <= '\u0E2E') || c == '\u0E30' ||
        (c >= '\u0E32' && c <= '\u0E33') || (c >= '\u0E40' && c <= '\u0E45') || (c >= '\u0E81' && c <= '\u0E82') ||
        c == '\u0E84' || (c >= '\u0E87' && c <= '\u0E88') || c == '\u0E8A' || c == '\u0E8D' || (c >= '\u0E94' && c <= '\u0E97') ||
        (c >= '\u0E99' && c <= '\u0E9F') || (c >= '\u0EA1' && c <= '\u0EA3') || c == '\u0EA5' || c == '\u0EA7' ||
        (c >= '\u0EAA' && c <= '\u0EAB') || (c >= '\u0EAD' && c <= '\u0EAE') || c == '\u0EB0' ||
        (c >= '\u0EB2' && c <= '\u0EB3') || c == '\u0EBD' || (c >= '\u0EC0' && c <= '\u0EC4') ||
        (c >= '\u0F40' && c <= '\u0F47') || (c >= '\u0F49' && c <= '\u0F69') || (c >= '\u10A0' && c <= '\u10C5') ||
        (c >= '\u10D0' && c <= '\u10F6') || c == '\u1100' || (c >= '\u1102' && c <= '\u1103') ||
        (c >= '\u1105' && c <= '\u1107') || c == '\u1109' || (c >= '\u110B' && c <= '\u110C') ||
        (c >= '\u110E' && c <= '\u1112') || c == '\u113C' || c == '\u113E' || c == '\u1140' || c == '\u114C' ||
        c == '\u114E' || c == '\u1150' || (c >= '\u1154' && c <= '\u1155') || c == '\u1159' ||
        (c >= '\u115F' && c <= '\u1161') || c == '\u1163' || c == '\u1165' || c == '\u1167' || c == '\u1169' ||
        (c >= '\u116D' && c <= '\u116E') || (c >= '\u1172' && c <= '\u1173') || c == '\u1175' ||
        c == '\u119E' || c == '\u11A8' || c == '\u11AB' || (c >= '\u11AE' && c <= '\u11AF') ||
        (c >= '\u11B7' && c <= '\u11B8') || c == '\u11BA' || (c >= '\u11BC' && c <= '\u11C2') ||
        c == '\u11EB' || c == '\u11F0' || c == '\u11F9' || (c >= '\u1E00' && c <= '\u1E9B') || (c >= '\u1EA0' && c <= '\u1EF9') ||
        (c >= '\u1F00' && c <= '\u1F15') || (c >= '\u1F18' && c <= '\u1F1D') || (c >= '\u1F20' && c <= '\u1F45') ||
        (c >= '\u1F48' && c <= '\u1F4D') || (c >= '\u1F50' && c <= '\u1F57') || c == '\u1F59' || c == '\u1F5B' || c == '\u1F5D' ||
        (c >= '\u1F5F' && c <= '\u1F7D') || (c >= '\u1F80' && c <= '\u1FB4') || (c >= '\u1FB6' && c <= '\u1FBC') ||
        c == '\u1FBE' || (c >= '\u1FC2' && c <= '\u1FC4') || (c >= '\u1FC6' && c <= '\u1FCC') ||
        (c >= '\u1FD0' && c <= '\u1FD3') || (c >= '\u1FD6' && c <= '\u1FDB') || (c >= '\u1FE0' && c <= '\u1FEC') ||
        (c >= '\u1FF2' && c <= '\u1FF4') || (c >= '\u1FF6' && c <= '\u1FFC') || c == '\u2126' ||
        (c >= '\u212A' && c <= '\u212B') || c == '\u212E' || (c >= '\u2180' && c <= '\u2182') ||
        (c >= '\u3041' && c <= '\u3094') || (c >= '\u30A1' && c <= '\u30FA') || (c >= '\u3105' && c <= '\u312C') ||
        (c >= '\uAC00' && c <= '\uD7A3');
  }
private static boolean isIdeographic(char c) {
return (c >= '\u4E00' && c <= '\u9FA5') || c == '\u3007' || (c >= '\u3021' && c <= '\u3029');
}
public static String determineEncoding(InputStream stream) throws IOException {
stream.mark(20000);
try {
int b0 = stream.read();
int b1 = stream.read();
int b2 = stream.read();
int b3 = stream.read();
if (b0 == 0xFE && b1 == 0xFF)
return "UTF-16BE";
else if (b0 == 0xFF && b1 == 0xFE)
return "UTF-16LE";
else if (b0 == 0xEF && b1 == 0xBB && b2 == 0xBF )
return "UTF-8";
else if (b0 == 0x00 && b1 == 0x3C && b2 == 0x00 && b3 == 0x3F)
return "UTF-16BE";
else if (b0 == 0x3C && b1 == 0x00 && b2 == 0x3F && b3 == 0x00)
return "UTF-16LE";
else if (b0 == 0x3C && b1 == 0x3F && b2 == 0x78 && b3 == 0x6D) {
// UTF-8, ISO 646, ASCII, some part of ISO 8859, Shift-JIS, EUC, or any other 7-bit, 8-bit, or mixed-width encoding
// which ensures that the characters of ASCII have their normal positions, width, and values; the actual encoding
// declaration must be read to detect which of these applies, but since all of these encodings use the same bit patterns
// for the relevant ASCII characters, the encoding declaration itself may be read reliably
InputStreamReader rdr = new InputStreamReader(stream, "US-ASCII");
String hdr = readFirstLine(rdr);
return extractEncoding(hdr);
} else
return null;
} finally {
stream.reset();
}
}
private static String extractEncoding(String hdr) {
int i = hdr.indexOf("encoding=");
if (i == -1)
return null;
hdr = hdr.substring(i+9);
char sep = hdr.charAt(0);
hdr = hdr.substring(1);
i = hdr.indexOf(sep);
if (i == -1)
return null;
return hdr.substring(0, i);
}
private static String readFirstLine(InputStreamReader rdr) throws IOException {
char[] buf = new char[1];
StringBuffer bldr = new StringBuffer();
rdr.read(buf);
while (buf[0] != '>') {
bldr.append(buf[0]);
rdr.read(buf);
}
return bldr.toString();
}
public static boolean charSetImpliesAscii(String charset) {
return charset.equals("ISO-8859-1") || charset.equals("US-ASCII");
}
/**
* Converts the raw characters to XML escape characters.
*
* @param rawContent
* @param charset Null when charset is not known, so we assume it's unicode
* @param isNoLines
* @return escape string
*/
public static String escapeXML(String rawContent, String charset, boolean isNoLines) {
if (rawContent == null)
return "";
else {
StringBuffer sb = new StringBuffer();
for (int i = 0; i < rawContent.length(); i++) {
char ch = rawContent.charAt(i);
if (ch == '\'')
sb.append("'");
else if (ch == '&')
sb.append("&");
else if (ch == '"')
sb.append(""");
else if (ch == '<')
sb.append("<");
else if (ch == '>')
sb.append(">");
else if (ch > '~' && charset != null && charSetImpliesAscii(charset))
// TODO - why is hashcode the only way to get the unicode number for the character
// in jre 5.0?
sb.append("&#x"+Integer.toHexString(ch).toUpperCase()+";");
else if (isNoLines) {
if (ch == '\r')
sb.append("
");
else if (ch != '\n')
sb.append(ch);
}
else
sb.append(ch);
}
return sb.toString();
}
}
public static Element getFirstChild(Element e) {
if (e == null)
return null;
Node n = e.getFirstChild();
while (n != null && n.getNodeType() != Node.ELEMENT_NODE)
n = n.getNextSibling();
return (Element) n;
}
public static Element getNamedChild(Element e, String name) {
Element c = getFirstChild(e);
while (c != null && !name.equals(c.getLocalName()) && !name.equals(c.getNodeName()))
c = getNextSibling(c);
return c;
}
public static Element getNextSibling(Element e) {
Node n = e.getNextSibling();
while (n != null && n.getNodeType() != Node.ELEMENT_NODE)
n = n.getNextSibling();
return (Element) n;
}
public static void getNamedChildren(Element e, String name, List<Element> set) {
Element c = getFirstChild(e);
while (c != null) {
if (name.equals(c.getLocalName()) || name.equals(c.getNodeName()) )
set.add(c);
c = getNextSibling(c);
}
}
  /**
   * Flattens an XHTML fragment into plain text: text-node content is trimmed,
   * runs of whitespace between pieces of content collapse to one space, and
   * element children are flattened recursively.
   *
   * @param r the element whose content is flattened
   * @return the concatenated text content
   */
  public static String htmlToXmlEscapedPlainText(Element r) {
    StringBuilder s = new StringBuilder();
    Node n = r.getFirstChild();
    boolean ws = false; // pending-whitespace flag: emit one space before the next content
    while (n != null) {
      if (n.getNodeType() == Node.TEXT_NODE) {
        String t = n.getTextContent().trim();
        if (Utilities.noString(t))
          ws = true; // whitespace-only text node: remember it, collapse to one space
        else {
          if (ws)
            s.append(" ");
          ws = false;
          s.append(t);
        }
      }
      if (n.getNodeType() == Node.ELEMENT_NODE) {
        if (ws)
          s.append(" ");
        ws = false;
        s.append(htmlToXmlEscapedPlainText((Element) n));
        // NOTE(review): this tests the PARENT's name (r), not the child's (n),
        // so a line break is appended after EVERY element child of a <br>/<p>.
        // It looks like it was meant to break after <br>/<p> children instead
        // -- confirm the intended behavior before changing.
        if (r.getNodeName().equals("br") || r.getNodeName().equals("p"))
          s.append("\r\n");
      }
      n = n.getNextSibling();
    }
    return s.toString();
  }
public static String htmlToXmlEscapedPlainText(String definition) throws ParserConfigurationException, SAXException, IOException {
return htmlToXmlEscapedPlainText(parseToDom("<div>"+definition+"</div>").getDocumentElement());
}
public static String elementToString(Element el) {
if (el == null)
return "";
Document document = el.getOwnerDocument();
DOMImplementationLS domImplLS = (DOMImplementationLS) document
.getImplementation();
LSSerializer serializer = domImplLS.createLSSerializer();
return serializer.writeToString(el);
}
public static String getNamedChildValue(Element element, String name) {
Element e = getNamedChild(element, name);
return e == null ? null : e.getAttribute("value");
}
public static void setNamedChildValue(Element element, String name, String value) throws FHIRException {
Element e = getNamedChild(element, name);
if (e == null)
throw new FHIRException("unable to find element "+name);
e.setAttribute("value", value);
}
public static void getNamedChildrenWithWildcard(Element focus, String name, List<Element> children) {
Element c = getFirstChild(focus);
while (c != null) {
String n = c.getLocalName() != null ? c.getLocalName() : c.getNodeName();
if (name.equals(n) || (name.endsWith("[x]") && n.startsWith(name.substring(0, name.length()-3))))
children.add(c);
c = getNextSibling(c);
}
}
public static void getNamedChildrenWithTails(Element focus, String name, List<Element> children, Set<String> typeTails) {
Element c = getFirstChild(focus);
while (c != null) {
String n = c.getLocalName() != null ? c.getLocalName() : c.getNodeName();
if (n.equals(name) || (!n.equals("responseCode") && (n.startsWith(name) && typeTails.contains(n.substring(name.length())))))
children.add(c);
c = getNextSibling(c);
}
}
public static boolean hasNamedChild(Element e, String name) {
Element c = getFirstChild(e);
while (c != null && !name.equals(c.getLocalName()) && !name.equals(c.getNodeName()))
c = getNextSibling(c);
return c != null;
}
// Parses an XML string into a (non namespace-aware) DOM Document.
// Fixed: the previous implementation round-tripped the String through
// content.getBytes(), which encodes with the platform default charset and can
// corrupt non-ASCII characters (and conflict with any encoding declared in the
// XML prolog). Parsing through a character-based InputSource avoids the
// byte conversion entirely.
// NOTE(review): DTDs/external entities are not disabled on this factory - if
// 'content' can be untrusted this parser is XXE-susceptible; confirm callers.
public static Document parseToDom(String content) throws ParserConfigurationException, SAXException, IOException {
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
factory.setNamespaceAware(false);
DocumentBuilder builder = factory.newDocumentBuilder();
return builder.parse(new org.xml.sax.InputSource(new java.io.StringReader(content)));
}
// Parses the XML file at 'filename' into a (non namespace-aware) DOM Document.
// Fixed: the FileInputStream was previously never closed - parse() does not
// guarantee closing the stream it is handed - leaking a file handle per call.
// NOTE(review): like parseToDom, external entities are not disabled (XXE risk
// for untrusted files).
public static Document parseFileToDom(String filename) throws ParserConfigurationException, SAXException, IOException {
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
factory.setNamespaceAware(false);
DocumentBuilder builder = factory.newDocumentBuilder();
try (java.io.FileInputStream in = new java.io.FileInputStream(filename)) {
return builder.parse(in);
}
}
// Returns the last child of 'e' that is an element node (skipping trailing
// text/comment nodes), or null when 'e' is null or has no element children.
public static Element getLastChild(Element e) {
if (e == null) {
return null;
}
Node candidate = e.getLastChild();
while (candidate != null && candidate.getNodeType() != Node.ELEMENT_NODE) {
candidate = candidate.getPreviousSibling();
}
return (Element) candidate;
}
// Returns the nearest preceding sibling of 'e' that is an element node, or
// null when there is none.
// Fixed: now tolerates a null argument (previously threw NullPointerException),
// matching the null-safety of the companion getLastChild helper.
public static Element getPrevSibling(Element e) {
if (e == null)
return null;
Node n = e.getPreviousSibling();
while (n != null && n.getNodeType() != Node.ELEMENT_NODE)
n = n.getPreviousSibling();
return (Element) n;
}
// Returns attribute 'aname' of the first child element named 'name', or null
// when no such child exists.
public static String getNamedChildAttribute(Element element, String name, String aname) {
Element child = getNamedChild(element, name);
if (child == null) {
return null;
}
return child.getAttribute(aname);
}
// Writes 'doc' to 'filename' as XML via an identity transform.
public static void writeDomToFile(Document doc, String filename) throws TransformerException {
Transformer identity = TransformerFactory.newInstance().newTransformer();
identity.transform(new DOMSource(doc), new StreamResult(new File(filename)));
}
// Returns the value of the xsi:type attribute (XML Schema instance namespace),
// or null when the element carries no such attribute.
public static String getXsiType(org.w3c.dom.Element element) {
Attr typeAttr = element.getAttributeNodeNS("http://www.w3.org/2001/XMLSchema-instance", "type");
if (typeAttr == null) {
return null;
}
return typeAttr.getTextContent();
}
// Concatenates the text of the DIRECT text-node children of 'node' (text
// inside nested elements is excluded) and trims the result.
public static String getDirectText(org.w3c.dom.Element node) {
StringBuilder text = new StringBuilder();
for (Node child = node.getFirstChild(); child != null; child = child.getNextSibling()) {
if (child.getNodeType() == Node.TEXT_NODE) {
text.append(child.getTextContent());
}
}
return text.toString().trim();
}
}
| |
package collide.junit;
import collide.junit.cases.ReflectionCaseNoMagic;
import static xapi.reflect.X_Reflect.magicClass;
import com.google.gwt.reflect.client.strategy.ReflectionStrategy;
/**
 * Minimal JUnit-style assertion support for GWT reflection test cases.
 * <p>
 * The assert* methods mirror {@code junit.framework.Assert} so subclasses can
 * run where JUnit itself is unavailable. All failures are reported as
 * {@link AssertionError} (string comparisons use {@code ComparisonFailure}).
 * <p>
 * Changes from the previous revision: the deprecated boxed-primitive
 * constructors ({@code new Double(..)}, {@code new Integer(..)}, ...) are
 * replaced by the cached {@code valueOf} factories, and
 * {@link #assertNull(Object)} fails directly instead of re-entering itself
 * through the two-argument overload. Behavior of every assertion is unchanged.
 */
@ReflectionStrategy(keepNothing=true)
@SuppressWarnings("rawtypes")
public class AbstractReflectionTest {
  // Class/member names shared by the concrete reflection test cases.
  protected static final Class CLASS_OBJECT = magicClass(Object.class);
  protected static final String METHOD_EQUALS = "equals";
  protected static final String METHOD_HASHCODE = "hashCode";
  protected static final String METHOD_TOSTRING = "toString";
  protected static final String PRIVATE_MEMBER = "privateCall";
  protected static final String PUBLIC_MEMBER = "publicCall";
  protected static final String OVERRIDE_FIELD = "overrideField";
  static final Class<ReflectionCaseNoMagic> NO_MAGIC = ReflectionCaseNoMagic.class;
  static final Class<ReflectionCaseNoMagic.Subclass> NO_MAGIC_SUBCLASS = ReflectionCaseNoMagic.Subclass.class;

  /** Fails unconditionally; a null message produces a bare AssertionError. */
  static public void fail(String message) {
    if (message == null) {
      throw new AssertionError();
    }
    throw new AssertionError(message);
  }

  /**
   * Asserts that a condition is true. If it isn't it throws
   * an AssertionFailedError with the given message.
   */
  static public void assertTrue(String message, boolean condition) {
    if (!condition) {
      fail(message);
    }
  }

  /**
   * Asserts that a condition is true. If it isn't it throws
   * an AssertionFailedError.
   */
  static public void assertTrue(boolean condition) {
    assertTrue(null, condition);
  }

  /**
   * Asserts that a condition is false. If it isn't it throws
   * an AssertionFailedError with the given message.
   */
  static public void assertFalse(String message, boolean condition) {
    assertTrue(message, !condition);
  }

  /**
   * Asserts that a condition is false. If it isn't it throws
   * an AssertionFailedError.
   */
  static public void assertFalse(boolean condition) {
    assertFalse(null, condition);
  }

  /**
   * Asserts that an object isn't null. If it is
   * an AssertionFailedError is thrown with the given message.
   */
  static public void assertNotNull(String message, Object object) {
    assertTrue(message, object != null);
  }

  /**
   * Asserts that an object isn't null.
   */
  static public void assertNotNull(Object object) {
    assertNotNull(null, object);
  }

  /**
   * Asserts that an object is null. If it isn't an {@link AssertionError} is
   * thrown with a message of the form: Expected: <null> but was: object
   *
   * @param object Object to check or <code>null</code>
   */
  static public void assertNull(Object object) {
    if (object != null) {
      // object is known non-null here, so fail directly with the built message
      fail("Expected: <null> but was: " + object.toString());
    }
  }

  /**
   * Asserts that an object is null. If it is not
   * an AssertionFailedError is thrown with the given message.
   */
  static public void assertNull(String message, Object object) {
    assertTrue(message, object == null);
  }

  /**
   * Asserts that two objects are equal. If they are not
   * an AssertionFailedError is thrown with the given message.
   */
  static public void assertEquals(String message, Object expected, Object actual) {
    if (expected == null && actual == null) {
      return;
    }
    if (expected != null && expected.equals(actual)) {
      return;
    }
    failNotEquals(message, expected, actual);
  }

  /**
   * Asserts that two objects are equal. If they are not
   * an AssertionFailedError is thrown.
   */
  static public void assertEquals(Object expected, Object actual) {
    assertEquals(null, expected, actual);
  }

  /**
   * Asserts that two Strings are equal; failures raise ComparisonFailure so
   * IDEs can show a diff.
   */
  static public void assertEquals(String message, String expected, String actual) {
    if (expected == null && actual == null) {
      return;
    }
    if (expected != null && expected.equals(actual)) {
      return;
    }
    String cleanMessage = message == null ? "" : message;
    throw new ComparisonFailure(cleanMessage, expected, actual);
  }

  /**
   * Asserts that two Strings are equal.
   */
  static public void assertEquals(String expected, String actual) {
    assertEquals(null, expected, actual);
  }

  /**
   * Asserts that two doubles are equal concerning a delta. If they are not
   * an AssertionFailedError is thrown with the given message. If the expected
   * value is infinity then the delta value is ignored.
   */
  static public void assertEquals(String message, double expected, double actual, double delta) {
    // Double.compare handles infinities and NaN == NaN
    if (Double.compare(expected, actual) == 0) {
      return;
    }
    if (!(Math.abs(expected - actual) <= delta)) {
      failNotEquals(message, Double.valueOf(expected), Double.valueOf(actual));
    }
  }

  /**
   * Asserts that two doubles are equal concerning a delta. If the expected
   * value is infinity then the delta value is ignored.
   */
  static public void assertEquals(double expected, double actual, double delta) {
    assertEquals(null, expected, actual, delta);
  }

  /**
   * Asserts that two floats are equal concerning a positive delta. If they
   * are not an AssertionFailedError is thrown with the given message. If the
   * expected value is infinity then the delta value is ignored.
   */
  static public void assertEquals(String message, float expected, float actual, float delta) {
    if (Float.compare(expected, actual) == 0) {
      return;
    }
    if (!(Math.abs(expected - actual) <= delta)) {
      failNotEquals(message, Float.valueOf(expected), Float.valueOf(actual));
    }
  }

  /**
   * Asserts that two floats are equal concerning a delta. If the expected
   * value is infinity then the delta value is ignored.
   */
  static public void assertEquals(float expected, float actual, float delta) {
    assertEquals(null, expected, actual, delta);
  }

  /**
   * Asserts that two longs are equal. If they are not
   * an AssertionFailedError is thrown with the given message.
   */
  static public void assertEquals(String message, long expected, long actual) {
    assertEquals(message, Long.valueOf(expected), Long.valueOf(actual));
  }

  /**
   * Asserts that two longs are equal.
   */
  static public void assertEquals(long expected, long actual) {
    assertEquals(null, expected, actual);
  }

  /**
   * Asserts that two booleans are equal. If they are not
   * an AssertionFailedError is thrown with the given message.
   */
  static public void assertEquals(String message, boolean expected, boolean actual) {
    assertEquals(message, Boolean.valueOf(expected), Boolean.valueOf(actual));
  }

  /**
   * Asserts that two booleans are equal.
   */
  static public void assertEquals(boolean expected, boolean actual) {
    assertEquals(null, expected, actual);
  }

  /**
   * Asserts that two bytes are equal. If they are not
   * an AssertionFailedError is thrown with the given message.
   */
  static public void assertEquals(String message, byte expected, byte actual) {
    assertEquals(message, Byte.valueOf(expected), Byte.valueOf(actual));
  }

  /**
   * Asserts that two bytes are equal.
   */
  static public void assertEquals(byte expected, byte actual) {
    assertEquals(null, expected, actual);
  }

  /**
   * Asserts that two chars are equal. If they are not
   * an AssertionFailedError is thrown with the given message.
   */
  static public void assertEquals(String message, char expected, char actual) {
    assertEquals(message, Character.valueOf(expected), Character.valueOf(actual));
  }

  /**
   * Asserts that two chars are equal.
   */
  static public void assertEquals(char expected, char actual) {
    assertEquals(null, expected, actual);
  }

  /**
   * Asserts that two shorts are equal. If they are not
   * an AssertionFailedError is thrown with the given message.
   */
  static public void assertEquals(String message, short expected, short actual) {
    assertEquals(message, Short.valueOf(expected), Short.valueOf(actual));
  }

  /**
   * Asserts that two shorts are equal.
   */
  static public void assertEquals(short expected, short actual) {
    assertEquals(null, expected, actual);
  }

  /**
   * Asserts that two ints are equal. If they are not
   * an AssertionFailedError is thrown with the given message.
   */
  static public void assertEquals(String message, int expected, int actual) {
    assertEquals(message, Integer.valueOf(expected), Integer.valueOf(actual));
  }

  /**
   * Asserts that two ints are equal.
   */
  static public void assertEquals(int expected, int actual) {
    assertEquals(null, expected, actual);
  }

  private static boolean isEquals(Object expected, Object actual) {
    return expected.equals(actual);
  }

  // Null-tolerant equality: two nulls are equal; a single null is not.
  private static boolean equalsRegardingNull(Object expected, Object actual) {
    if (expected == null) {
      return actual == null;
    }
    return isEquals(expected, actual);
  }

  /**
   * Asserts that two objects are <b>not</b> equals. If they are, an
   * {@link AssertionError} is thrown with the given message. If
   * <code>first</code> and <code>second</code> are <code>null</code>,
   * they are considered equal.
   *
   * @param message the identifying message for the {@link AssertionError} (<code>null</code>
   * okay)
   * @param first first value to check
   * @param second the value to check against <code>first</code>
   */
  static public void assertNotEquals(String message, Object first,
      Object second) {
    if (equalsRegardingNull(first, second)) {
      failEquals(message, first);
    }
  }

  /**
   * Asserts that two objects are <b>not</b> equals. If they are, an
   * {@link AssertionError} without a message is thrown. If
   * <code>first</code> and <code>second</code> are <code>null</code>,
   * they are considered equal.
   *
   * @param first first value to check
   * @param second the value to check against <code>first</code>
   */
  static public void assertNotEquals(Object first, Object second) {
    assertNotEquals(null, first, second);
  }

  private static void failEquals(String message, Object actual) {
    String formatted = "Values should be different. ";
    if (message != null) {
      formatted = message + ". ";
    }
    formatted += "Actual: " + actual;
    fail(formatted);
  }

  /**
   * Asserts that two longs are <b>not</b> equals. If they are, an
   * {@link AssertionError} is thrown with the given message.
   *
   * @param message the identifying message for the {@link AssertionError} (<code>null</code>
   * okay)
   * @param first first value to check
   * @param second the value to check against <code>first</code>
   */
  static public void assertNotEquals(String message, long first, long second) {
    assertNotEquals(message, (Long) first, (Long) second);
  }

  /**
   * Asserts that two longs are <b>not</b> equals. If they are, an
   * {@link AssertionError} without a message is thrown.
   *
   * @param first first value to check
   * @param second the value to check against <code>first</code>
   */
  static public void assertNotEquals(long first, long second) {
    assertNotEquals(null, first, second);
  }

  /**
   * Asserts that two doubles or floats are <b>not</b> equal to within a positive delta.
   * If they are, an {@link AssertionError} is thrown with the given
   * message. If the expected value is infinity then the delta value is
   * ignored. NaNs are considered equal:
   * <code>assertNotEquals(Double.NaN, Double.NaN, *)</code> fails
   *
   * @param message the identifying message for the {@link AssertionError} (<code>null</code>
   * okay)
   * @param first first value to check
   * @param second the value to check against <code>first</code>
   * @param delta the maximum delta between <code>expected</code> and
   * <code>actual</code> for which both numbers are still
   * considered equal.
   */
  static public void assertNotEquals(String message, double first,
      double second, double delta) {
    if (!doubleIsDifferent(first, second, delta)) {
      failEquals(message, Double.valueOf(first));
    }
  }

  /**
   * Asserts that two doubles or floats are <b>not</b> equal to within a positive delta.
   * If they are, an {@link AssertionError} is thrown. If the expected
   * value is infinity then the delta value is ignored. NaNs are considered
   * equal: <code>assertNotEquals(Double.NaN, Double.NaN, *)</code> fails
   *
   * @param first first value to check
   * @param second the value to check against <code>first</code>
   * @param delta the maximum delta between <code>expected</code> and
   * <code>actual</code> for which both numbers are still
   * considered equal.
   */
  static public void assertNotEquals(double first, double second, double delta) {
    assertNotEquals(null, first, second, delta);
  }

  // True when d1 and d2 differ by more than delta (and are not bitwise-equal).
  static private boolean doubleIsDifferent(double d1, double d2, double delta) {
    if (Double.compare(d1, d2) == 0) {
      return false;
    }
    if ((Math.abs(d1 - d2) <= delta)) {
      return false;
    }
    return true;
  }

  static public void failNotEquals(String message, Object expected, Object actual) {
    fail(format(message, expected, actual));
  }

  /** Builds the standard "expected:<x> but was:<y>" failure message. */
  public static String format(String message, Object expected, Object actual) {
    String formatted = "";
    if (message != null && message.length() > 0) {
      formatted = message + " ";
    }
    return formatted + "expected:<" + expected + "> but was:<" + actual + ">";
  }
}
| |
package org.axway.grapes.server.webapp.resources;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.api.client.filter.HTTPBasicAuthFilter;
import com.yammer.dropwizard.auth.AuthenticationException;
import com.yammer.dropwizard.testing.ResourceTest;
import com.yammer.dropwizard.views.ViewMessageBodyWriter;
import org.axway.grapes.commons.api.ServerAPI;
import org.axway.grapes.commons.datamodel.Artifact;
import org.axway.grapes.commons.datamodel.DataModelFactory;
import org.axway.grapes.commons.datamodel.Scope;
import org.axway.grapes.commons.reports.DependencyList;
import org.axway.grapes.server.GrapesTestUtils;
import org.axway.grapes.server.config.GrapesServerConfig;
import org.axway.grapes.server.core.options.FiltersHolder;
import org.axway.grapes.server.db.RepositoryHandler;
import org.axway.grapes.server.db.datamodel.DbArtifact;
import org.axway.grapes.server.db.datamodel.DbLicense;
import org.axway.grapes.server.db.datamodel.DbModule;
import org.axway.grapes.server.webapp.auth.GrapesAuthProvider;
import org.eclipse.jetty.http.HttpStatus;
import org.junit.Test;
import javax.ws.rs.core.MediaType;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.*;
public class ArtifactResourceTest extends ResourceTest {
private RepositoryHandler repositoryHandler;
@Override
protected void setUpResources() throws Exception {
// Mocked persistence layer shared by all tests in this class; individual
// tests stub the calls they need via when(...).
repositoryHandler = mock(RepositoryHandler.class);
final GrapesServerConfig dmConfig = GrapesTestUtils.getConfigMock();
ArtifactResource resource = new ArtifactResource(repositoryHandler, dmConfig);
// Auth provider enables the HTTPBasicAuthFilter-based tests below.
addProvider(new GrapesAuthProvider(dmConfig));
addProvider(ViewMessageBodyWriter.class);
addResource(resource);
}
@Test
public void getDocumentation(){
// The resource root should serve an HTML documentation page.
final WebResource docTarget = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE);
final ClientResponse docResponse = docTarget.type(MediaType.TEXT_HTML).get(ClientResponse.class);
assertNotNull(docResponse);
assertEquals(HttpStatus.OK_200, docResponse.getStatus());
}
@Test
public void postArtifact() throws AuthenticationException, UnknownHostException {
// A fully populated, well-formed artifact...
final Artifact wellFormed = DataModelFactory.createArtifact("groupId", "artifactId", "version", "classifier", "type", "extension");
wellFormed.setSize("size");
wellFormed.setDownloadUrl("downloadUrl");
// ...posted with valid credentials must be created.
client().addFilter(new HTTPBasicAuthFilter(GrapesTestUtils.USER_4TEST, GrapesTestUtils.PASSWORD_4TEST));
final WebResource postTarget = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE);
final ClientResponse postResponse = postTarget.type(MediaType.APPLICATION_JSON).post(ClientResponse.class, wellFormed);
assertNotNull(postResponse);
assertEquals(HttpStatus.CREATED_201, postResponse.getStatus());
}
@Test
public void postMalFormedArtifact() throws AuthenticationException, UnknownHostException {
client().addFilter(new HTTPBasicAuthFilter(GrapesTestUtils.USER_4TEST, GrapesTestUtils.PASSWORD_4TEST));
final WebResource postTarget = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE);
// Artifacts missing any mandatory coordinate must all be rejected with 400.
final Artifact[] malformed = {
DataModelFactory.createArtifact(null, null, null, null, null, null),
DataModelFactory.createArtifact("groupId", null, null, null, null, null),
DataModelFactory.createArtifact("groupId", "artifactId", null, null, null, null),
DataModelFactory.createArtifact("", "", "", null, null, null)
};
for (final Artifact bad : malformed) {
final ClientResponse badResponse = postTarget.type(MediaType.APPLICATION_JSON).post(ClientResponse.class, bad);
assertNotNull(badResponse);
assertEquals(HttpStatus.BAD_REQUEST_400, badResponse.getStatus());
}
}
@Test
public void updateDownloadURL() throws AuthenticationException, UnknownHostException {
// Known artifact in the mocked repository.
final DbArtifact stored = new DbArtifact();
stored.setClassifier("classifier");
stored.setVersion("version");
stored.setArtifactId("artifactId");
stored.setGroupId("groupId");
when(repositoryHandler.getArtifact(stored.getGavc())).thenReturn(stored);
// Authenticated POST of a new download url must succeed and hit the handler.
client().addFilter(new HTTPBasicAuthFilter(GrapesTestUtils.USER_4TEST, GrapesTestUtils.PASSWORD_4TEST));
final WebResource urlTarget = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + "/" + stored.getGavc() + ServerAPI.GET_DOWNLOAD_URL);
final ClientResponse urlResponse = urlTarget.queryParam(ServerAPI.URL_PARAM, "testUrl").post(ClientResponse.class);
assertNotNull(urlResponse);
assertEquals(HttpStatus.OK_200, urlResponse.getStatus());
verify(repositoryHandler, times(1)).updateDownloadUrl(stored, "testUrl");
}
@Test
public void updateDownloadURLMalFormed() throws AuthenticationException, UnknownHostException {
// Known artifact in the mocked repository.
final DbArtifact stored = new DbArtifact();
stored.setClassifier("classifier");
stored.setVersion("version");
stored.setArtifactId("artifactId");
stored.setGroupId("groupId");
when(repositoryHandler.getArtifact(stored.getGavc())).thenReturn(stored);
client().addFilter(new HTTPBasicAuthFilter(GrapesTestUtils.USER_4TEST, GrapesTestUtils.PASSWORD_4TEST));
// Posting without the mandatory url query parameter must be refused.
final WebResource urlTarget = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + "/" + stored.getGavc() + ServerAPI.GET_DOWNLOAD_URL);
final ClientResponse urlResponse = urlTarget.post(ClientResponse.class);
assertNotNull(urlResponse);
assertEquals(HttpStatus.NOT_ACCEPTABLE_406, urlResponse.getStatus());
}
@Test
public void updateProvider() throws AuthenticationException, UnknownHostException {
// Known artifact in the mocked repository.
final DbArtifact stored = new DbArtifact();
stored.setClassifier("classifier");
stored.setVersion("version");
stored.setArtifactId("artifactId");
stored.setGroupId("groupId");
when(repositoryHandler.getArtifact(stored.getGavc())).thenReturn(stored);
// Authenticated POST of a provider must succeed and reach the handler.
client().addFilter(new HTTPBasicAuthFilter(GrapesTestUtils.USER_4TEST, GrapesTestUtils.PASSWORD_4TEST));
final WebResource providerTarget = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + "/" + stored.getGavc() + ServerAPI.GET_PROVIDER);
final ClientResponse providerResponse = providerTarget.queryParam(ServerAPI.PROVIDER_PARAM, "providerTest").post(ClientResponse.class);
assertNotNull(providerResponse);
assertEquals(HttpStatus.OK_200, providerResponse.getStatus());
verify(repositoryHandler, times(1)).updateProvider(stored, "providerTest");
}
@Test
public void updateProviderMalFormed() throws AuthenticationException, UnknownHostException {
// Known artifact in the mocked repository.
final DbArtifact stored = new DbArtifact();
stored.setClassifier("classifier");
stored.setVersion("version");
stored.setArtifactId("artifactId");
stored.setGroupId("groupId");
when(repositoryHandler.getArtifact(stored.getGavc())).thenReturn(stored);
client().addFilter(new HTTPBasicAuthFilter(GrapesTestUtils.USER_4TEST, GrapesTestUtils.PASSWORD_4TEST));
// Posting without the mandatory provider query parameter must be refused.
final WebResource providerTarget = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + "/" + stored.getGavc() + ServerAPI.GET_PROVIDER);
final ClientResponse providerResponse = providerTarget.post(ClientResponse.class);
assertNotNull(providerResponse);
assertEquals(HttpStatus.NOT_ACCEPTABLE_406, providerResponse.getStatus());
}
@Test
public void getAllGavcs() throws UnknownHostException{
// Repository reports a single known gavc.
final List<String> storedGavcs = new ArrayList<String>();
storedGavcs.add("gavc1");
when(repositoryHandler.getGavcs((FiltersHolder) anyObject())).thenReturn(storedGavcs);
final WebResource gavcsTarget = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + ServerAPI.GET_GAVCS);
final ClientResponse gavcsResponse = gavcsTarget.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertNotNull(gavcsResponse);
assertEquals(HttpStatus.OK_200, gavcsResponse.getStatus());
// The endpoint must echo exactly what the repository returned.
final ArrayList<String> returnedGavcs = gavcsResponse.getEntity(ArrayList.class);
assertNotNull(returnedGavcs);
assertEquals(1, returnedGavcs.size());
assertEquals("gavc1", returnedGavcs.get(0));
}
@Test
public void getAllGroupIds() throws UnknownHostException{
// Repository reports a single known groupId.
final List<String> storedGroupIds = new ArrayList<String>();
storedGroupIds.add("groupId1");
when(repositoryHandler.getGroupIds((FiltersHolder) anyObject())).thenReturn(storedGroupIds);
final WebResource groupIdsTarget = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + ServerAPI.GET_GROUPIDS);
final ClientResponse groupIdsResponse = groupIdsTarget.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertNotNull(groupIdsResponse);
assertEquals(HttpStatus.OK_200, groupIdsResponse.getStatus());
// The endpoint must echo exactly what the repository returned.
final ArrayList<String> returnedGroupIds = groupIdsResponse.getEntity(ArrayList.class);
assertNotNull(returnedGroupIds);
assertEquals(1, returnedGroupIds.size());
assertEquals("groupId1", returnedGroupIds.get(0));
}
@Test
public void getAnArtifact() throws UnknownHostException{
// A fully populated stored artifact with one attached license.
final DbArtifact stored = new DbArtifact();
stored.setSize("10Mo");
stored.setDownloadUrl("nowhere");
stored.setExtension("jar");
stored.setType("component");
stored.setClassifier("win");
stored.setVersion("1.0.0-SNAPSHOT");
stored.setArtifactId("artifactId");
stored.setGroupId("groupId");
final DbLicense storedLicense = new DbLicense();
storedLicense.setName("licenseId");
stored.addLicense(storedLicense);
when(repositoryHandler.getArtifact(stored.getGavc())).thenReturn(stored);
final WebResource artifactTarget = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + "/" + stored.getGavc());
final ClientResponse artifactResponse = artifactTarget.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertNotNull(artifactResponse);
assertEquals(HttpStatus.OK_200, artifactResponse.getStatus());
// Every stored field must round-trip through the REST representation.
final Artifact returned = artifactResponse.getEntity(Artifact.class);
assertNotNull(returned);
assertEquals(stored.getGroupId(), returned.getGroupId());
assertEquals(stored.getArtifactId(), returned.getArtifactId());
assertEquals(stored.getVersion(), returned.getVersion());
assertEquals(stored.getClassifier(), returned.getClassifier());
assertEquals(stored.getType(), returned.getType());
assertEquals(stored.getExtension(), returned.getExtension());
assertEquals(stored.getSize(), returned.getSize());
assertEquals(stored.getDownloadUrl(), returned.getDownloadUrl());
assertEquals(1, returned.getLicenses().size());
assertEquals("licenseId", returned.getLicenses().get(0));
}
@Test
public void deleteAnArtifact() throws UnknownHostException, AuthenticationException{
// Any gavc resolves to an artifact, so the delete path is exercised.
when(repositoryHandler.getArtifact(anyString())).thenReturn(new DbArtifact());
client().addFilter(new HTTPBasicAuthFilter(GrapesTestUtils.USER_4TEST, GrapesTestUtils.PASSWORD_4TEST));
final WebResource deleteTarget = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + "/gavc");
final ClientResponse deleteResponse = deleteTarget.accept(MediaType.APPLICATION_JSON).delete(ClientResponse.class);
assertNotNull(deleteResponse);
assertEquals(HttpStatus.OK_200, deleteResponse.getStatus());
}
@Test
public void getAncestors() throws UnknownHostException {
// One module depending (TEST scope) on the artifact under test; the
// repository is stubbed to report that module as the artifact's ancestor.
final List<DbModule> dependencies = new ArrayList<DbModule>();
final DbModule module = new DbModule();
module.setName("module");
module.setVersion("version");
final DbArtifact artifact = new DbArtifact();
artifact.setGroupId("groupId");
artifact.setArtifactId("artifactId");
artifact.setVersion("version");
module.addDependency(artifact.getGavc(), Scope.TEST);
dependencies.add(module);
when(repositoryHandler.getAncestors(anyString(), (FiltersHolder) anyObject())).thenReturn(dependencies);
when(repositoryHandler.getArtifact(artifact.getGavc())).thenReturn(artifact);
WebResource resource = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + "/" + artifact.getGavc() + ServerAPI.GET_ANCESTORS);
ClientResponse response = resource.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertNotNull(response);
assertEquals(HttpStatus.OK_200, response.getStatus());
// The returned report must contain exactly the stubbed module -> artifact
// dependency, with source/target/scope preserved.
DependencyList dependencyList = response.getEntity(DependencyList.class);
assertNotNull(dependencyList);
assertEquals(1, dependencyList.getDependencies().size());
assertEquals(module.getName(), dependencyList.getDependencies().get(0).getSourceName());
assertEquals(module.getVersion(), dependencyList.getDependencies().get(0).getSourceVersion());
assertEquals(artifact.getGavc(), dependencyList.getDependencies().get(0).getTarget().getGavc());
assertEquals(Scope.TEST, dependencyList.getDependencies().get(0).getScope());
}
@Test
public void getLicenses() throws UnknownHostException {
// Stored artifact carrying one license known to the repository.
final DbArtifact stored = new DbArtifact();
stored.setVersion("version");
stored.setArtifactId("artifactId");
stored.setGroupId("groupId");
final DbLicense storedLicense = new DbLicense();
storedLicense.setName("licenseId");
stored.addLicense(storedLicense);
when(repositoryHandler.getArtifact(stored.getGavc())).thenReturn(stored);
when(repositoryHandler.getLicense(storedLicense.getName())).thenReturn(storedLicense);
final WebResource licensesTarget = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + "/" + stored.getGavc() + ServerAPI.GET_LICENSES);
final ClientResponse licensesResponse = licensesTarget.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertNotNull(licensesResponse);
assertEquals(HttpStatus.OK_200, licensesResponse.getStatus());
// Exactly the one attached license name must be reported.
final ArrayList<String> returnedLicenses = licensesResponse.getEntity(ArrayList.class);
assertNotNull(returnedLicenses);
assertEquals(1, returnedLicenses.size());
assertEquals(storedLicense.getName(), returnedLicenses.get(0));
}
@Test
public void getAddLicenseToArtifact() throws AuthenticationException, UnknownHostException {
// Stored artifact and a license both known to the repository.
final DbArtifact stored = new DbArtifact();
stored.setVersion("version");
stored.setArtifactId("artifactId");
stored.setGroupId("groupId");
when(repositoryHandler.getArtifact(stored.getGavc())).thenReturn(stored);
final DbLicense storedLicense = new DbLicense();
storedLicense.setName("licenseId");
when(repositoryHandler.getLicense(storedLicense.getName())).thenReturn(storedLicense);
// Authenticated POST of the license id must attach it via the handler.
client().addFilter(new HTTPBasicAuthFilter(GrapesTestUtils.USER_4TEST, GrapesTestUtils.PASSWORD_4TEST));
final WebResource licensesTarget = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + "/" + stored.getGavc() + ServerAPI.GET_LICENSES);
final ClientResponse addResponse = licensesTarget.queryParam(ServerAPI.LICENSE_ID_PARAM, storedLicense.getName()).post(ClientResponse.class);
assertNotNull(addResponse);
assertEquals(HttpStatus.OK_200, addResponse.getStatus());
verify(repositoryHandler, times(1)).addLicenseToArtifact(stored, storedLicense);
}
@Test
public void getRemoveLicenseToArtifact() throws AuthenticationException, UnknownHostException {
// Stored artifact that already carries the license to be removed.
final DbArtifact stored = new DbArtifact();
stored.setVersion("version");
stored.setArtifactId("artifactId");
stored.setGroupId("groupId");
final DbLicense storedLicense = new DbLicense();
storedLicense.setName("licenseId");
stored.addLicense(storedLicense);
when(repositoryHandler.getArtifact(stored.getGavc())).thenReturn(stored);
// Authenticated DELETE of the license id must detach it via the handler.
client().addFilter(new HTTPBasicAuthFilter(GrapesTestUtils.USER_4TEST, GrapesTestUtils.PASSWORD_4TEST));
final WebResource licensesTarget = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + "/" + stored.getGavc() + ServerAPI.GET_LICENSES);
final ClientResponse removeResponse = licensesTarget.queryParam(ServerAPI.LICENSE_ID_PARAM, storedLicense.getName()).delete(ClientResponse.class);
assertNotNull(removeResponse);
assertEquals(HttpStatus.OK_200, removeResponse.getStatus());
verify(repositoryHandler, times(1)).removeLicenseFromArtifact(stored, storedLicense.getName());
}
@Test
public void addDoNotUseFlag() throws AuthenticationException, UnknownHostException {
// Known artifact in the mocked repository.
final DbArtifact stored = new DbArtifact();
stored.setVersion("version");
stored.setArtifactId("artifactId");
stored.setGroupId("groupId");
when(repositoryHandler.getArtifact(stored.getGavc())).thenReturn(stored);
// Authenticated POST of doNotUse=true must flag the artifact via the handler.
client().addFilter(new HTTPBasicAuthFilter(GrapesTestUtils.USER_4TEST, GrapesTestUtils.PASSWORD_4TEST));
final WebResource flagTarget = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + "/" + stored.getGavc() + ServerAPI.SET_DO_NOT_USE);
final ClientResponse flagResponse = flagTarget.queryParam(ServerAPI.DO_NOT_USE, "true").post(ClientResponse.class);
assertNotNull(flagResponse);
assertEquals(HttpStatus.OK_200, flagResponse.getStatus());
verify(repositoryHandler, times(1)).updateDoNotUse(stored, Boolean.TRUE);
}
// Verifies that every mutating endpoint rejects unauthenticated requests with
// 401 (no auth filter is installed in this test).
@Test
public void checkAuthenticationOnPostAndDeleteMethods(){
WebResource resource = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE);
ClientResponse response = resource.post(ClientResponse.class);
assertNotNull(response);
assertEquals(HttpStatus.UNAUTHORIZED_401, response.getStatus());
// NOTE(review): this second POST to the same resource root duplicates the
// check above - possibly a leftover from a removed endpoint; confirm intent.
resource = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE);
response = resource.post(ClientResponse.class);
assertNotNull(response);
assertEquals(HttpStatus.UNAUTHORIZED_401, response.getStatus());
resource = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + "/gavc");
response = resource.delete(ClientResponse.class);
assertNotNull(response);
assertEquals(HttpStatus.UNAUTHORIZED_401, response.getStatus());
resource = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + "/gavc" + ServerAPI.GET_LICENSES);
response = resource.post(ClientResponse.class);
assertNotNull(response);
assertEquals(HttpStatus.UNAUTHORIZED_401, response.getStatus());
resource = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + "/gavc" + ServerAPI.GET_LICENSES);
// NOTE(review): the response of this DELETE is never asserted - the 401
// check appears to be missing here; confirm and add the assertions.
response = resource.delete(ClientResponse.class);
resource = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + "/gavc" + ServerAPI.SET_DO_NOT_USE);
response = resource.post(ClientResponse.class);
assertNotNull(response);
assertEquals(HttpStatus.UNAUTHORIZED_401, response.getStatus());
// NOTE(review): this DELETE response is also never asserted (and it is not
// obvious the SET_DO_NOT_USE path supports DELETE at all) - verify.
response = resource.delete(ClientResponse.class);
resource = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + "/gavc" + ServerAPI.GET_DOWNLOAD_URL);
response = resource.post(ClientResponse.class);
assertNotNull(response);
assertEquals(HttpStatus.UNAUTHORIZED_401, response.getStatus());
resource = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + "/gavc" + ServerAPI.GET_PROVIDER);
response = resource.post(ClientResponse.class);
assertNotNull(response);
assertEquals(HttpStatus.UNAUTHORIZED_401, response.getStatus());
}
@Test
public void notFound() throws AuthenticationException, UnknownHostException {
    // Authenticated client: every call below gets past auth and must then fail
    // with 404 because the targeted gavc (or license) does not exist.
    client().addFilter(new HTTPBasicAuthFilter(GrapesTestUtils.USER_4TEST, GrapesTestUtils.PASSWORD_4TEST));

    // POST a license onto a nonexistent artifact.
    WebResource resource = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + "/wrong" + ServerAPI.GET_LICENSES);
    ClientResponse response = resource.queryParam(ServerAPI.LICENSE_ID_PARAM, "test").post(ClientResponse.class);
    assertNotNull(response);
    assertEquals(HttpStatus.NOT_FOUND_404, response.getStatus());

    // GET licenses of a nonexistent artifact.
    resource = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + "/wrong" + ServerAPI.GET_LICENSES);
    response = resource.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertNotNull(response);
    assertEquals(HttpStatus.NOT_FOUND_404, response.getStatus());

    // GET ancestors of a nonexistent artifact.
    resource = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + "/wrong" + ServerAPI.GET_ANCESTORS);
    response = resource.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertNotNull(response);
    assertEquals(HttpStatus.NOT_FOUND_404, response.getStatus());

    // GET the artifact itself by a wrong gavc.
    resource = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + "/wrongGavc");
    response = resource.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertNotNull(response);
    assertEquals(HttpStatus.NOT_FOUND_404, response.getStatus());

    // DELETE the artifact itself by a wrong gavc.
    resource = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + "/wrongGavc");
    response = resource.accept(MediaType.APPLICATION_JSON).delete(ClientResponse.class);
    assertNotNull(response);
    assertEquals(HttpStatus.NOT_FOUND_404, response.getStatus());

    // DELETE a license from a nonexistent artifact.
    resource = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + "/wrongGavc" + ServerAPI.GET_LICENSES);
    response = resource.queryParam(ServerAPI.LICENSE_ID_PARAM, "test").delete(ClientResponse.class);
    assertNotNull(response);
    assertEquals(HttpStatus.NOT_FOUND_404, response.getStatus());

    // NOTE(review): the following four lines repeat the previous DELETE-license
    // check verbatim — possibly a copy-paste meant to target a different
    // endpoint or parameter; confirm intent before removing.
    resource = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + "/wrongGavc" + ServerAPI.GET_LICENSES);
    response = resource.queryParam(ServerAPI.LICENSE_ID_PARAM, "test").delete(ClientResponse.class);
    assertNotNull(response);
    assertEquals(HttpStatus.NOT_FOUND_404, response.getStatus());

    // POST do-not-use flag on a nonexistent artifact.
    resource = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + "/wrongGavc" + ServerAPI.SET_DO_NOT_USE);
    response = resource.queryParam(ServerAPI.DO_NOT_USE, "true").post(ClientResponse.class);
    assertNotNull(response);
    assertEquals(HttpStatus.NOT_FOUND_404, response.getStatus());

    // POST a download URL on a nonexistent artifact.
    resource = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + "/wrongGavc" + ServerAPI.GET_DOWNLOAD_URL);
    response = resource.queryParam(ServerAPI.URL_PARAM, "test").post(ClientResponse.class);
    assertNotNull(response);
    assertEquals(HttpStatus.NOT_FOUND_404, response.getStatus());

    // POST a provider on a nonexistent artifact.
    resource = client().resource("/" + ServerAPI.ARTIFACT_RESOURCE + "/wrongGavc" + ServerAPI.GET_PROVIDER);
    response = resource.queryParam(ServerAPI.PROVIDER_PARAM, "test").post(ClientResponse.class);
    assertNotNull(response);
    assertEquals(HttpStatus.NOT_FOUND_404, response.getStatus());
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.math3.fraction;
import java.io.Serializable;
import java.text.FieldPosition;
import java.text.NumberFormat;
import java.text.ParsePosition;
import java.util.Locale;
import org.apache.commons.math3.exception.NullArgumentException;
import org.apache.commons.math3.exception.util.LocalizedFormats;
/**
 * Common part shared by both {@link FractionFormat} and {@link BigFractionFormat}:
 * holds the numerator/denominator sub-formats and whitespace-parsing helpers.
 * @since 2.0
 */
public abstract class AbstractFormat extends NumberFormat implements Serializable {

    /** Serializable version identifier. */
    private static final long serialVersionUID = -6981118387974191891L;

    /** The format used for the denominator. */
    private NumberFormat denominatorFormat;

    /** The format used for the numerator. */
    private NumberFormat numeratorFormat;

    /**
     * Create an improper formatting instance with the default number format
     * for the numerator and denominator.
     */
    protected AbstractFormat() {
        this(getDefaultNumberFormat());
    }

    /**
     * Create an improper formatting instance with a custom number format for
     * both the numerator and denominator.
     * @param format the custom format for both the numerator and denominator.
     */
    protected AbstractFormat(final NumberFormat format) {
        // Clone so numerator and denominator can later be customized independently.
        this(format, (NumberFormat) format.clone());
    }

    /**
     * Create an improper formatting instance with a custom number format for
     * the numerator and a custom number format for the denominator.
     * @param numeratorFormat the custom format for the numerator.
     * @param denominatorFormat the custom format for the denominator.
     */
    protected AbstractFormat(final NumberFormat numeratorFormat,
                             final NumberFormat denominatorFormat) {
        this.numeratorFormat = numeratorFormat;
        this.denominatorFormat = denominatorFormat;
    }

    /**
     * Create a default number format. The default number format is based on
     * {@link NumberFormat#getNumberInstance(java.util.Locale)}. The only
     * customization is the maximum number of fraction digits, which is set to 0.
     * @return the default number format.
     */
    protected static NumberFormat getDefaultNumberFormat() {
        return getDefaultNumberFormat(Locale.getDefault());
    }

    /**
     * Create a default number format. The default number format is based on
     * {@link NumberFormat#getNumberInstance(java.util.Locale)}. The only
     * customization is the maximum number of fraction digits, which is set to 0.
     * @param locale the specific locale used by the format.
     * @return the default number format specific to the given locale.
     */
    protected static NumberFormat getDefaultNumberFormat(final Locale locale) {
        final NumberFormat nf = NumberFormat.getNumberInstance(locale);
        // Numerator/denominator are integers: no fractional digits, integer-only parsing.
        nf.setMaximumFractionDigits(0);
        nf.setParseIntegerOnly(true);
        return nf;
    }

    /**
     * Access the denominator format.
     * @return the denominator format.
     */
    public NumberFormat getDenominatorFormat() {
        return denominatorFormat;
    }

    /**
     * Access the numerator format.
     * @return the numerator format.
     */
    public NumberFormat getNumeratorFormat() {
        return numeratorFormat;
    }

    /**
     * Modify the denominator format.
     * @param format the new denominator format value.
     * @throws NullArgumentException if {@code format} is {@code null}.
     */
    public void setDenominatorFormat(final NumberFormat format) {
        if (format == null) {
            throw new NullArgumentException(LocalizedFormats.DENOMINATOR_FORMAT);
        }
        this.denominatorFormat = format;
    }

    /**
     * Modify the numerator format.
     * @param format the new numerator format value.
     * @throws NullArgumentException if {@code format} is {@code null}.
     */
    public void setNumeratorFormat(final NumberFormat format) {
        if (format == null) {
            throw new NullArgumentException(LocalizedFormats.NUMERATOR_FORMAT);
        }
        this.numeratorFormat = format;
    }

    /**
     * Parses <code>source</code> until a non-whitespace character is found.
     * @param source the string to parse
     * @param pos input/output parsing parameter. On output, <code>pos</code>
     * holds the index of the next non-whitespace character.
     */
    protected static void parseAndIgnoreWhitespace(final String source,
                                                   final ParsePosition pos) {
        parseNextCharacter(source, pos);
        // parseNextCharacter consumed the non-whitespace character; step back one
        // so the caller can re-read it.
        pos.setIndex(pos.getIndex() - 1);
    }

    /**
     * Parses <code>source</code> until a non-whitespace character is found.
     * @param source the string to parse
     * @param pos input/output parsing parameter.
     * @return the first non-whitespace character, or {@code '\0'} if only
     * whitespace remains.
     */
    protected static char parseNextCharacter(final String source,
                                             final ParsePosition pos) {
        int index = pos.getIndex();
        final int n = source.length();
        char ret = 0;

        if (index < n) {
            char c;
            do {
                c = source.charAt(index++);
            } while (Character.isWhitespace(c) && index < n);
            pos.setIndex(index);

            // NOTE(review): when the non-whitespace character is the very last
            // character of source, index == n here and '\0' is returned even
            // though a character was consumed — preserved upstream behavior.
            if (index < n) {
                ret = c;
            }
        }

        return ret;
    }

    /**
     * Formats a double value as a fraction and appends the result to a StringBuffer.
     *
     * @param value the double value to format
     * @param buffer StringBuffer to append to
     * @param position On input: an alignment field, if desired. On output: the
     * offsets of the alignment field
     * @return a reference to the appended buffer
     * @see #format(Object, StringBuffer, FieldPosition)
     */
    @Override
    public StringBuffer format(final double value,
                               final StringBuffer buffer, final FieldPosition position) {
        // Delegate to the Object overload implemented by concrete subclasses.
        return format(Double.valueOf(value), buffer, position);
    }

    /**
     * Formats a long value as a fraction and appends the result to a StringBuffer.
     *
     * @param value the long value to format
     * @param buffer StringBuffer to append to
     * @param position On input: an alignment field, if desired. On output: the
     * offsets of the alignment field
     * @return a reference to the appended buffer
     * @see #format(Object, StringBuffer, FieldPosition)
     */
    @Override
    public StringBuffer format(final long value,
                               final StringBuffer buffer, final FieldPosition position) {
        // Delegate to the Object overload implemented by concrete subclasses.
        return format(Long.valueOf(value), buffer, position);
    }

}
| |
package org.apache.maven.repository.legacy;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.ArtifactRepositoryPolicy;
import org.apache.maven.artifact.repository.Authentication;
import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
import org.apache.maven.repository.Proxy;
import org.codehaus.plexus.component.annotations.Component;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.logging.Logger;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.channels.FileLock;
import java.util.Date;
import java.util.Properties;
/**
* DefaultUpdateCheckManager
*/
/**
 * DefaultUpdateCheckManager.
 *
 * Decides whether an artifact or repository metadata needs an update check
 * against a remote repository, based on the repository's update policy and on
 * per-repository timestamps persisted in a touchfile next to the local copy
 * ({@code *.lastUpdated} for artifacts, {@code resolver-status.properties} for
 * metadata). Error messages from failed transfers are stored under the same
 * key with an {@code .error} suffix. Touchfile access is guarded by an
 * intra-JVM lock (interned path) plus an OS-level {@link FileLock}.
 */
@Component( role = UpdateCheckManager.class )
public class DefaultUpdateCheckManager
    extends AbstractLogEnabled
    implements UpdateCheckManager
{

    /** Suffix appended to a touchfile key to store the last transfer error. */
    private static final String ERROR_KEY_SUFFIX = ".error";

    public DefaultUpdateCheckManager()
    {
    }

    public DefaultUpdateCheckManager( Logger logger )
    {
        enableLogging( logger );
    }

    /** Suffix of the per-artifact touchfile name. */
    public static final String LAST_UPDATE_TAG = ".lastUpdated";

    /** Name of the shared touchfile used for repository metadata. */
    private static final String TOUCHFILE_NAME = "resolver-status.properties";

    /**
     * Determines whether the given artifact should be re-checked against the
     * repository: the repository policy must be enabled and the last check
     * (file mtime if present, else touchfile entry) must be out of date.
     */
    public boolean isUpdateRequired( Artifact artifact, ArtifactRepository repository )
    {
        File file = artifact.getFile();

        // Snapshots and releases carry independent update policies.
        ArtifactRepositoryPolicy policy = artifact.isSnapshot() ? repository.getSnapshots() : repository.getReleases();

        if ( !policy.isEnabled() )
        {
            if ( getLogger().isDebugEnabled() )
            {
                getLogger().debug(
                    "Skipping update check for " + artifact + " (" + file + ") from " + repository.getId() + " ("
                        + repository.getUrl() + ")" );
            }

            return false;
        }

        if ( getLogger().isDebugEnabled() )
        {
            getLogger().debug(
                "Determining update check for " + artifact + " (" + file + ") from " + repository.getId() + " ("
                    + repository.getUrl() + ")" );
        }

        if ( file == null )
        {
            // TODO throw something instead?
            return true;
        }

        Date lastCheckDate;

        if ( file.exists() )
        {
            // The local file's modification time records the last successful check.
            lastCheckDate = new Date( file.lastModified() );
        }
        else
        {
            // No local file yet: fall back to the timestamp recorded for this
            // specific repository in the touchfile.
            File touchfile = getTouchfile( artifact );
            lastCheckDate = readLastUpdated( touchfile, getRepositoryKey( repository ) );
        }

        return ( lastCheckDate == null ) || policy.checkOutOfDate( lastCheckDate );
    }

    /**
     * Determines whether the given repository metadata should be re-checked,
     * using the policy that the metadata itself selects for the repository.
     */
    public boolean isUpdateRequired( RepositoryMetadata metadata, ArtifactRepository repository, File file )
    {
        // Here, we need to determine which policy to use. Release updateInterval will be used when
        // the metadata refers to a release artifact or meta-version, and snapshot updateInterval will be used when
        // it refers to a snapshot artifact or meta-version.
        // NOTE: Release metadata includes version information about artifacts that have been released, to allow
        // meta-versions like RELEASE and LATEST to resolve, and also to allow retrieval of the range of valid, released
        // artifacts available.
        ArtifactRepositoryPolicy policy = metadata.getPolicy( repository );

        if ( !policy.isEnabled() )
        {
            if ( getLogger().isDebugEnabled() )
            {
                getLogger().debug(
                    "Skipping update check for " + metadata.getKey() + " (" + file + ") from " + repository.getId()
                        + " (" + repository.getUrl() + ")" );
            }

            return false;
        }

        if ( getLogger().isDebugEnabled() )
        {
            getLogger().debug(
                "Determining update check for " + metadata.getKey() + " (" + file + ") from " + repository.getId()
                    + " (" + repository.getUrl() + ")" );
        }

        if ( file == null )
        {
            // TODO throw something instead?
            return true;
        }

        Date lastCheckDate = readLastUpdated( metadata, repository, file );

        return ( lastCheckDate == null ) || policy.checkOutOfDate( lastCheckDate );
    }

    /** Reads the metadata's last-updated timestamp from its touchfile, or null. */
    private Date readLastUpdated( RepositoryMetadata metadata, ArtifactRepository repository, File file )
    {
        File touchfile = getTouchfile( metadata, file );

        String key = getMetadataKey( repository, file );

        return readLastUpdated( touchfile, key );
    }

    /** Returns the error message recorded for the artifact/repository pair, or null. */
    public String getError( Artifact artifact, ArtifactRepository repository )
    {
        File touchFile = getTouchfile( artifact );
        return getError( touchFile, getRepositoryKey( repository ) );
    }

    /**
     * Records the outcome of a check: if the artifact file now exists the
     * touchfile is obsolete and removed; otherwise the timestamp (and optional
     * error) is written for this repository.
     */
    public void touch( Artifact artifact, ArtifactRepository repository, String error )
    {
        File file = artifact.getFile();

        File touchfile = getTouchfile( artifact );

        if ( file.exists() )
        {
            // NOTE(review): the delete() return value is ignored — a failed
            // deletion is silently tolerated here; confirm that is intended.
            touchfile.delete();
        }
        else
        {
            writeLastUpdated( touchfile, getRepositoryKey( repository ), error );
        }
    }

    /** Records a successful metadata check by updating its touchfile entry. */
    public void touch( RepositoryMetadata metadata, ArtifactRepository repository, File file )
    {
        File touchfile = getTouchfile( metadata, file );

        String key = getMetadataKey( repository, file );

        writeLastUpdated( touchfile, key, null );
    }

    /** Touchfile property key for metadata: {@code <repoId>.<fileName>.lastUpdated}. */
    String getMetadataKey( ArtifactRepository repository, File file )
    {
        return repository.getId() + '.' + file.getName() + LAST_UPDATE_TAG;
    }

    /**
     * Touchfile property key for an artifact repository. Built from proxy and
     * credential hashes plus the repository URL, so a change of route or
     * authorization invalidates the previous timestamp.
     */
    String getRepositoryKey( ArtifactRepository repository )
    {
        StringBuilder buffer = new StringBuilder( 256 );

        Proxy proxy = repository.getProxy();
        if ( proxy != null )
        {
            if ( proxy.getUserName() != null )
            {
                // Only a hash of the credentials is stored, not the credentials themselves.
                int hash = ( proxy.getUserName() + proxy.getPassword() ).hashCode();
                buffer.append( hash ).append( '@' );
            }
            buffer.append( proxy.getHost() ).append( ':' ).append( proxy.getPort() ).append( '>' );
        }

        // consider the username&password because a repo manager might block artifacts depending on authorization
        Authentication auth = repository.getAuthentication();
        if ( auth != null )
        {
            int hash = ( auth.getUsername() + auth.getPassword() ).hashCode();
            buffer.append( hash ).append( '@' );
        }

        // consider the URL (instead of the id) as this most closely relates to the contents in the repo
        buffer.append( repository.getUrl() );

        return buffer.toString();
    }

    /**
     * Rewrites the touchfile: loads the existing properties, sets the
     * timestamp for {@code key}, sets or clears the {@code .error} entry,
     * then stores the file back. Guarded by an interned-path monitor
     * (intra-JVM) and an exclusive FileLock (inter-process).
     */
    private void writeLastUpdated( File touchfile, String key, String error )
    {
        synchronized ( touchfile.getAbsolutePath().intern() )
        {
            if ( !touchfile.getParentFile().exists() && !touchfile.getParentFile().mkdirs() )
            {
                getLogger().debug( "Failed to create directory: " + touchfile.getParent()
                                       + " for tracking artifact metadata resolution." );
                return;
            }

            FileChannel channel = null;
            FileLock lock = null;
            try
            {
                Properties props = new Properties();

                channel = new RandomAccessFile( touchfile, "rw" ).getChannel();
                lock = channel.lock();

                if ( touchfile.canRead() )
                {
                    getLogger().debug( "Reading resolution-state from: " + touchfile );
                    props.load( Channels.newInputStream( channel ) );
                }

                props.setProperty( key, Long.toString( System.currentTimeMillis() ) );

                if ( error != null )
                {
                    props.setProperty( key + ERROR_KEY_SUFFIX, error );
                }
                else
                {
                    props.remove( key + ERROR_KEY_SUFFIX );
                }

                getLogger().debug( "Writing resolution-state to: " + touchfile );
                // Truncate before store: the rewritten content may be shorter.
                channel.truncate( 0 );
                props.store( Channels.newOutputStream( channel ), "Last modified on: " + new Date() );

                // Release and null out so the finally block does not release twice.
                lock.release();
                lock = null;

                channel.close();
                channel = null;
            }
            catch ( IOException e )
            {
                getLogger().debug(
                    "Failed to record lastUpdated information for resolution.\nFile: " + touchfile.toString()
                        + "; key: " + key, e );
            }
            finally
            {
                if ( lock != null )
                {
                    try
                    {
                        lock.release();
                    }
                    catch ( IOException e )
                    {
                        getLogger().debug( "Error releasing exclusive lock for resolution tracking file: " + touchfile,
                                           e );
                    }
                }

                if ( channel != null )
                {
                    try
                    {
                        channel.close();
                    }
                    catch ( IOException e )
                    {
                        getLogger().debug( "Error closing FileChannel for resolution tracking file: " + touchfile, e );
                    }
                }
            }
        }
    }

    /**
     * Reads the timestamp stored under {@code key} in the touchfile, or null
     * when the file is unreadable, the key is absent, or the value is not a
     * valid epoch-millis long.
     */
    Date readLastUpdated( File touchfile, String key )
    {
        getLogger().debug( "Searching for " + key + " in resolution tracking file." );

        Properties props = read( touchfile );
        if ( props != null )
        {
            String rawVal = props.getProperty( key );
            if ( rawVal != null )
            {
                try
                {
                    return new Date( Long.parseLong( rawVal ) );
                }
                catch ( NumberFormatException e )
                {
                    getLogger().debug( "Cannot parse lastUpdated date: \'" + rawVal + "\'. Ignoring.", e );
                }
            }
        }
        return null;
    }

    /** Reads the {@code .error} entry for {@code key}, or null if none. */
    private String getError( File touchFile, String key )
    {
        Properties props = read( touchFile );
        if ( props != null )
        {
            return props.getProperty( key + ERROR_KEY_SUFFIX );
        }
        return null;
    }

    /**
     * Loads the touchfile's properties under a shared (read) FileLock, or
     * returns null when the file is unreadable or loading fails.
     */
    private Properties read( File touchfile )
    {
        if ( !touchfile.canRead() )
        {
            getLogger().debug( "Skipped unreadable resolution tracking file " + touchfile );
            return null;
        }

        synchronized ( touchfile.getAbsolutePath().intern() )
        {
            FileInputStream in = null;
            FileLock lock = null;

            try
            {
                Properties props = new Properties();

                in = new FileInputStream( touchfile );
                // Shared lock over the whole file: concurrent readers allowed.
                lock = in.getChannel().lock( 0, Long.MAX_VALUE, true );

                getLogger().debug( "Reading resolution-state from: " + touchfile );
                props.load( in );

                // Release and null out so the finally block does not release twice.
                lock.release();
                lock = null;

                in.close();
                in = null;

                return props;
            }
            catch ( IOException e )
            {
                getLogger().debug( "Failed to read resolution tracking file " + touchfile, e );

                return null;
            }
            finally
            {
                if ( lock != null )
                {
                    try
                    {
                        lock.release();
                    }
                    catch ( IOException e )
                    {
                        getLogger().debug( "Error releasing shared lock for resolution tracking file: " + touchfile,
                                           e );
                    }
                }

                if ( in != null )
                {
                    try
                    {
                        in.close();
                    }
                    catch ( IOException e )
                    {
                        getLogger().debug( "Error closing FileChannel for resolution tracking file: " + touchfile, e );
                    }
                }
            }
        }
    }

    /**
     * Per-artifact touchfile located next to the artifact file:
     * {@code <artifactId>-<baseVersion>[-<classifier>].<type>.lastUpdated}.
     */
    File getTouchfile( Artifact artifact )
    {
        StringBuilder sb = new StringBuilder( 128 );
        sb.append( artifact.getArtifactId() );
        sb.append( '-' ).append( artifact.getBaseVersion() );
        if ( artifact.getClassifier() != null )
        {
            sb.append( '-' ).append( artifact.getClassifier() );
        }
        sb.append( '.' ).append( artifact.getType() ).append( LAST_UPDATE_TAG );
        return new File( artifact.getFile().getParentFile(), sb.toString() );
    }

    /** Shared metadata touchfile in the same directory as the metadata file. */
    File getTouchfile( RepositoryMetadata metadata, File file )
    {
        return new File( file.getParent(), TOUCHFILE_NAME );
    }
}
| |
/*
* Copyright (C) 2015 Lable (info@lable.nl)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.lable.oss.dynamicconfig.provider.zookeeper;
import org.apache.commons.configuration.BaseConfiguration;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.HierarchicalConfiguration;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.server.ServerConfig;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.lable.oss.dynamicconfig.Precomputed;
import org.lable.oss.dynamicconfig.core.*;
import org.lable.oss.dynamicconfig.core.spi.HierarchicalConfigurationDeserializer;
import org.lable.oss.dynamicconfig.serialization.yaml.YamlDeserializer;
import org.mockito.ArgumentCaptor;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.lable.oss.dynamicconfig.core.ConfigurationManager.*;
import static org.lable.oss.dynamicconfig.provider.zookeeper.ZookeeperTestUtil.*;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
/**
 * Integration tests for ZookeepersAsConfigSource against an embedded
 * ZooKeeper server started on localhost:21818 for each test.
 *
 * NOTE(review): synchronization with ZooKeeper watch callbacks is done via
 * fixed sleeps (100–300 ms), which makes these tests timing-sensitive and
 * potentially flaky on slow machines.
 */
public class ZookeepersAsConfigSourceIT {
    // Thread running the embedded ZooKeeper server.
    private Thread server;
    // host:port of the embedded server.
    private String zookeeperHost;
    // Source configuration (quorum/znode/app name) reused by most tests.
    private Configuration testConfig;
    // Direct client used to seed/update znodes from the tests.
    private ZooKeeper zookeeper;
    private ZookeeperTestUtil.ZooKeeperThread zkServer;

    @Before
    public void setUp() throws Exception {
        // Start an embedded ZooKeeper on a fixed port with its data dir in /tmp.
        final String clientPort = "21818";
        final String dataDirectory = System.getProperty("java.io.tmpdir");
        zookeeperHost = "localhost:" + clientPort;

        ServerConfig config = new ServerConfig();
        config.parse(new String[] { clientPort, dataDirectory });

        testConfig = new BaseConfiguration();
        testConfig.setProperty("quorum", zookeeperHost);
        testConfig.setProperty("znode", "/config");
        testConfig.setProperty(APPNAME_PROPERTY, "test");
        testConfig.setProperty(ROOTCONFIG_PROPERTY, "test");

        zkServer = new ZookeeperTestUtil.ZooKeeperThread(config);
        server = new Thread(zkServer);
        server.start();

        zookeeper = connect(zookeeperHost);
    }

    @Test(expected = ConfigurationException.class)
    public void testLoadNoNode() throws ConfigurationException {
        ConfigChangeListener mockListener = mock(ConfigChangeListener.class);

        // Assert that load() fails when a nonexistent node is passed as argument.
        ZookeepersAsConfigSource source = new ZookeepersAsConfigSource();
        Configuration config = new BaseConfiguration();
        config.setProperty("quorum", zookeeperHost);
        config.setProperty("znode", "/nope/nope");
        config.setProperty(APPNAME_PROPERTY, "nope");
        source.configure(config, new BaseConfiguration(), mockListener);

        source.load("nope");
    }

    @Test
    public void testLoad() throws Exception {
        ConfigChangeListener mockListener = mock(ConfigChangeListener.class);
        HierarchicalConfigurationDeserializer deserializer = new YamlDeserializer();
        ArgumentCaptor<InputStream> argument = ArgumentCaptor.forClass(InputStream.class);

        // Prepare the znode on the ZooKeeper.
        setData(zookeeper, "test", "config:\n    string: XXX");

        ZookeepersAsConfigSource source = new ZookeepersAsConfigSource();
        source.configure(testConfig, new BaseConfiguration(), mockListener);
        source.listen("test");

        // Give the watcher time to register, then update the znode and wait
        // for the change callback to fire.
        TimeUnit.MILLISECONDS.sleep(100);
        setData(zookeeper, "test", "config:\n    string: YYY");
        TimeUnit.MILLISECONDS.sleep(100);

        // The listener must have been notified with the updated content.
        verify(mockListener).changed(eq("test"), argument.capture());
        ConfigurationResult config = deserializer.deserialize(argument.getValue());
        assertThat(config.getConfiguration().getString("config.string"), is("YYY"));
    }

    @Test
    public void testListen() throws Exception {
        final String VALUE_A = "key: AAA\n";
        final String VALUE_B = "key: BBB\n";
        final String VALUE_C = "key: CCC\n";
        final String VALUE_D = "key: DDD\n";

        // Initial value. This should not be returned by the listener, but is required to make sure the node exists.
        setData(zookeeper, "test", VALUE_A);

        HierarchicalConfigurationDeserializer deserializer = new YamlDeserializer();

        // Setup a listener to gather all returned configuration values.
        final HierarchicalConfiguration defaults = new HierarchicalConfiguration();
        final List<String> results = new ArrayList<>();
        ConfigChangeListener listener = (name, is) -> {
            ConfigurationResult conf;
            try {
                conf = deserializer.deserialize(is);
            } catch (ConfigurationException e) {
                // Unparseable updates (e.g. bogus YAML below) are ignored.
                return;
            }
            String key = conf.getConfiguration().getString("key");
            results.add(key);
        };

        ZookeepersAsConfigSource source = new ZookeepersAsConfigSource();
        source.configure(testConfig, defaults, listener);
        source.listen("test");
        TimeUnit.MILLISECONDS.sleep(300);

        // Normal update: should be observed (BBB).
        setData(zookeeper, "test", VALUE_B);
        TimeUnit.MILLISECONDS.sleep(300);
        // Deleting the node must not break the watcher; the re-created value
        // (CCC) should be observed next.
        deleteNode(zookeeper, "test");
        TimeUnit.MILLISECONDS.sleep(300);
        setData(zookeeper, "test", VALUE_C);
        TimeUnit.MILLISECONDS.sleep(300);
        // Invalid YAML is delivered but filtered out by the listener above.
        setData(zookeeper, "test", "{BOGUS_YAML");
        TimeUnit.MILLISECONDS.sleep(300);
        setData(zookeeper, "test", VALUE_D);
        TimeUnit.MILLISECONDS.sleep(300);
        // While not listening, updates (AAA) must be missed.
        source.stopListening("test");
        TimeUnit.MILLISECONDS.sleep(300);
        setData(zookeeper, "test", VALUE_A);
        TimeUnit.MILLISECONDS.sleep(300);
        source.listen("test");
        TimeUnit.MILLISECONDS.sleep(300);
        setData(zookeeper, "test", VALUE_B);
        TimeUnit.MILLISECONDS.sleep(300);

        // Expected sequence: BBB (update), CCC (after delete+recreate),
        // DDD (after bogus YAML skipped), BBB (after re-listen).
        assertThat(results.size(), is(4));
        assertThat(results.get(0), is("BBB"));
        assertThat(results.get(1), is("CCC"));
        assertThat(results.get(2), is("DDD"));
        assertThat(results.get(3), is("BBB"));

        // After close(), no further updates may arrive.
        source.close();
        TimeUnit.MILLISECONDS.sleep(300);
        setData(zookeeper, "test", "{BOGUS_YAML");
        TimeUnit.MILLISECONDS.sleep(300);

        assertThat(results.size(), is(4));
    }

    @Test
    public void configurationMonitorTest() throws Exception {
        // Seed an empty config node, then configure the library from system properties.
        setData(zookeeper, "test", "\n");

        System.setProperty(LIBRARY_PREFIX + ".type", "zookeeper");
        System.setProperty(LIBRARY_PREFIX + ".zookeeper.znode", "/config");
        System.setProperty(LIBRARY_PREFIX + ".zookeeper.quorum", zookeeperHost);
        System.setProperty(LIBRARY_PREFIX + "." + APPNAME_PROPERTY, "test");

        HierarchicalConfiguration defaults = new HierarchicalConfiguration();
        defaults.setProperty("key", "DEFAULT");

        InitializedConfiguration ic = ConfigurationManager.configureFromProperties(
                defaults, new YamlDeserializer()
        );
        Configuration configuration = ic.getConfiguration();

        // Count how many times the precomputed value is actually recomputed.
        final AtomicInteger count = new AtomicInteger(0);
        Precomputed<String> precomputed = Precomputed.monitorByUpdate(
                configuration,
                config -> {
                    count.incrementAndGet();
                    return config.getString("key");
                }
        );

        // First access computes once; further accesses without config changes do not.
        assertThat(precomputed.get(), is("DEFAULT"));
        assertThat(count.get(), is(1));
        assertThat(configuration.getString("key"), is("DEFAULT"));
        TimeUnit.MILLISECONDS.sleep(300);
        assertThat(precomputed.get(), is("DEFAULT"));
        assertThat(count.get(), is(1));

        // After a config update the next get() recomputes exactly once.
        setData(zookeeper, "test", "key: AAA");
        TimeUnit.MILLISECONDS.sleep(300);

        assertThat(count.get(), is(1));
        assertThat(configuration.getString("key"), is("AAA"));
        assertThat(precomputed.get(), is("AAA"));
        assertThat(count.get(), is(2));
        assertThat(precomputed.get(), is("AAA"));
        assertThat(count.get(), is(2));
    }

    @Test
    public void configurationMonitorIncludedTest() throws Exception {
        // Same as configurationMonitorTest, but the watched value comes from an
        // included file referenced via "extends".
        setData(zookeeper, "test", "extends:\n  - inc.yaml\n");
        setData(zookeeper, "inc.yaml", "\n");

        System.setProperty(LIBRARY_PREFIX + ".type", "zookeeper");
        System.setProperty(LIBRARY_PREFIX + ".zookeeper.znode", "/config");
        System.setProperty(LIBRARY_PREFIX + ".zookeeper.quorum", zookeeperHost);
        System.setProperty(LIBRARY_PREFIX + "." + APPNAME_PROPERTY, "test");

        HierarchicalConfiguration defaults = new HierarchicalConfiguration();
        defaults.setProperty("key", "DEFAULT");

        InitializedConfiguration ic = ConfigurationManager.configureFromProperties(
                defaults, new YamlDeserializer()
        );
        Configuration configuration = ic.getConfiguration();

        final AtomicInteger count = new AtomicInteger(0);
        Precomputed<String> precomputed = Precomputed.monitorByUpdate(
                configuration,
                config -> {
                    count.incrementAndGet();
                    return config.getString("key");
                }
        );

        assertThat(precomputed.get(), is("DEFAULT"));
        assertThat(count.get(), is(1));
        assertThat(configuration.getString("key"), is("DEFAULT"));
        TimeUnit.MILLISECONDS.sleep(300);
        assertThat(precomputed.get(), is("DEFAULT"));
        assertThat(count.get(), is(1));

        // Updating the *included* node must propagate to the composed configuration.
        setData(zookeeper, "inc.yaml", "key: AAA");
        TimeUnit.MILLISECONDS.sleep(300);

        assertThat(count.get(), is(1));
        assertThat(configuration.getString("key"), is("AAA"));
        assertThat(precomputed.get(), is("AAA"));
        assertThat(count.get(), is(2));
        assertThat(precomputed.get(), is("AAA"));
        assertThat(count.get(), is(2));
    }

    @Test
    public void viaInitializerTest() throws Exception {
        // End-to-end: configure from system properties and observe a live update.
        setData(zookeeper, "test", "\n");

        System.setProperty(LIBRARY_PREFIX + ".type", "zookeeper");
        System.setProperty(LIBRARY_PREFIX + ".zookeeper.znode", "/config");
        System.setProperty(LIBRARY_PREFIX + ".zookeeper.quorum", zookeeperHost);
        System.setProperty(LIBRARY_PREFIX + "." + APPNAME_PROPERTY, "test");

        HierarchicalConfiguration defaults = new HierarchicalConfiguration();
        defaults.setProperty("key", "DEFAULT");

        InitializedConfiguration ic = ConfigurationManager.configureFromProperties(
                defaults, new YamlDeserializer()
        );
        Configuration configuration = ic.getConfiguration();

        assertThat(configuration.getString("key"), is("DEFAULT"));

        setData(zookeeper, "test", "key: AAA");
        TimeUnit.MILLISECONDS.sleep(300);

        assertThat(configuration.getString("key"), is("AAA"));
    }

    @After
    public void tearDown() throws Exception {
        // Close the client first, then stop the embedded server thread.
        zookeeper.close();
        zkServer.shutdown();
        TimeUnit.SECONDS.sleep(1);
        server.interrupt();
    }
}
| |
/**
* Copyright 2005-2014 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.beyondj.gateway.handlers.detecting.protocol.openwire.command;
import com.beyondj.gateway.handlers.detecting.protocol.openwire.codec.OpenWireFormat;
import com.beyondj.gateway.handlers.detecting.protocol.openwire.support.MarshallingSupport;
import com.beyondj.gateway.handlers.detecting.protocol.openwire.support.OpenwireException;
import com.beyondj.gateway.handlers.detecting.protocol.openwire.support.Settings;
import org.fusesource.hawtbuf.Buffer;
import org.fusesource.hawtbuf.ByteArrayInputStream;
import org.fusesource.hawtbuf.ByteArrayOutputStream;
import java.io.*;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.InflaterInputStream;
/**
* @openwire:marshaller
*/
public class ActiveMQMapMessage extends ActiveMQMessage {
public static final byte DATA_STRUCTURE_TYPE = CommandTypes.ACTIVEMQ_MAP_MESSAGE;
protected transient Map<String, Object> map = new HashMap<String, Object>();
/**
 * Restores the transient {@code map} after Java deserialization (transient
 * fields arrive as {@code null}).
 */
private Object readResolve() throws ObjectStreamException {
    if (this.map == null) {
        this.map = new HashMap<String, Object>();
    }
    return this;
}

/** Creates a copy of this message; the map is serialized into the content first. */
public Message copy() {
    ActiveMQMapMessage copy = new ActiveMQMapMessage();
    copy(copy);
    return copy;
}

private void copy(ActiveMQMapMessage copy) {
    // Marshal the map into the content buffer before copying base state, so
    // the copy carries the serialized form of the map.
    storeContent();
    super.copy(copy);
}

// We only need to marshal the content if we are hitting the wire.
public void beforeMarshall(OpenWireFormat wireFormat) throws IOException {
    super.beforeMarshall(wireFormat);
    storeContent();
}

/** Drops marshalled state in the parent and the in-memory map together. */
public void clearMarshalledState() {
    super.clearMarshalledState();
    map.clear();
}
private void storeContent() {
try {
if (getContent() == null && !map.isEmpty()) {
ByteArrayOutputStream bytesOut = new ByteArrayOutputStream();
OutputStream os = bytesOut;
if (Settings.enable_compression()) {
compressed = true;
os = new DeflaterOutputStream(os);
}
DataOutputStream dataOut = new DataOutputStream(os);
MarshallingSupport.marshalPrimitiveMap(map, dataOut);
dataOut.close();
setContent(bytesOut.toBuffer());
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Builds the message body from data
*
* @throws OpenwireException
*/
private void loadContent() throws OpenwireException {
try {
if (getContent() != null && map.isEmpty()) {
Buffer content = getContent();
InputStream is = new ByteArrayInputStream(content);
if (isCompressed()) {
is = new InflaterInputStream(is);
}
DataInputStream dataIn = new DataInputStream(is);
map = MarshallingSupport.unmarshalPrimitiveMap(dataIn);
dataIn.close();
}
} catch (IOException e) {
throw new OpenwireException(e);
}
}
public byte getDataStructureType() {
return DATA_STRUCTURE_TYPE;
}
public String getJMSXMimeType() {
return "jms/map-message";
}
public void clearBody() throws OpenwireException {
super.clearBody();
map.clear();
}
public boolean getBoolean(String name) throws OpenwireException {
initializeReading();
Object value = map.get(name);
if (value == null) {
return false;
}
if (value instanceof Boolean) {
return ((Boolean)value).booleanValue();
}
if (value instanceof String) {
return Boolean.valueOf(value.toString()).booleanValue();
} else {
throw new OpenwireException(" cannot read a boolean from " + value.getClass().getName());
}
}
public byte getByte(String name) throws OpenwireException {
initializeReading();
Object value = map.get(name);
if (value == null) {
return 0;
}
if (value instanceof Byte) {
return ((Byte)value).byteValue();
}
if (value instanceof String) {
return Byte.valueOf(value.toString()).byteValue();
} else {
throw new OpenwireException(" cannot read a byte from " + value.getClass().getName());
}
}
public short getShort(String name) throws OpenwireException {
initializeReading();
Object value = map.get(name);
if (value == null) {
return 0;
}
if (value instanceof Short) {
return ((Short)value).shortValue();
}
if (value instanceof Byte) {
return ((Byte)value).shortValue();
}
if (value instanceof String) {
return Short.valueOf(value.toString()).shortValue();
} else {
throw new OpenwireException(" cannot read a short from " + value.getClass().getName());
}
}
public char getChar(String name) throws OpenwireException {
initializeReading();
Object value = map.get(name);
if (value == null) {
throw new NullPointerException();
}
if (value instanceof Character) {
return ((Character)value).charValue();
} else {
throw new OpenwireException(" cannot read a short from " + value.getClass().getName());
}
}
public int getInt(String name) throws OpenwireException {
initializeReading();
Object value = map.get(name);
if (value == null) {
return 0;
}
if (value instanceof Integer) {
return ((Integer)value).intValue();
}
if (value instanceof Short) {
return ((Short)value).intValue();
}
if (value instanceof Byte) {
return ((Byte)value).intValue();
}
if (value instanceof String) {
return Integer.valueOf(value.toString()).intValue();
} else {
throw new OpenwireException(" cannot read an int from " + value.getClass().getName());
}
}
public long getLong(String name) throws OpenwireException {
initializeReading();
Object value = map.get(name);
if (value == null) {
return 0;
}
if (value instanceof Long) {
return ((Long)value).longValue();
}
if (value instanceof Integer) {
return ((Integer)value).longValue();
}
if (value instanceof Short) {
return ((Short)value).longValue();
}
if (value instanceof Byte) {
return ((Byte)value).longValue();
}
if (value instanceof String) {
return Long.valueOf(value.toString()).longValue();
} else {
throw new OpenwireException(" cannot read a long from " + value.getClass().getName());
}
}
public float getFloat(String name) throws OpenwireException {
initializeReading();
Object value = map.get(name);
if (value == null) {
return 0;
}
if (value instanceof Float) {
return ((Float)value).floatValue();
}
if (value instanceof String) {
return Float.valueOf(value.toString()).floatValue();
} else {
throw new OpenwireException(" cannot read a float from " + value.getClass().getName());
}
}
public double getDouble(String name) throws OpenwireException {
initializeReading();
Object value = map.get(name);
if (value == null) {
return 0;
}
if (value instanceof Double) {
return ((Double)value).doubleValue();
}
if (value instanceof Float) {
return ((Float)value).floatValue();
}
if (value instanceof String) {
return Float.valueOf(value.toString()).floatValue();
} else {
throw new OpenwireException(" cannot read a double from " + value.getClass().getName());
}
}
public String getString(String name) throws OpenwireException {
initializeReading();
Object value = map.get(name);
if (value == null) {
return null;
}
if (value instanceof byte[]) {
throw new OpenwireException("Use getBytes to read a byte array");
} else {
return value.toString();
}
}
public byte[] getBytes(String name) throws OpenwireException {
initializeReading();
Object value = map.get(name);
if (value instanceof byte[]) {
return (byte[])value;
} else {
throw new OpenwireException(" cannot read a byte[] from " + value.getClass().getName());
}
}
public Object getObject(String name) throws OpenwireException {
initializeReading();
return map.get(name);
}
public Enumeration<String> getMapNames() throws OpenwireException {
initializeReading();
return Collections.enumeration(map.keySet());
}
protected void put(String name, Object value) throws OpenwireException {
if (name == null) {
throw new IllegalArgumentException("The name of the property cannot be null.");
}
if (name.length() == 0) {
throw new IllegalArgumentException("The name of the property cannot be an emprty string.");
}
map.put(name, value);
}
public void setBoolean(String name, boolean value) throws OpenwireException {
initializeWriting();
put(name, value ? Boolean.TRUE : Boolean.FALSE);
}
public void setByte(String name, byte value) throws OpenwireException {
initializeWriting();
put(name, Byte.valueOf(value));
}
public void setShort(String name, short value) throws OpenwireException {
initializeWriting();
put(name, Short.valueOf(value));
}
public void setChar(String name, char value) throws OpenwireException {
initializeWriting();
put(name, Character.valueOf(value));
}
public void setInt(String name, int value) throws OpenwireException {
initializeWriting();
put(name, Integer.valueOf(value));
}
public void setLong(String name, long value) throws OpenwireException {
initializeWriting();
put(name, Long.valueOf(value));
}
public void setFloat(String name, float value) throws OpenwireException {
initializeWriting();
put(name, new Float(value));
}
public void setDouble(String name, double value) throws OpenwireException {
initializeWriting();
put(name, new Double(value));
}
public void setString(String name, String value) throws OpenwireException {
initializeWriting();
put(name, value);
}
public void setBytes(String name, byte[] value) throws OpenwireException {
initializeWriting();
if (value != null) {
put(name, value);
} else {
map.remove(name);
}
}
public void setBytes(String name, byte[] value, int offset, int length) throws OpenwireException {
initializeWriting();
byte[] data = new byte[length];
System.arraycopy(value, offset, data, 0, length);
put(name, data);
}
public void setObject(String name, Object value) throws OpenwireException {
initializeWriting();
if (value != null) {
// byte[] not allowed on properties
if (!(value instanceof byte[])) {
checkValidObject(value);
}
put(name, value);
} else {
put(name, null);
}
}
public boolean itemExists(String name) throws OpenwireException {
initializeReading();
return map.containsKey(name);
}
private void initializeReading() throws OpenwireException {
loadContent();
}
private void initializeWriting() throws OpenwireException {
checkReadOnlyBody();
setContent(null);
}
public String toString() {
return super.toString() + " ActiveMQMapMessage{ " + "theTable = " + map + " }";
}
public Map<String, Object> getContentMap() throws OpenwireException {
initializeReading();
return map;
}
}
| |
package org.python.types;
/**
 * Java backing type for the Python {@code bool} builtin.
 *
 * <p>Only {@code __repr__} and {@code __bool__} are implemented here; every
 * other dunder method is a stub that raises Python's NotImplementedError with
 * a message naming the missing operation. The exception-message strings are
 * part of the observable runtime behavior and must not be edited.
 */
public class Bool extends org.python.types.Object {
    // The wrapped primitive truth value.
    public boolean value;
    /**
     * Return the python name for this class.
     */
    public java.lang.String getPythonName() {
        return "bool";
    }
    /**
     * A utility method to update the internal value of this object.
     *
     * Used by __i*__ operations to do an in-place operation.
     * obj must be of type org.python.types.Bool
     */
    void setValue(org.python.Object obj) {
        this.value = ((org.python.types.Bool) obj).value;
    }
    public Bool(boolean bool) {
        super();
        this.value = bool;
    }
    // public org.python.Object __new__() {
    //     throw new org.python.exceptions.NotImplementedError("bool.__new__() has not been implemented.");
    // }
    // public org.python.Object __init__() {
    //     throw new org.python.exceptions.NotImplementedError("bool.__init__() has not been implemented.");
    // }
    /** Python repr: "True" or "False", matching CPython's capitalization. */
    public org.python.types.Str __repr__() {
        if (this.value) {
            return new org.python.types.Str("True");
        } else {
            return new org.python.types.Str("False");
        }
    }
    public org.python.types.Str __format__() {
        throw new org.python.exceptions.NotImplementedError("bool.__format__() has not been implemented.");
    }
    // --- Rich comparison stubs ---------------------------------------------
    public org.python.Object __lt__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__lt__() has not been implemented.");
    }
    public org.python.Object __le__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__le__() has not been implemented.");
    }
    public org.python.Object __eq__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__eq__() has not been implemented.");
    }
    public org.python.Object __ne__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__ne__() has not been implemented.");
    }
    public org.python.Object __gt__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__gt__() has not been implemented.");
    }
    public org.python.Object __ge__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__ge__() has not been implemented.");
    }
    /** Truthiness of a bool is the bool itself; returns a fresh wrapper. */
    public org.python.types.Bool __bool__() {
        return new org.python.types.Bool(this.value);
    }
    // --- Attribute access stubs --------------------------------------------
    public org.python.Object __getattribute__(java.lang.String name) {
        throw new org.python.exceptions.NotImplementedError("bool.__getattribute__() has not been implemented.");
    }
    public void __setattr__(java.lang.String name, org.python.Object value) {
        throw new org.python.exceptions.NotImplementedError("bool.__setattr__() has not been implemented.");
    }
    public void __delattr__(java.lang.String name) {
        throw new org.python.exceptions.NotImplementedError("bool.__delattr__() has not been implemented.");
    }
    public org.python.types.List __dir__() {
        throw new org.python.exceptions.NotImplementedError("bool.__dir__() has not been implemented.");
    }
    // --- Binary arithmetic / bitwise stubs ---------------------------------
    public org.python.Object __add__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__add__() has not been implemented.");
    }
    public org.python.Object __sub__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__sub__() has not been implemented.");
    }
    public org.python.Object __mul__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__mul__() has not been implemented.");
    }
    public org.python.Object __truediv__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__truediv__() has not been implemented.");
    }
    public org.python.Object __floordiv__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__floordiv__() has not been implemented.");
    }
    public org.python.Object __mod__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__mod__() has not been implemented.");
    }
    public org.python.Object __divmod__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__divmod__() has not been implemented.");
    }
    public org.python.Object __pow__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__pow__() has not been implemented.");
    }
    public org.python.Object __lshift__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__lshift__() has not been implemented.");
    }
    public org.python.Object __rshift__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__rshift__() has not been implemented.");
    }
    public org.python.Object __and__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__and__() has not been implemented.");
    }
    public org.python.Object __xor__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__xor__() has not been implemented.");
    }
    public org.python.Object __or__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__or__() has not been implemented.");
    }
    // --- Reflected (right-hand) binary operation stubs ---------------------
    public org.python.Object __radd__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__radd__() has not been implemented.");
    }
    public org.python.Object __rsub__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__rsub__() has not been implemented.");
    }
    public org.python.Object __rmul__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__rmul__() has not been implemented.");
    }
    public org.python.Object __rtruediv__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__rtruediv__() has not been implemented.");
    }
    public org.python.Object __rfloordiv__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__rfloordiv__() has not been implemented.");
    }
    public org.python.Object __rmod__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__rmod__() has not been implemented.");
    }
    public org.python.Object __rdivmod__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__rdivmod__() has not been implemented.");
    }
    public org.python.Object __rpow__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__rpow__() has not been implemented.");
    }
    public org.python.Object __rlshift__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__rlshift__() has not been implemented.");
    }
    public org.python.Object __rrshift__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__rrshift__() has not been implemented.");
    }
    public org.python.Object __rand__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__rand__() has not been implemented.");
    }
    public org.python.Object __rxor__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__rxor__() has not been implemented.");
    }
    public org.python.Object __ror__(org.python.Object other) {
        throw new org.python.exceptions.NotImplementedError("bool.__ror__() has not been implemented.");
    }
    // --- Unary operation and conversion stubs ------------------------------
    public org.python.Object __neg__() {
        throw new org.python.exceptions.NotImplementedError("bool.__neg__() has not been implemented.");
    }
    public org.python.Object __pos__() {
        throw new org.python.exceptions.NotImplementedError("bool.__pos__() has not been implemented.");
    }
    public org.python.Object __abs__() {
        throw new org.python.exceptions.NotImplementedError("bool.__abs__() has not been implemented.");
    }
    public org.python.Object __invert__() {
        throw new org.python.exceptions.NotImplementedError("bool.__invert__() has not been implemented.");
    }
    public org.python.types.Int __int__() {
        throw new org.python.exceptions.NotImplementedError("bool.__int__() has not been implemented.");
    }
    public org.python.types.Float __float__() {
        throw new org.python.exceptions.NotImplementedError("bool.__float__() has not been implemented.");
    }
    public org.python.Object __round__() {
        throw new org.python.exceptions.NotImplementedError("bool.__round__() has not been implemented.");
    }
}
| |
/*
* Copyright 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.template.soy.shared.internal;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableTable;
import com.google.common.collect.Table;
import com.google.common.collect.Tables;
import com.google.errorprone.annotations.Immutable;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.function.Predicate;
import javax.annotation.Nullable;
/**
* Utility for applying deltemplate selection logic to an arbitrary set of values.
*
* <p>This object allows selection logic to be shared between both tofu and jbcsrc which use
* different runtime representations for templates, without needing to have hard dependencies on
* those runtime representations. For example, tofu uses {@code TemplateDelegateNode} and jbcsrc
* uses {@code CompiledTemplate}.
*
* <p>This logic should be kept in sync with the JS and Python runtime logic. See the JS {@code
* soy.$$getDelegateFn} and {@code soy.$$registerDelegateFn} methods.
*
* @param <T> The type of the values in the selector
*/
@Immutable(containerOf = "T")
public final class DelTemplateSelector<T> {
    // (deltemplate name, variant) -> candidate group for that pair.
    private final ImmutableTable<String, String, Group<T>> nameAndVariantToGroup;
    // deltemplate name -> every registered value, disregarding variant.
    private final ImmutableListMultimap<String, T> delTemplateNameToValues;

    private DelTemplateSelector(Builder<T> builder) {
        ImmutableTable.Builder<String, String, Group<T>> nameAndVariantBuilder =
            ImmutableTable.builder();
        ImmutableListMultimap.Builder<String, T> delTemplateNameToValuesBuilder =
            ImmutableListMultimap.builder();
        for (Table.Cell<String, String, Group.Builder<T>> entry :
            builder.nameAndVariantToGroup.cellSet()) {
            Group<T> group = entry.getValue().build();
            nameAndVariantBuilder.put(entry.getRowKey(), entry.getColumnKey(), group);
            String delTemplateName = entry.getRowKey();
            if (group.defaultValue != null) {
                delTemplateNameToValuesBuilder.put(delTemplateName, group.defaultValue);
            }
            delTemplateNameToValuesBuilder.putAll(delTemplateName, group.delpackageToValue.values());
        }
        this.nameAndVariantToGroup = nameAndVariantBuilder.build();
        this.delTemplateNameToValues = delTemplateNameToValuesBuilder.build();
    }

    /**
     * Returns a multimap from deltemplate name to every member (disregarding variant).
     *
     * <p>This is useful for compiler passes that need to validate all members of a deltemplate
     * group.
     */
    public ImmutableListMultimap<String, T> delTemplateNameToValues() {
        return delTemplateNameToValues;
    }

    public boolean hasDelTemplateNamed(String delTemplateName) {
        return nameAndVariantToGroup.containsRow(delTemplateName);
    }

    /**
     * Returns an active delegate for the given name, variant and active packages. If no active
     * delegate is found for the {@code variant} we fall back to a non-variant lookup. Finally, we
     * return {@code null} if no such template can be found.
     *
     * <p>See {@code soy.$$getDelegateFn} for the {@code JS} version
     */
    @Nullable
    public T selectTemplate(
        String delTemplateName, String variant, Predicate<String> activeDelPackageSelector) {
        Group<T> group = nameAndVariantToGroup.get(delTemplateName, variant);
        if (group != null) {
            T selection = group.select(activeDelPackageSelector);
            if (selection != null) {
                return selection;
            }
        }
        if (!variant.isEmpty()) {
            // Retry with an empty variant
            group = nameAndVariantToGroup.get(delTemplateName, "");
            if (group != null) {
                return group.select(activeDelPackageSelector);
            }
        }
        return null;
    }

    /** A Builder for DelTemplateSelector. */
    public static final class Builder<T> {
        // LinkedHashMap-backed so iteration order matches registration order.
        private final Table<String, String, Group.Builder<T>> nameAndVariantToGroup =
            Tables.newCustomTable(new LinkedHashMap<>(), LinkedHashMap::new);

        /** Adds a template in the default delpackage. */
        public T addDefault(String delTemplateName, String variant, T value) {
            return getBuilder(delTemplateName, variant).setDefault(value);
        }

        /** Adds a deltemplate. */
        public T add(String delTemplateName, String delpackage, String variant, T value) {
            return getBuilder(delTemplateName, variant).add(delpackage, value);
        }

        private Group.Builder<T> getBuilder(String name, String variant) {
            checkArgument(!name.isEmpty());
            Group.Builder<T> v = nameAndVariantToGroup.get(name, variant);
            if (v == null) {
                v = new Group.Builder<>(name + (variant.isEmpty() ? "" : ":" + variant));
                nameAndVariantToGroup.put(name, variant, v);
            }
            return v;
        }

        public DelTemplateSelector<T> build() {
            return new DelTemplateSelector<>(this);
        }
    }

    /** Represents all the templates for a given deltemplate name and variant value. */
    @Immutable(containerOf = "T")
    private static final class Group<T> {
        final String formattedName;
        @Nullable final T defaultValue;
        final ImmutableMap<String, T> delpackageToValue;

        private Group(Builder<T> builder) {
            this.formattedName = checkNotNull(builder.formattedName);
            this.defaultValue = builder.defaultValue;
            this.delpackageToValue = ImmutableMap.copyOf(builder.delpackageToValue);
        }

        /**
         * Returns the value from this group based on the current active packages, or the default if
         * one exists.
         *
         * @throws IllegalArgumentException if more than one delpackage in this group is active.
         */
        T select(Predicate<String> activeDelPackageSelector) {
            Map.Entry<String, T> selected = null;
            // Select whatever delpackage is active and ensure that only one is activated. If none
            // are active use the default.
            // This is analogous to what happens in JavaScript, see soy.$$registerDelegateFn. The
            // main difference is that in JavaScript delegate conflicts are resolved/detected at
            // code loading time. In Java it is only at rendering time because that is when the set
            // of active packages is determined.
            // In theory we could validate the Predicate against the whole DelTemplateSelector at
            // the start of rendering which would flag erroneous Predicates even if no deltemplate
            // group containing the conflict is ever rendered. However, this sounds like a
            // potentially expensive operation, so for now we delay detecting conflicts until
            // selection time.
            for (Map.Entry<String, T> entry : delpackageToValue.entrySet()) {
                if (activeDelPackageSelector.test(entry.getKey())) {
                    if (selected != null) {
                        throw new IllegalArgumentException(
                            String.format(
                                "For delegate template '%s', found two active implementations with equal"
                                    + " priority in delegate packages '%s' and '%s'.",
                                formattedName, entry.getKey(), selected.getKey()));
                    }
                    selected = entry;
                }
            }
            if (selected != null) {
                return selected.getValue();
            }
            return defaultValue;
        }

        static final class Builder<T> {
            final String formattedName;
            Map<String, T> delpackageToValue = new LinkedHashMap<>();
            T defaultValue;

            Builder(String formattedName) {
                this.formattedName = checkNotNull(formattedName);
            }

            /**
             * Sets the default value if none exists yet and returns {@code null}; otherwise leaves
             * the existing default in place and returns it (callers treat a non-null return as a
             * duplicate-default conflict).
             */
            T setDefault(T defaultValue) {
                if (this.defaultValue != null) {
                    return this.defaultValue;
                }
                // Removed dead checkState(this.defaultValue == null): the early return above
                // already guarantees it.
                this.defaultValue = checkNotNull(defaultValue);
                return null;
            }

            /** Registers a value for a delpackage; returns the previously-registered value, if any. */
            T add(String delpackage, T value) {
                checkArgument(!delpackage.isEmpty());
                T prev = delpackageToValue.put(delpackage, checkNotNull(value));
                return prev;
            }

            Group<T> build() {
                return new Group<>(this);
            }
        }
    }
}
| |
/*
* Copyright 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.css.compiler.passes;
import static com.google.common.truth.Truth.assertThat;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.css.SourceCode;
import com.google.common.css.SourceCodeLocation;
import com.google.common.css.compiler.ast.CssDefinitionNode;
import com.google.common.css.compiler.ast.CssFunctionNode;
import com.google.common.css.compiler.ast.CssKeyframesNode;
import com.google.common.css.compiler.ast.CssLiteralNode;
import com.google.common.css.compiler.ast.CssMediaRuleNode;
import com.google.common.css.compiler.ast.CssSelectorNode;
import com.google.common.css.compiler.ast.CssValueNode;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Unit tests for {@link MapChunkAwareNodesToChunk}.
*
* <p>This test case can be extended, so that the tests are reused. The check* methods need to be
* overridden if the subclass expects a different result.
*
* @author dgajda@google.com (Damian Gajda)
*/
@RunWith(JUnit4.class)
public class MapChunkAwareNodesToChunkTest {
  // Source-file names (a..e) and chunk names (A..D) used by the fixture.
  // FILE_TO_CHUNK maps each file to its owning chunk; note F1 and F2 share CA.
  protected static final String F2 = "b";
  protected static final String F1 = "a";
  protected static final String F3 = "c";
  protected static final String F4 = "d";
  protected static final String F5 = "e";
  protected static final String CS = "D";
  protected static final String CA = "A";
  protected static final String CB = "B";
  protected static final String CC = "C";
  protected static final ImmutableMap<String, String> FILE_TO_CHUNK =
      ImmutableMap.<String, String>builder()
          .put(F1, CA)
          .put(F2, CA)
          .put(F3, CB)
          .put(F4, CC)
          .put(F5, CS)
          .build();
  // Pass under test plus one node of each chunk-aware kind, named
  // <kind><file-number><location-letter>, e.g. sel3b = selector, file 3, location b.
  protected MapChunkAwareNodesToChunk<String> pass;
  protected CssSelectorNode sel1a;
  protected CssSelectorNode sel1b;
  protected CssSelectorNode sel2a;
  protected CssSelectorNode sel3a;
  protected CssSelectorNode sel3b;
  protected CssSelectorNode sel3c;
  protected CssSelectorNode sel4c;
  protected CssSelectorNode sel5a;
  protected CssDefinitionNode def1a;
  protected CssDefinitionNode def2a;
  protected CssFunctionNode fun1b;
  protected CssMediaRuleNode media3a;
  protected CssKeyframesNode keyframes3b;
  @Before
  public void setUp() throws Exception {
    pass = getPass(FILE_TO_CHUNK);
    List<CssValueNode> parameters = ImmutableList.of();
    // Each node gets a SourceCodeLocation tying it to one of the files above;
    // the pass later derives each node's chunk from that location.
    SourceCode sourceCode1 = new SourceCode(F1, null);
    SourceCodeLocation location1a =
        new SourceCodeLocation(sourceCode1, 1, 1, 1, 2, 1, 1);
    sel1a = new CssSelectorNode("a", location1a);
    def1a = new CssDefinitionNode(
        parameters, new CssLiteralNode("DEF1"), null, location1a);
    SourceCodeLocation location1b =
        new SourceCodeLocation(sourceCode1, 10, 2, 2, 11, 2, 2);
    sel1b = new CssSelectorNode("b", location1b);
    fun1b =
        new CssFunctionNode(CssFunctionNode.Function.byName("url"), location1b);
    SourceCode sourceCode2 = new SourceCode(F2, null);
    SourceCodeLocation location2a =
        new SourceCodeLocation(sourceCode2, 1, 1, 1, 2, 1, 1);
    sel2a = new CssSelectorNode("a", location2a);
    def2a = new CssDefinitionNode(
        parameters, new CssLiteralNode("DEF2"), null, location2a);
    SourceCode sourceCode3 = new SourceCode(F3, null);
    SourceCodeLocation location3a =
        new SourceCodeLocation(sourceCode3, 1, 1, 1, 2, 1, 1);
    sel3a = new CssSelectorNode("a", location3a);
    media3a = new CssMediaRuleNode();
    media3a.setSourceCodeLocation(location3a);
    SourceCodeLocation location3b =
        new SourceCodeLocation(sourceCode3, 1, 1, 1, 2, 1, 1);
    sel3b = new CssSelectorNode("b", location3b);
    keyframes3b = new CssKeyframesNode(new CssLiteralNode("keyframes"));
    keyframes3b.setSourceCodeLocation(location3b);
    SourceCodeLocation location3c =
        new SourceCodeLocation(sourceCode3, 10, 2, 2, 11, 2, 2);
    sel3c = new CssSelectorNode("c", location3c);
    SourceCode sourceCode4 = new SourceCode(F4, null);
    SourceCodeLocation location4c =
        new SourceCodeLocation(sourceCode4, 10, 2, 2, 11, 2, 2);
    sel4c = new CssSelectorNode("c", location4c);
    SourceCode sourceCode5 = new SourceCode(F5, null);
    SourceCodeLocation location5a =
        new SourceCodeLocation(sourceCode5, 1, 1, 1, 2, 1, 1);
    sel5a = new CssSelectorNode("a", location5a);
  }
  @Test
  public void testMapToChunk() {
    // Feed every node through the pass, then verify each ended up in the chunk
    // that FILE_TO_CHUNK assigns to its source file.
    setupEnterSelector();
    setupEnterDefinition();
    setupEnterFunctionNode();
    setupEnterMediaRule();
    setupEnterKeyframesRule();
    checkEnterSelector();
    checkEnterDefinition();
    // Only one assert per node type, so these aren't put into their own
    // functions.
    assertThat(fun1b.getChunk()).isEqualTo(CA);
    assertThat(media3a.getChunk()).isEqualTo(CB);
    assertThat(keyframes3b.getChunk()).isEqualTo(CB);
  }
  @Test
  public void testMissingFileToChunkMapping() {
    // Same as FILE_TO_CHUNK but deliberately missing F2, so entering a node
    // from file F2 must fail.
    Map<String, String> badFileToChunk =
        ImmutableMap.<String ,String>builder()
            .put(F1, CA)
            .put(F3, CB)
            .put(F4, CC)
            .put(F5, CS)
            .build();
    pass = getPass(badFileToChunk);
    try {
      pass.enterSelector(sel2a);
      Assert.fail("Node 2a does not have a file to chunk mapping");
    } catch (NullPointerException expected) {
      // OK
    }
  }
  // Overridable in subclasses to test a different pass implementation.
  protected MapChunkAwareNodesToChunk<String> getPass(Map<String, String> fileToChunk) {
    return new MapChunkAwareNodesToChunk<String>(null, fileToChunk);
  }
  // check* methods may be overridden by subclasses expecting different results.
  protected void checkEnterSelector() {
    assertThat(sel1a.getChunk()).isEqualTo(CA);
    assertThat(sel1b.getChunk()).isEqualTo(CA);
    assertThat(sel2a.getChunk()).isEqualTo(CA);
    assertThat(sel3a.getChunk()).isEqualTo(CB);
    assertThat(sel3b.getChunk()).isEqualTo(CB);
    assertThat(sel3c.getChunk()).isEqualTo(CB);
    assertThat(sel4c.getChunk()).isEqualTo(CC);
    assertThat(sel5a.getChunk()).isEqualTo(CS);
  }
  protected void checkEnterDefinition() {
    assertThat(def1a.getChunk()).isEqualTo(CA);
    assertThat(def2a.getChunk()).isEqualTo(CA);
  }
  // setup* helpers assert the precondition (no chunk assigned yet) before
  // running the pass over each node.
  private void setupEnterSelector() {
    assertThat(sel1a.getChunk()).isNull();
    assertThat(sel1b.getChunk()).isNull();
    assertThat(sel2a.getChunk()).isNull();
    assertThat(sel3a.getChunk()).isNull();
    assertThat(sel3b.getChunk()).isNull();
    assertThat(sel3c.getChunk()).isNull();
    assertThat(sel4c.getChunk()).isNull();
    assertThat(sel5a.getChunk()).isNull();
    pass.enterSelector(sel1a);
    pass.enterSelector(sel1b);
    pass.enterSelector(sel2a);
    pass.enterSelector(sel3a);
    pass.enterSelector(sel3b);
    pass.enterSelector(sel3c);
    pass.enterSelector(sel4c);
    pass.enterSelector(sel5a);
  }
  private void setupEnterDefinition() {
    assertThat(def1a.getChunk()).isNull();
    assertThat(def2a.getChunk()).isNull();
    pass.enterDefinition(def1a);
    pass.enterDefinition(def2a);
  }
  private void setupEnterFunctionNode() {
    assertThat(fun1b.getChunk()).isNull();
    pass.enterFunctionNode(fun1b);
  }
  private void setupEnterMediaRule() {
    assertThat(media3a.getChunk()).isNull();
    pass.enterMediaRule(media3a);
  }
  private void setupEnterKeyframesRule() {
    assertThat(keyframes3b.getChunk()).isNull();
    pass.enterKeyframesRule(keyframes3b);
  }
}
| |
package org.ovirt.engine.ui.webadmin.plugin;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
import org.ovirt.engine.ui.common.auth.CurrentUser;
import org.ovirt.engine.ui.webadmin.plugin.api.ApiOptions;
import org.ovirt.engine.ui.webadmin.plugin.api.PluginUiFunctions;
import org.ovirt.engine.ui.webadmin.plugin.jsni.JsFunction.ErrorHandler;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.core.client.JsArray;
import com.google.gwt.core.client.Scheduler;
import com.google.gwt.core.client.Scheduler.ScheduledCommand;
import com.google.gwt.dom.client.Document;
import com.google.gwt.dom.client.IFrameElement;
import com.google.gwt.dom.client.Style.BorderStyle;
import com.google.gwt.dom.client.Style.Position;
import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.user.client.Command;
import com.google.inject.Inject;
/**
* The main component of WebAdmin UI plugin infrastructure.
* <p>
 * This class has the following responsibilities:
* <ul>
* <li>create and expose plugin API
* <li>define and load plugins
* <li>enforce standard plugin lifecycle
* </ul>
* <p>
* Should be bound as GIN eager singleton, created early on during application startup.
*/
public class PluginManager {

    /**
     * Condition evaluated against each plugin to decide whether a given event
     * handler function may be invoked on it.
     */
    public interface PluginInvocationCondition {
        boolean canInvoke(Plugin plugin);
    }

    // Condition that accepts every plugin (no filtering).
    private static final PluginInvocationCondition INVOKE_ANY_PLUGIN = new PluginInvocationCondition() {
        @Override
        public boolean canInvoke(Plugin plugin) {
            return true;
        }
    };

    private static final Logger logger = Logger.getLogger(PluginManager.class.getName());

    // Maps plugin names to corresponding object representations
    private final Map<String, Plugin> plugins = new HashMap<>();

    // Maps plugin names to scheduled event handler functions invoked via Command interface
    private final Map<String, List<Command>> scheduledFunctionCommands = new HashMap<>();

    // Controls plugin invocation, allowing WebAdmin to call plugins only in a specific context
    private boolean canInvokePlugins = false;

    private final PluginUiFunctions uiFunctions;
    private final CurrentUser user;

    @Inject
    public PluginManager(PluginUiFunctions uiFunctions, CurrentUser user) {
        this.uiFunctions = uiFunctions;
        this.user = user;
        // Expose the JavaScript plugin API before loading plugin host pages,
        // so their scripts can call it as soon as they execute.
        exposePluginApi();
        defineAndLoadPlugins();
    }

    /** Returns the plugin with the given name, or {@code null} if not defined. */
    Plugin getPlugin(String pluginName) {
        return plugins.get(pluginName);
    }

    /** Returns all currently defined plugins. */
    Collection<Plugin> getPlugins() {
        return plugins.values();
    }

    /** Registers the given plugin under its name. */
    void addPlugin(Plugin plugin) {
        plugins.put(plugin.getName(), plugin);
    }

    /**
     * Queues a command for the given plugin; queued commands are run by
     * {@link #invokeScheduledFunctionCommands} once the plugin is in use.
     */
    void scheduleFunctionCommand(String pluginName, Command command) {
        if (!scheduledFunctionCommands.containsKey(pluginName)) {
            scheduledFunctionCommands.put(pluginName, new ArrayList<Command>());
        }
        scheduledFunctionCommands.get(pluginName).add(command);
    }

    /**
     * Executes and discards all commands queued for the given plugin.
     */
    void invokeScheduledFunctionCommands(String pluginName) {
        List<Command> commands = scheduledFunctionCommands.get(pluginName);
        if (commands != null) {
            for (Command c : commands) {
                c.execute();
            }
        }
        scheduledFunctionCommands.remove(pluginName);
    }

    /** Discards all queued commands for all plugins. */
    void cancelScheduledFunctionCommands() {
        scheduledFunctionCommands.clear();
    }

    /**
     * Defines all plugins that were detected when serving WebAdmin host page, and loads them as necessary.
     */
    void defineAndLoadPlugins() {
        PluginDefinitions definitions = PluginDefinitions.instance();
        if (definitions != null) {
            JsArray<PluginMetaData> metaDataArray = definitions.getMetaDataArray();
            for (int i = 0; i < metaDataArray.length(); i++) {
                PluginMetaData pluginMetaData = metaDataArray.get(i);
                if (pluginMetaData != null) {
                    defineAndLoadPlugin(pluginMetaData);
                }
            }
        }
    }

    /**
     * Defines a plugin from the given meta-data, and loads it as necessary.
     * Rejects meta-data with a missing name or host page URL, and duplicate names.
     */
    void defineAndLoadPlugin(PluginMetaData pluginMetaData) {
        String pluginName = pluginMetaData.getName();
        String pluginHostPageUrl = pluginMetaData.getHostPageUrl();
        if (pluginName == null || pluginName.trim().isEmpty()) {
            logger.warning("Plugin name cannot be null or empty"); //$NON-NLS-1$
            return;
        } else if (pluginHostPageUrl == null || pluginHostPageUrl.trim().isEmpty()) {
            logger.warning("Plugin [" + pluginName + "] has null or empty host page URL"); //$NON-NLS-1$ //$NON-NLS-2$
            return;
        } else if (getPlugin(pluginName) != null) {
            logger.warning("Plugin [" + pluginName + "] is already defined"); //$NON-NLS-1$ //$NON-NLS-2$
            return;
        }
        // Create an iframe element used to load the plugin host page.
        // The iframe is made effectively invisible (absolute position, zero size,
        // no border) since it only hosts the plugin's script, not UI.
        IFrameElement iframe = Document.get().createIFrameElement();
        iframe.setSrc(pluginHostPageUrl);
        iframe.setFrameBorder(0);
        iframe.getStyle().setPosition(Position.ABSOLUTE);
        iframe.getStyle().setWidth(0, Unit.PT);
        iframe.getStyle().setHeight(0, Unit.PT);
        iframe.getStyle().setBorderStyle(BorderStyle.NONE);
        Plugin plugin = new Plugin(pluginMetaData, iframe);
        addPlugin(plugin);
        logger.info("Plugin [" + pluginName + "] is defined to be loaded from URL " + pluginHostPageUrl); //$NON-NLS-1$ //$NON-NLS-2$
        if (pluginMetaData.isEnabled()) {
            loadPlugin(plugin);
        }
    }

    /**
     * Loads the given plugin by attaching the corresponding iframe element to DOM.
     * Only plugins still in the DEFINED state are loaded.
     */
    void loadPlugin(Plugin plugin) {
        if (plugin.isInState(PluginState.DEFINED)) {
            logger.info("Loading plugin [" + plugin.getName() + "]"); //$NON-NLS-1$ //$NON-NLS-2$
            Document.get().getBody().appendChild(plugin.getIFrameElement());
            plugin.markAsLoading();
        }
    }

    /**
     * Called when WebAdmin enters the state that allows plugins to be invoked.
     */
    public void enablePluginInvocation() {
        canInvokePlugins = true;
        // Try to initialize all plugins after the browser event loop returns
        Scheduler.get().scheduleDeferred(new ScheduledCommand() {
            @Override
            public void execute() {
                for (Plugin plugin : getPlugins()) {
                    initPlugin(plugin);
                }
            }
        });
    }

    /**
     * Called when WebAdmin leaves the state that allows plugins to be invoked.
     */
    public void disablePluginInvocation() {
        canInvokePlugins = false;
        // Clean up scheduled event handler functions for all plugins,
        // since we are leaving the current plugin invocation context
        cancelScheduledFunctionCommands();
    }

    /**
     * Invokes an event handler function on all plugins which are currently {@linkplain PluginState#IN_USE in use}.
     * <p>
     * {@code functionArgs} represents the argument list to use when calling the given function (can be {@code null}).
     */
    public void invokePluginsNow(String functionName, JsArray<?> functionArgs) {
        invokePluginsNow(functionName, functionArgs, INVOKE_ANY_PLUGIN);
    }

    /**
     * Invokes an event handler function on all plugins which are currently {@linkplain PluginState#IN_USE in use} and
     * meet the given condition.
     * <p>
     * {@code functionArgs} represents the argument list to use when calling the given function (can be {@code null}).
     */
    public void invokePluginsNow(String functionName, JsArray<?> functionArgs, PluginInvocationCondition condition) {
        if (canInvokePlugins) {
            for (Plugin plugin : getPlugins()) {
                if (plugin.isInState(PluginState.IN_USE) && condition.canInvoke(plugin)) {
                    invokePlugin(plugin, functionName, functionArgs);
                }
            }
        }
    }

    /**
     * Invokes an event handler function on all plugins which are currently {@linkplain PluginState#IN_USE in use}, and
     * schedules invocation of the given function on all plugins that might be put in use later on.
     * <p>
     * {@code functionArgs} represents the argument list to use when calling the given function (can be {@code null}).
     */
    public void invokePluginsNowOrLater(String functionName, JsArray<?> functionArgs) {
        invokePluginsNowOrLater(functionName, functionArgs, INVOKE_ANY_PLUGIN);
    }

    /**
     * Invokes an event handler function on all plugins which are currently {@linkplain PluginState#IN_USE in use} and
     * meet the given condition, and schedules invocation of the given function on all plugins that might be put in use
     * later on.
     * <p>
     * {@code functionArgs} represents the argument list to use when calling the given function (can be {@code null}).
     */
    public void invokePluginsNowOrLater(final String functionName, final JsArray<?> functionArgs,
            final PluginInvocationCondition condition) {
        invokePluginsNow(functionName, functionArgs, condition);
        // Plugins not yet in use get the invocation queued; the queued command
        // re-checks state and condition at execution time.
        for (final Plugin plugin : getPlugins()) {
            if (!canInvokePlugins || !plugin.isInState(PluginState.IN_USE)) {
                scheduleFunctionCommand(plugin.getName(), new Command() {
                    @Override
                    public void execute() {
                        if (canInvokePlugins && plugin.isInState(PluginState.IN_USE) && condition.canInvoke(plugin)) {
                            invokePlugin(plugin, functionName, functionArgs);
                        }
                    }
                });
            }
        }
    }

    /**
     * Invokes an event handler function on the given plugin.
     * <p>
     * No checks are performed here, make sure to call this method only in a context that fits the general plugin
     * lifecycle.
     * <p>
     * If the function fails due to uncaught exception for the given plugin, that plugin will be automatically
     * {@linkplain PluginState#FAILED removed from service}. Callers should therefore never call this method if the
     * given plugin is already out of service.
     * <p>
     * Returns {@code true} if the function completed successfully, or {@code false} if an exception escaped the
     * function call.
     */
    boolean invokePlugin(final Plugin plugin, final String functionName, JsArray<?> functionArgs) {
        final String pluginName = plugin.getName();
        logger.info("Invoking event handler function [" + functionName + "] for plugin [" + pluginName + "]"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        return plugin.getEventHandlerFunction(functionName).invoke(functionArgs, new ErrorHandler() {
            @Override
            public void onError(String message) {
                logger.severe("Exception caught while invoking event handler function [" + functionName //$NON-NLS-1$
                        + "] for plugin [" + pluginName + "]: " + message); //$NON-NLS-1$ //$NON-NLS-2$
                // Remove the given plugin from service
                Document.get().getBody().removeChild(plugin.getIFrameElement());
                plugin.markAsFailed();
                logger.warning("Plugin [" + pluginName + "] removed from service due to failure"); //$NON-NLS-1$ //$NON-NLS-2$
            }
        });
    }

    /**
     * Returns {@code true} when the given plugin can perform actions through the API.
     * <p>
     * More precisely, returns {@code true} when all of the following conditions are met:
     * <ul>
     * <li>WebAdmin is currently in state that allows plugins to be invoked
     * <li>the plugin is either {@linkplain PluginState#INITIALIZING initializing} (actions performed from UiInit
     * function), or {@linkplain PluginState#IN_USE in use} (actions performed from other event handler functions)
     * </ul>
     */
    boolean validatePluginAction(String pluginName) {
        Plugin plugin = getPlugin(pluginName);
        boolean pluginInitializingOrInUse = plugin != null
                ? plugin.isInState(PluginState.INITIALIZING) || plugin.isInState(PluginState.IN_USE) : false;
        return canInvokePlugins && pluginInitializingOrInUse;
    }

    /**
     * Registers an event handler object (object containing plugin event handler functions) for the given plugin.
     */
    void registerPluginEventHandlerObject(String pluginName, JavaScriptObject eventHandlerObject) {
        Plugin plugin = getPlugin(pluginName);
        if (plugin == null || eventHandlerObject == null) {
            return;
        }
        // Allow plugin event handler object to be set only once
        if (plugin.getEventHandlerObject() == null) {
            plugin.setEventHandlerObject(eventHandlerObject);
            logger.info("Plugin [" + pluginName + "] has registered the event handler object"); //$NON-NLS-1$ //$NON-NLS-2$
        } else {
            logger.warning("Plugin [" + pluginName + "] has already registered the event handler object"); //$NON-NLS-1$ //$NON-NLS-2$
        }
    }

    /**
     * Registers a custom API options object for the given plugin.
     */
    void registerPluginApiOptionsObject(String pluginName, ApiOptions apiOptionsObject) {
        Plugin plugin = getPlugin(pluginName);
        if (plugin == null || apiOptionsObject == null) {
            return;
        }
        plugin.setApiOptionsObject(apiOptionsObject);
        logger.info("Plugin [" + pluginName + "] has registered custom API options object"); //$NON-NLS-1$ //$NON-NLS-2$
    }

    /**
     * Indicates that the given plugin is {@linkplain PluginState#READY ready for use}.
     * Only honored while the plugin is in the LOADING state and has an event handler object.
     */
    void pluginReady(String pluginName) {
        Plugin plugin = getPlugin(pluginName);
        if (plugin != null && plugin.isInState(PluginState.LOADING)) {
            if (plugin.getEventHandlerObject() == null) {
                logger.warning("Plugin [" + pluginName //$NON-NLS-1$
                        + "] reports in as ready, but has no event handler object assigned"); //$NON-NLS-1$
                return;
            }
            plugin.markAsReady();
            logger.info("Plugin [" + pluginName + "] reports in as ready"); //$NON-NLS-1$ //$NON-NLS-2$
            // Try to initialize the plugin, since the plugin might report in as ready
            // after WebAdmin enters the state that allows plugins to be invoked
            initPlugin(plugin);
        }
    }

    /**
     * Attempts to {@linkplain PluginState#INITIALIZING initialize} the given plugin by calling UiInit event handler
     * function on the corresponding event handler object.
     * <p>
     * The UiInit function will be called just once during the lifetime of a plugin. More precisely, UiInit function
     * will be called:
     * <ul>
     * <li>after the plugin reports in as {@linkplain PluginState#READY ready} <b>and</b> WebAdmin
     * {@linkplain #enablePluginInvocation enters} the state that allows plugins to be invoked
     * <li>before any other event handler functions are invoked by the plugin infrastructure
     * </ul>
     * <p>
     * As part of attempting to initialize the given plugin, all event handler functions that have been
     * {@linkplain #invokePluginsNowOrLater scheduled} for such plugin will be invoked immediately after the UiInit
     * function completes successfully.
     */
    void initPlugin(Plugin plugin) {
        if (!canInvokePlugins) {
            return;
        }
        String pluginName = plugin.getName();
        // Try to invoke UiInit event handler function
        if (plugin.isInState(PluginState.READY)) {
            logger.info("Initializing plugin [" + pluginName + "]"); //$NON-NLS-1$ //$NON-NLS-2$
            plugin.markAsInitializing();
            if (invokePlugin(plugin, "UiInit", null)) { //$NON-NLS-1$
                plugin.markAsInUse();
                logger.info("Plugin [" + pluginName + "] is initialized and in use now"); //$NON-NLS-1$ //$NON-NLS-2$
            }
        }
        // Try to invoke all event handler functions scheduled for this plugin
        if (plugin.isInState(PluginState.IN_USE)) {
            invokeScheduledFunctionCommands(pluginName);
        }
    }

    /**
     * Returns the configuration object associated with the given plugin, or {@code null} if no such object exists.
     */
    JavaScriptObject getConfigObject(String pluginName) {
        Plugin plugin = getPlugin(pluginName);
        return plugin != null ? plugin.getMetaData().getConfigObject() : null;
    }

    /**
     * JSNI method that publishes the global {@code pluginApi} function on the host
     * window object, through which plugins register themselves and call UI functions.
     * NOTE(review): the {@code @Fully.Qualified::member(sig)} references below are
     * GWT JSNI syntax and must match the Java declarations exactly.
     */
    private native void exposePluginApi() /*-{
        var ctx = this;
        var uiFunctions = ctx.@org.ovirt.engine.ui.webadmin.plugin.PluginManager::uiFunctions;
        var user = ctx.@org.ovirt.engine.ui.webadmin.plugin.PluginManager::user;
        var validatePluginAction = function(pluginName) {
            return ctx.@org.ovirt.engine.ui.webadmin.plugin.PluginManager::validatePluginAction(Ljava/lang/String;)(pluginName);
        };
        var getEntityType = function(entityTypeName) {
            return @org.ovirt.engine.ui.webadmin.plugin.entity.EntityType::from(Ljava/lang/String;)(entityTypeName);
        };
        var sanitizeObject = function(object) {
            return (object != null) ? object : {};
        };
        // Define pluginApi function used to construct specific Plugin API instances
        var pluginApi = function(pluginName) {
            return new pluginApi.fn.init(pluginName);
        };
        // Define pluginApi.fn as an alias to pluginApi prototype
        pluginApi.fn = pluginApi.prototype = {
            pluginName: null, // Initialized in constructor function
            // Constructor function
            init: function(pluginName) {
                this.pluginName = pluginName;
                return this;
            },
            // Registers plugin event handler functions for later invocation
            register: function(eventHandlerObject) {
                ctx.@org.ovirt.engine.ui.webadmin.plugin.PluginManager::registerPluginEventHandlerObject(Ljava/lang/String;Lcom/google/gwt/core/client/JavaScriptObject;)(this.pluginName,sanitizeObject(eventHandlerObject));
            },
            // Registers custom API options object associated with the plugin
            options: function(apiOptionsObject) {
                ctx.@org.ovirt.engine.ui.webadmin.plugin.PluginManager::registerPluginApiOptionsObject(Ljava/lang/String;Lorg/ovirt/engine/ui/webadmin/plugin/api/ApiOptions;)(this.pluginName,sanitizeObject(apiOptionsObject));
            },
            // Indicates that the plugin is ready for use
            ready: function() {
                ctx.@org.ovirt.engine.ui.webadmin.plugin.PluginManager::pluginReady(Ljava/lang/String;)(this.pluginName);
            },
            // Returns the configuration object associated with the plugin
            configObject: function() {
                return ctx.@org.ovirt.engine.ui.webadmin.plugin.PluginManager::getConfigObject(Ljava/lang/String;)(this.pluginName);
            },
            // TODO(vszocs) inject API functions into "pluginApi.fn" dynamically using EventBus
            addMainTab: function(label, historyToken, contentUrl, options) {
                if (validatePluginAction(this.pluginName)) {
                    uiFunctions.@org.ovirt.engine.ui.webadmin.plugin.api.PluginUiFunctions::addMainTab(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Lorg/ovirt/engine/ui/webadmin/plugin/api/TabOptions;)(label,historyToken,contentUrl,sanitizeObject(options));
                }
            },
            addSubTab: function(entityTypeName, label, historyToken, contentUrl, options) {
                if (validatePluginAction(this.pluginName)) {
                    uiFunctions.@org.ovirt.engine.ui.webadmin.plugin.api.PluginUiFunctions::addSubTab(Lorg/ovirt/engine/ui/webadmin/plugin/entity/EntityType;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Lorg/ovirt/engine/ui/webadmin/plugin/api/TabOptions;)(getEntityType(entityTypeName),label,historyToken,contentUrl,sanitizeObject(options));
                }
            },
            setTabContentUrl: function(historyToken, contentUrl) {
                if (validatePluginAction(this.pluginName)) {
                    uiFunctions.@org.ovirt.engine.ui.webadmin.plugin.api.PluginUiFunctions::setTabContentUrl(Ljava/lang/String;Ljava/lang/String;)(historyToken,contentUrl);
                }
            },
            setTabAccessible: function(historyToken, tabAccessible) {
                if (validatePluginAction(this.pluginName)) {
                    uiFunctions.@org.ovirt.engine.ui.webadmin.plugin.api.PluginUiFunctions::setTabAccessible(Ljava/lang/String;Z)(historyToken,tabAccessible);
                }
            },
            addMainTabActionButton: function(entityTypeName, label, actionButtonInterface) {
                if (validatePluginAction(this.pluginName)) {
                    uiFunctions.@org.ovirt.engine.ui.webadmin.plugin.api.PluginUiFunctions::addMainTabActionButton(Lorg/ovirt/engine/ui/webadmin/plugin/entity/EntityType;Ljava/lang/String;Lorg/ovirt/engine/ui/webadmin/plugin/api/ActionButtonInterface;)(getEntityType(entityTypeName),label,sanitizeObject(actionButtonInterface));
                }
            },
            addSubTabActionButton: function(mainTabEntityTypeName, subTabEntityTypeName, label, actionButtonInterface) {
                if (validatePluginAction(this.pluginName)) {
                    uiFunctions.@org.ovirt.engine.ui.webadmin.plugin.api.PluginUiFunctions::addSubTabActionButton(Lorg/ovirt/engine/ui/webadmin/plugin/entity/EntityType;Lorg/ovirt/engine/ui/webadmin/plugin/entity/EntityType;Ljava/lang/String;Lorg/ovirt/engine/ui/webadmin/plugin/api/ActionButtonInterface;)(getEntityType(mainTabEntityTypeName),getEntityType(subTabEntityTypeName),label,sanitizeObject(actionButtonInterface));
                }
            },
            showDialog: function(title, dialogToken, contentUrl, width, height, options) {
                if (validatePluginAction(this.pluginName)) {
                    uiFunctions.@org.ovirt.engine.ui.webadmin.plugin.api.PluginUiFunctions::showDialog(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Lorg/ovirt/engine/ui/webadmin/plugin/api/DialogOptions;)(title,dialogToken,contentUrl,width,height,sanitizeObject(options));
                }
            },
            setDialogContentUrl: function(dialogToken, contentUrl) {
                if (validatePluginAction(this.pluginName)) {
                    uiFunctions.@org.ovirt.engine.ui.webadmin.plugin.api.PluginUiFunctions::setDialogContentUrl(Ljava/lang/String;Ljava/lang/String;)(dialogToken,contentUrl);
                }
            },
            closeDialog: function(dialogToken) {
                if (validatePluginAction(this.pluginName)) {
                    uiFunctions.@org.ovirt.engine.ui.webadmin.plugin.api.PluginUiFunctions::closeDialog(Ljava/lang/String;)(dialogToken);
                }
            },
            revealPlace: function(historyToken) {
                if (validatePluginAction(this.pluginName)) {
                    uiFunctions.@org.ovirt.engine.ui.webadmin.plugin.api.PluginUiFunctions::revealPlace(Ljava/lang/String;)(historyToken);
                }
            },
            setSearchString: function(searchString) {
                if (validatePluginAction(this.pluginName)) {
                    uiFunctions.@org.ovirt.engine.ui.webadmin.plugin.api.PluginUiFunctions::setSearchString(Ljava/lang/String;)(searchString);
                }
            },
            loginUserName: function() {
                if (validatePluginAction(this.pluginName)) {
                    return user.@org.ovirt.engine.ui.common.auth.CurrentUser::getFullUserName()();
                }
            },
            loginUserId: function() {
                if (validatePluginAction(this.pluginName)) {
                    return user.@org.ovirt.engine.ui.common.auth.CurrentUser::getUserId()();
                }
            }
        };
        // Give init function the pluginApi prototype for later instantiation
        pluginApi.fn.init.prototype = pluginApi.fn;
        // Expose pluginApi function as a global object
        $wnd.pluginApi = pluginApi;
    }-*/;
}
| |
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facet.geodistance;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.FilterBuilder;
import org.elasticsearch.index.search.geo.GeoDistance;
import org.elasticsearch.search.builder.SearchSourceBuilderException;
import org.elasticsearch.search.facet.AbstractFacetBuilder;
import java.io.IOException;
import java.util.List;
import java.util.Locale;
import java.util.Map;
/**
* A geo distance builder allowing to create a facet of distances from a specific location including the
* number of hits within each distance range, and aggregated data (like totals of either the distance or
 * custom value fields).
*
*
*/
public class GeoDistanceFacetBuilder extends AbstractFacetBuilder {

    // Geo point field to compute distances from; required at build time.
    private String fieldName;
    // Optional numeric field aggregated per range.
    private String valueFieldName;
    // Origin point, either as lat/lon ...
    private double lat;
    private double lon;
    // ... or as a geohash (takes precedence over lat/lon when set).
    private String geohash;
    private GeoDistance geoDistance;
    private DistanceUnit unit;
    private Map<String, Object> params;
    private String valueScript;
    private String lang;
    private List<Entry> entries = Lists.newArrayList();

    /**
     * Constructs a new geo distance with the provided facet name.
     */
    public GeoDistanceFacetBuilder(String name) {
        super(name);
    }

    /**
     * The geo point field that will be used to extract the document location(s).
     */
    public GeoDistanceFacetBuilder field(String fieldName) {
        this.fieldName = fieldName;
        return this;
    }

    /**
     * A custom value field (numeric) that will be used to provide aggregated data for each facet (for example, total).
     */
    public GeoDistanceFacetBuilder valueField(String valueFieldName) {
        this.valueFieldName = valueFieldName;
        return this;
    }

    /**
     * A custom value script (result is numeric) that will be used to provide aggregated data for each facet (for example, total).
     */
    public GeoDistanceFacetBuilder valueScript(String valueScript) {
        this.valueScript = valueScript;
        return this;
    }

    /**
     * The language of the {@link #valueScript(String)} script.
     */
    public GeoDistanceFacetBuilder lang(String lang) {
        this.lang = lang;
        return this;
    }

    /**
     * Parameters for {@link #valueScript(String)} to improve performance when executing the same script with different parameters.
     */
    public GeoDistanceFacetBuilder scriptParam(String name, Object value) {
        if (params == null) {
            params = Maps.newHashMap();
        }
        params.put(name, value);
        return this;
    }

    /**
     * The point to create the range distance facets from.
     *
     * @param lat latitude.
     * @param lon longitude.
     */
    public GeoDistanceFacetBuilder point(double lat, double lon) {
        this.lat = lat;
        this.lon = lon;
        return this;
    }

    /**
     * The latitude to create the range distance facets from.
     */
    public GeoDistanceFacetBuilder lat(double lat) {
        this.lat = lat;
        return this;
    }

    /**
     * The longitude to create the range distance facets from.
     */
    public GeoDistanceFacetBuilder lon(double lon) {
        this.lon = lon;
        return this;
    }

    /**
     * The geohash of the geo point to create the range distance facets from.
     */
    public GeoDistanceFacetBuilder geohash(String geohash) {
        this.geohash = geohash;
        return this;
    }

    /**
     * The geo distance type used to compute the distance.
     */
    public GeoDistanceFacetBuilder geoDistance(GeoDistance geoDistance) {
        this.geoDistance = geoDistance;
        return this;
    }

    /**
     * Adds a range entry with explicit from and to.
     *
     * @param from The from distance limit
     * @param to   The to distance limit
     */
    public GeoDistanceFacetBuilder addRange(double from, double to) {
        entries.add(new Entry(from, to));
        return this;
    }

    /**
     * Adds a range entry with explicit from and unbounded to.
     *
     * @param from the from distance limit, to is unbounded.
     */
    public GeoDistanceFacetBuilder addUnboundedTo(double from) {
        entries.add(new Entry(from, Double.POSITIVE_INFINITY));
        return this;
    }

    /**
     * Adds a range entry with explicit to and unbounded from.
     *
     * @param to the to distance limit, from is unbounded.
     */
    public GeoDistanceFacetBuilder addUnboundedFrom(double to) {
        entries.add(new Entry(Double.NEGATIVE_INFINITY, to));
        return this;
    }

    /**
     * The distance unit to use. Defaults to {@link org.elasticsearch.common.unit.DistanceUnit#KILOMETERS}
     */
    public GeoDistanceFacetBuilder unit(DistanceUnit unit) {
        this.unit = unit;
        return this;
    }

    /**
     * Marks the facet to run in a global scope, not bounded by any query.
     */
    public GeoDistanceFacetBuilder global(boolean global) {
        super.global(global);
        return this;
    }

    /**
     * Marks the facet to run in a specific scope.
     */
    @Override
    public GeoDistanceFacetBuilder scope(String scope) {
        super.scope(scope);
        return this;
    }

    /**
     * An additional filter used to further filter down the set of documents the facet will run on.
     */
    public GeoDistanceFacetBuilder facetFilter(FilterBuilder filter) {
        this.facetFilter = filter;
        return this;
    }

    /**
     * Sets the nested path the facet will execute on. A match (root object) will then cause all the
     * nested objects matching the path to be computed into the facet.
     */
    public GeoDistanceFacetBuilder nested(String nested) {
        this.nested = nested;
        return this;
    }

    /**
     * Serializes this facet into the request body.
     *
     * @throws SearchSourceBuilderException if no field was set or no range was added
     */
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        if (fieldName == null) {
            throw new SearchSourceBuilderException("field must be set on geo_distance facet for facet [" + name + "]");
        }
        if (entries.isEmpty()) {
            throw new SearchSourceBuilderException("at least one range must be defined for geo_distance facet [" + name + "]");
        }
        builder.startObject(name);
        builder.startObject(GeoDistanceFacet.TYPE);
        if (geohash != null) {
            builder.field(fieldName, geohash);
        } else {
            // Origin point is serialized as [lon, lat] (GeoJSON order).
            builder.startArray(fieldName).value(lon).value(lat).endArray();
        }
        if (valueFieldName != null) {
            builder.field("value_field", valueFieldName);
        }
        if (valueScript != null) {
            builder.field("value_script", valueScript);
            if (lang != null) {
                builder.field("lang", lang);
            }
            if (this.params != null) {
                builder.field("params", this.params);
            }
        }
        builder.startArray("ranges");
        for (Entry entry : entries) {
            builder.startObject();
            // Infinite bounds mean "unbounded" and are simply omitted.
            if (!Double.isInfinite(entry.from)) {
                builder.field("from", entry.from);
            }
            if (!Double.isInfinite(entry.to)) {
                builder.field("to", entry.to);
            }
            builder.endObject();
        }
        builder.endArray();
        if (unit != null) {
            builder.field("unit", unit);
        }
        if (geoDistance != null) {
            // Use a locale-independent lowercase conversion: the wire format must not
            // vary with the JVM default locale (e.g. Turkish dotless-i).
            builder.field("distance_type", geoDistance.name().toLowerCase(Locale.ROOT));
        }
        builder.endObject();
        addFilterFacetAndGlobal(builder, params);
        builder.endObject();
        return builder;
    }

    /** Immutable [from, to) distance range; infinite bounds mark an open end. */
    private static class Entry {
        final double from;
        final double to;

        private Entry(double from, double to) {
            this.from = from;
            this.to = to;
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tez.dag.history.utils;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.tez.dag.api.DAG;
import org.apache.tez.dag.api.EdgeProperty;
import org.apache.tez.dag.api.EdgeProperty.DataMovementType;
import org.apache.tez.dag.api.EdgeProperty.DataSourceType;
import org.apache.tez.dag.api.EdgeProperty.SchedulingType;
import org.apache.tez.dag.api.DataSinkDescriptor;
import org.apache.tez.dag.api.DataSourceDescriptor;
import org.apache.tez.dag.api.GroupInputEdge;
import org.apache.tez.dag.api.InputDescriptor;
import org.apache.tez.dag.api.OutputCommitterDescriptor;
import org.apache.tez.dag.api.OutputDescriptor;
import org.apache.tez.dag.api.ProcessorDescriptor;
import org.apache.tez.dag.api.Vertex;
import org.apache.tez.dag.api.records.DAGProtos.DAGPlan;
import org.apache.tez.dag.records.TezDAGID;
import org.apache.tez.dag.records.TezVertexID;
import org.apache.tez.runtime.api.OutputCommitter;
import org.codehaus.jettison.json.JSONException;
import org.junit.Assert;
import org.junit.Test;
import com.google.common.collect.Sets;
public class TestDAGUtils {
/**
 * Builds the DAG plan used by the tests: three vertices where vertex1 and
 * vertex2 form a vertex group feeding vertex3 through a scatter-gather
 * group input edge, with a shared data sink on the group.
 */
private DAGPlan createDAG() {
    Configuration dagConf = new Configuration(false);
    final int taskCount = 1;
    final Resource taskResource = Resource.newInstance(1, 1);

    org.apache.tez.dag.api.Vertex vertexOne = Vertex.create("vertex1",
        ProcessorDescriptor.create("Processor").setHistoryText("vertex1 Processor HistoryText"),
        taskCount, taskResource);
    vertexOne.addDataSource("input1", DataSourceDescriptor.create(InputDescriptor.create(
        "input.class").setHistoryText("input HistoryText"), null, null));
    org.apache.tez.dag.api.Vertex vertexTwo = Vertex.create("vertex2",
        ProcessorDescriptor.create("Processor").setHistoryText("vertex2 Processor HistoryText"),
        taskCount, taskResource);
    org.apache.tez.dag.api.Vertex vertexThree = Vertex.create("vertex3",
        ProcessorDescriptor.create("Processor").setHistoryText("vertex3 Processor HistoryText"),
        taskCount, taskResource);

    DAG dag = DAG.create("testDag");
    dag.setDAGInfo("dagInfo");

    // Group vertex1 and vertex2; both the group and vertex3 share one sink.
    String groupName = "uv12";
    org.apache.tez.dag.api.VertexGroup vertexGroup =
        dag.createVertexGroup(groupName, vertexOne, vertexTwo);
    OutputDescriptor sinkDesc = OutputDescriptor.create("output.class")
        .setHistoryText("uvOut HistoryText");
    OutputCommitterDescriptor committerDesc =
        OutputCommitterDescriptor.create(OutputCommitter.class.getName());
    vertexGroup.addDataSink("uvOut", DataSinkDescriptor.create(sinkDesc, committerDesc, null));
    vertexThree.addDataSink("uvOut", DataSinkDescriptor.create(sinkDesc, committerDesc, null));

    // Scatter-gather edge from the group into vertex3, merged via merge.class.
    GroupInputEdge groupEdge = GroupInputEdge.create(vertexGroup, vertexThree,
        EdgeProperty.create(DataMovementType.SCATTER_GATHER,
            DataSourceType.PERSISTED, SchedulingType.SEQUENTIAL,
            OutputDescriptor.create("dummy output class").setHistoryText("Dummy History Text"),
            InputDescriptor.create("dummy input class").setHistoryText("Dummy History Text")),
        InputDescriptor.create("merge.class").setHistoryText("Merge HistoryText"));

    dag.addVertex(vertexOne);
    dag.addVertex(vertexTwo);
    dag.addVertex(vertexThree);
    dag.addEdge(groupEdge);
    return dag.createDag(dagConf, null, null, null, true);
}
/**
 * Verifies that DAGUtils.convertDAGPlanToATSMap translates the DAGPlan built
 * by createDAG() into the expected ATS key/value structure: DAG name/info,
 * version, vertices (with payloads, edge ids, additional inputs/outputs),
 * edges, and vertex groups.
 */
@Test
@SuppressWarnings("unchecked")
public void testConvertDAGPlanToATSMap() throws IOException, JSONException {
  DAGPlan dagPlan = createDAG();
  // Ids are generated only so we can check every vertex name in the ATS map
  // against a known set; they are not fed into the conversion itself.
  Map<String,TezVertexID> idNameMap = new HashMap<String, TezVertexID>();
  ApplicationId appId = ApplicationId.newInstance(1, 1);
  TezDAGID dagId = TezDAGID.getInstance(appId, 1);
  TezVertexID vId1 = TezVertexID.getInstance(dagId, 1);
  TezVertexID vId2 = TezVertexID.getInstance(dagId, 2);
  TezVertexID vId3 = TezVertexID.getInstance(dagId, 3);
  idNameMap.put("vertex1", vId1);
  idNameMap.put("vertex2", vId2);
  idNameMap.put("vertex3", vId3);
  Map<String, Object> atsMap = DAGUtils.convertDAGPlanToATSMap(dagPlan);
  // Top-level DAG attributes.
  Assert.assertTrue(atsMap.containsKey(DAGUtils.DAG_NAME_KEY));
  Assert.assertEquals("testDag", atsMap.get(DAGUtils.DAG_NAME_KEY));
  Assert.assertTrue(atsMap.containsKey(DAGUtils.DAG_INFO_KEY));
  Assert.assertEquals("dagInfo", atsMap.get(DAGUtils.DAG_INFO_KEY));
  Assert.assertEquals(dagPlan.getName(), atsMap.get(DAGUtils.DAG_NAME_KEY));
  Assert.assertTrue(atsMap.containsKey("version"));
  Assert.assertEquals(1, atsMap.get("version"));
  Assert.assertTrue(atsMap.containsKey(DAGUtils.VERTICES_KEY));
  Assert.assertTrue(atsMap.containsKey(DAGUtils.EDGES_KEY));
  Assert.assertTrue(atsMap.containsKey(DAGUtils.VERTEX_GROUPS_KEY));
  Assert.assertEquals(3, ((Collection<?>) atsMap.get(DAGUtils.VERTICES_KEY)).size());
  // Collected while walking vertices; cross-checked against the edge list below.
  Set<String> inEdgeIds = new HashSet<String>();
  Set<String> outEdgeIds = new HashSet<String>();
  int additionalInputCount = 0;
  int additionalOutputCount = 0;
  for (Object o : ((Collection<?>) atsMap.get(DAGUtils.VERTICES_KEY))) {
    Map<String, Object> v = (Map<String, Object>) o;
    Assert.assertTrue(v.containsKey(DAGUtils.VERTEX_NAME_KEY));
    String vName = (String)v.get(DAGUtils.VERTEX_NAME_KEY);
    Assert.assertTrue(v.containsKey(DAGUtils.PROCESSOR_CLASS_KEY));
    Assert.assertTrue(v.containsKey(DAGUtils.USER_PAYLOAD_AS_TEXT));
    if (v.containsKey(DAGUtils.IN_EDGE_IDS_KEY)) {
      inEdgeIds.addAll(((Collection<String>) v.get(DAGUtils.IN_EDGE_IDS_KEY)));
    }
    if (v.containsKey(DAGUtils.OUT_EDGE_IDS_KEY)) {
      outEdgeIds.addAll(((Collection<String>) v.get(DAGUtils.OUT_EDGE_IDS_KEY)));
    }
    Assert.assertTrue(idNameMap.containsKey(vName));
    // createDAG() sets the processor history text to "<name> Processor HistoryText".
    String procPayload = vName + " Processor HistoryText";
    Assert.assertEquals(procPayload, v.get(DAGUtils.USER_PAYLOAD_AS_TEXT));
    if (v.containsKey(DAGUtils.ADDITIONAL_INPUTS_KEY)) {
      additionalInputCount += ((Collection<?>) v.get(DAGUtils.ADDITIONAL_INPUTS_KEY)).size();
      for (Object input : ((Collection<?>) v.get(DAGUtils.ADDITIONAL_INPUTS_KEY))) {
        Map<String, Object> inputMap = (Map<String, Object>) input;
        Assert.assertTrue(inputMap.containsKey(DAGUtils.NAME_KEY));
        Assert.assertTrue(inputMap.containsKey(DAGUtils.CLASS_KEY));
        // Inputs in createDAG() have no initializer, so the key must be absent.
        Assert.assertFalse(inputMap.containsKey(DAGUtils.INITIALIZER_KEY));
        Assert.assertEquals("input HistoryText", inputMap.get(DAGUtils.USER_PAYLOAD_AS_TEXT));
      }
    }
    if (v.containsKey(DAGUtils.ADDITIONAL_OUTPUTS_KEY)) {
      additionalOutputCount += ((Collection<?>) v.get(DAGUtils.ADDITIONAL_OUTPUTS_KEY)).size();
      for (Object output : ((Collection<?>) v.get(DAGUtils.ADDITIONAL_OUTPUTS_KEY))) {
        Map<String, Object> outputMap = (Map<String, Object>) output;
        Assert.assertTrue(outputMap.containsKey(DAGUtils.NAME_KEY));
        Assert.assertTrue(outputMap.containsKey(DAGUtils.CLASS_KEY));
        Assert.assertTrue(outputMap.containsKey(DAGUtils.INITIALIZER_KEY));
        Assert.assertEquals("uvOut HistoryText", outputMap.get(DAGUtils.USER_PAYLOAD_AS_TEXT));
      }
    }
  }
  // 1 input
  Assert.assertEquals(1, additionalInputCount);
  // 3 outputs due to vertex group
  Assert.assertEquals(3, additionalOutputCount);
  // 1 edge translates to 2 due to vertex group
  Assert.assertEquals(2, inEdgeIds.size());
  Assert.assertEquals(2, outEdgeIds.size());
  // Every edge must round-trip the properties given to GroupInputEdge.create().
  for (Object o : ((Collection<?>) atsMap.get(DAGUtils.EDGES_KEY))) {
    Map<String, Object> e = (Map<String, Object>) o;
    Assert.assertTrue(inEdgeIds.contains(e.get(DAGUtils.EDGE_ID_KEY)));
    Assert.assertTrue(outEdgeIds.contains(e.get(DAGUtils.EDGE_ID_KEY)));
    Assert.assertTrue(e.containsKey(DAGUtils.INPUT_VERTEX_NAME_KEY));
    Assert.assertTrue(e.containsKey(DAGUtils.OUTPUT_VERTEX_NAME_KEY));
    Assert.assertEquals(DataMovementType.SCATTER_GATHER.name(),
        e.get(DAGUtils.DATA_MOVEMENT_TYPE_KEY));
    Assert.assertEquals(DataSourceType.PERSISTED.name(), e.get(DAGUtils.DATA_SOURCE_TYPE_KEY));
    Assert.assertEquals(SchedulingType.SEQUENTIAL.name(), e.get(DAGUtils.SCHEDULING_TYPE_KEY));
    Assert.assertEquals("dummy output class", e.get(DAGUtils.EDGE_SOURCE_CLASS_KEY));
    Assert.assertEquals("dummy input class", e.get(DAGUtils.EDGE_DESTINATION_CLASS_KEY));
    Assert.assertEquals("Dummy History Text", e.get(DAGUtils.OUTPUT_USER_PAYLOAD_AS_TEXT));
    Assert.assertEquals("Dummy History Text", e.get(DAGUtils.INPUT_USER_PAYLOAD_AS_TEXT));
  }
  // The single vertex group "uv12" must appear with members/outputs/merged inputs.
  for (Object o : ((Collection<?>) atsMap.get(DAGUtils.VERTEX_GROUPS_KEY))) {
    Map<String, Object> e = (Map<String, Object>) o;
    Assert.assertEquals("uv12", e.get(DAGUtils.VERTEX_GROUP_NAME_KEY));
    Assert.assertTrue(e.containsKey(DAGUtils.VERTEX_GROUP_MEMBERS_KEY));
    Assert.assertTrue(e.containsKey(DAGUtils.VERTEX_GROUP_OUTPUTS_KEY));
    Assert.assertTrue(e.containsKey(DAGUtils.VERTEX_GROUP_EDGE_MERGED_INPUTS_KEY));
  }
}
}
| |
/* $Id$ */
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.manifoldcf.crawler.connectors.alfresco;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import org.alfresco.webservice.types.NamedValue;
import org.apache.commons.lang.StringUtils;
import org.apache.manifoldcf.agents.interfaces.RepositoryDocument;
import org.apache.manifoldcf.core.common.DateParser;
import org.apache.manifoldcf.core.interfaces.ManifoldCFException;
/**
 * Utility class dedicated to managing Alfresco node properties.
 * @author Piergiorgio Lucidi
 *
 */
public class PropertiesUtils {

  /** Prefix that marks a property value as a binary content reference. */
  private static final String PROP_CONTENT_PREFIX = "contentUrl";
  /** Separator between the segments of a content property value. */
  private static final String PROP_CONTENT_SEP = "|";
  /** Separator between the "mimetype" key and its value inside a content segment. */
  private static final String PROP_MIMETYPE_SEP = "=";
  /** Qualified name of the cm:modified property. */
  private static final String PROP_MODIFIED = Constants.createQNameString(Constants.NAMESPACE_CONTENT_MODEL, "modified");

  /**
   * Returns the values of the property whose qualified name ends with the
   * given suffix, or {@code null} if no property matches. If several
   * properties match, the values of the last one win (unchanged behavior).
   *
   * @param properties all properties of the node
   * @param qname qualified name (or suffix of one) to look up
   * @return the matching values, or {@code null} when absent
   */
  public static String[] getPropertyValues(NamedValue[] properties, String qname){
    String[] propertyValues = null;
    for(NamedValue property : properties){
      if(property.getName().endsWith(qname)){
        // Boolean.TRUE.equals guards against a null Boolean from the web
        // service stub (getContentProperties already null-checks this flag).
        if(Boolean.TRUE.equals(property.getIsMultiValue())){
          propertyValues = property.getValues();
        } else {
          propertyValues = new String[]{property.getValue()};
        }
      }
    }
    return propertyValues;
  }

  /**
   * Copies all node properties into the RepositoryDocument as fields and
   * sets file name, mime type, created and modified dates when present.
   *
   * @param rd the document being built for ingestion
   * @param properties all properties of the node
   * @param contentProperties binary (content) properties of the node
   * @throws ManifoldCFException on repository document errors
   * @throws ParseException if a date property cannot be parsed
   */
  public static void ingestProperties(RepositoryDocument rd, NamedValue[] properties, List<NamedValue> contentProperties) throws ManifoldCFException, ParseException{
    for(NamedValue property : properties){
      if(property!=null && StringUtils.isNotEmpty(property.getName())){
        if(Boolean.TRUE.equals(property.getIsMultiValue())){
          String[] values = property.getValues();
          if(values!=null){
            for (String value : values) {
              if(StringUtils.isNotEmpty(value)){
                rd.addField(property.getName(), value);
              }
            }
          }
        } else {
          if(StringUtils.isNotEmpty(property.getValue())){
            rd.addField(property.getName(), property.getValue());
          }
        }
      }
    }

    String fileName = StringUtils.EMPTY;
    String[] propertyValues = PropertiesUtils.getPropertyValues(properties, Constants.PROP_NAME);
    if(propertyValues!=null && propertyValues.length>0){
      fileName = propertyValues[0];
    }
    String mimeType = PropertiesUtils.getMimeType(contentProperties);
    Date createdDate = PropertiesUtils.getDatePropertyValue(properties, Constants.PROP_CREATED);
    Date modifiedDate = PropertiesUtils.getDatePropertyValue(properties, PROP_MODIFIED);
    if(StringUtils.isNotEmpty(fileName)){
      rd.setFileName(fileName);
    }
    if(StringUtils.isNotEmpty(mimeType)){
      rd.setMimeType(mimeType);
    }
    if(createdDate!=null){
      rd.setCreatedDate(createdDate);
    }
    if(modifiedDate!=null){
      rd.setModifiedDate(modifiedDate);
    }
  }

  /**
   *
   * @param properties
   * @return a list of binary properties for the current node
   */
  public static List<NamedValue> getContentProperties(NamedValue[] properties){
    List<NamedValue> contentProperties = new ArrayList<NamedValue>();
    if(properties!=null){
      for (NamedValue property : properties) {
        if(property!=null){
          // Only single-valued properties whose value looks like a content
          // URL ("contentUrl|...") are binary contents.
          if(property.getIsMultiValue()!=null && !property.getIsMultiValue()){
            if(StringUtils.isNotEmpty(property.getValue())
                && property.getValue().startsWith(PROP_CONTENT_PREFIX)){
              contentProperties.add(property);
            }
          }
        }
      }
    }
    return contentProperties;
  }

  /**
   * Build the Alfresco node identifier
   * @param properties
   * @return the node reference for the current document, or the empty string
   *         when store protocol, store id or uuid are missing
   */
  public static String getNodeReference(NamedValue[] properties){
    String nodeReference = StringUtils.EMPTY;
    String storeProtocol = StringUtils.EMPTY;
    String storeId = StringUtils.EMPTY;
    String uuid = StringUtils.EMPTY;
    if(properties!=null){
      for (NamedValue property : properties) {
        if(Constants.PROP_STORE_PROTOCOL.equals(property.getName())){
          storeProtocol = property.getValue();
        } else if(Constants.PROP_STORE_ID.equals(property.getName())){
          storeId = property.getValue();
        } else if(Constants.PROP_NODE_UUID.equals(property.getName())){
          uuid = property.getValue();
        }
      }
    }
    if(StringUtils.isNotEmpty(storeProtocol)
        && StringUtils.isNotEmpty(storeId)
        && StringUtils.isNotEmpty(uuid)) {
      // e.g. workspace://SpacesStore/<uuid>
      nodeReference = storeProtocol+"://"+storeId+"/"+uuid;
    }
    return nodeReference;
  }

  /**
   *
   * @param properties
   * @return version label of the latest version of the node
   */
  public static String getVersionLabel(NamedValue[] properties){
    String[] versionLabelList = PropertiesUtils.getPropertyValues(properties, Constants.PROP_VERSION_LABEL);
    String versionLabel = StringUtils.EMPTY;
    if(versionLabelList!=null && versionLabelList.length>0){
      versionLabel = versionLabelList[0];
    }
    return versionLabel;
  }

  /**
   * This method returns the mimetype of the default content defined for the node.
   * Notice that more than one binary can be defined in a custom model of Alfresco and also that
   * it could exist some contents that don't have a binary.
   * Malformed content values (no "mimetype=" segment) no longer throw
   * ArrayIndexOutOfBoundsException; the empty string is returned instead.
   * @param contentProperties
   * @return mimetype of the default content property, or the empty string
   */
  public static String getMimeType(List<NamedValue> contentProperties){
    if(contentProperties!=null && contentProperties.size()>0){
      Iterator<NamedValue> i = contentProperties.iterator();
      while(i.hasNext()){
        NamedValue contentProperty = i.next();
        if(Constants.PROP_CONTENT.equals(contentProperty.getName())){
          // Value shape: "contentUrl|mimetype=<type>|..." — split on "|",
          // then extract the value after "=" from the second segment.
          String defaultContentPropertyValue = contentProperty.getValue();
          String[] contentSplitted = StringUtils.split(defaultContentPropertyValue, PROP_CONTENT_SEP);
          if (contentSplitted == null || contentSplitted.length == 0) {
            return StringUtils.EMPTY;
          }
          if (contentSplitted.length > 1) {
            String[] mimeTypeSplitted = StringUtils.split(contentSplitted[1], PROP_MIMETYPE_SEP);
            if (mimeTypeSplitted != null && mimeTypeSplitted.length > 1) {
              return mimeTypeSplitted[1];
            }
            // Second segment did not contain "mimetype=<type>".
            return StringUtils.EMPTY;
          }
          return contentSplitted[0];
        }
      }
    }
    return StringUtils.EMPTY;
  }

  /**
   * Parses the first value of the given date property as ISO-8601.
   * @param properties all properties of the node
   * @param qname qualified name of the date property
   * @return the parsed date, or {@code null} when the property is absent or empty
   * @throws ParseException if the value is present but not a valid date
   */
  public static Date getDatePropertyValue(NamedValue[] properties, String qname) throws ParseException{
    Date date = null;
    if(properties!=null && properties.length>0){
      String[] propertyValues = PropertiesUtils.getPropertyValues(properties, qname);
      if(propertyValues!=null && propertyValues.length>0){
        String dateString = propertyValues[0];
        if(StringUtils.isNotEmpty(dateString)){
          date = DateParser.parseISO8601Date(dateString);
        }
      }
    }
    return date;
  }

}
| |
package com.thaiopensource.relaxng.output.xsd;
import java.util.HashSet;
import java.util.Set;
import com.thaiopensource.relaxng.edit.AbstractVisitor;
import com.thaiopensource.relaxng.edit.AnyNameNameClass;
import com.thaiopensource.relaxng.edit.AttributePattern;
import com.thaiopensource.relaxng.edit.ComponentVisitor;
import com.thaiopensource.relaxng.edit.CompositePattern;
import com.thaiopensource.relaxng.edit.DataPattern;
import com.thaiopensource.relaxng.edit.DefineComponent;
import com.thaiopensource.relaxng.edit.DivComponent;
import com.thaiopensource.relaxng.edit.ElementPattern;
import com.thaiopensource.relaxng.edit.EmptyPattern;
import com.thaiopensource.relaxng.edit.GroupPattern;
import com.thaiopensource.relaxng.edit.IncludeComponent;
import com.thaiopensource.relaxng.edit.InterleavePattern;
import com.thaiopensource.relaxng.edit.ListPattern;
import com.thaiopensource.relaxng.edit.MixedPattern;
import com.thaiopensource.relaxng.edit.NsNameNameClass;
import com.thaiopensource.relaxng.edit.OneOrMorePattern;
import com.thaiopensource.relaxng.edit.OptionalPattern;
import com.thaiopensource.relaxng.edit.Pattern;
import com.thaiopensource.relaxng.edit.PatternVisitor;
import com.thaiopensource.relaxng.edit.RefPattern;
import com.thaiopensource.relaxng.edit.TextPattern;
import com.thaiopensource.relaxng.edit.UnaryPattern;
import com.thaiopensource.relaxng.edit.ValuePattern;
import com.thaiopensource.relaxng.edit.ZeroOrMorePattern;
import com.thaiopensource.relaxng.output.common.ErrorReporter;
import com.thaiopensource.util.VoidValue;
/**
 * Checks that a RELAX NG schema obeys the structural restrictions required
 * for conversion to W3C XML Schema. Each syntactic context (start, list,
 * attribute, data/except, element-repeat, ...) carries a bitmask of pattern
 * kinds that are forbidden inside it; violations are reported through the
 * ErrorReporter rather than thrown.
 */
public class RestrictionsChecker
{
  private final SchemaInfo si;
  private final ErrorReporter er;
  // Patterns already descended into, so shared definitions are checked once.
  private final Set <Pattern> checkedPatterns = new HashSet <Pattern> ();

  // Bit flags naming the pattern kinds that can be disallowed in a context.
  private static final int DISALLOW_ELEMENT = 0x1;
  private static final int DISALLOW_ATTRIBUTE = 0x2;
  private static final int DISALLOW_LIST = 0x4;
  private static final int DISALLOW_TEXT = 0x8;
  private static final int DISALLOW_EMPTY = 0x10;
  private static final int DISALLOW_DATA = 0x20;
  private static final int DISALLOW_GROUP = 0x40;
  private static final int DISALLOW_INTERLEAVE = 0x80;
  private static final int DISALLOW_ONE_OR_MORE = 0x100;

  // Under <start>, essentially only element patterns are allowed.
  private static final int START_DISALLOW = DISALLOW_ATTRIBUTE |
                                            DISALLOW_LIST |
                                            DISALLOW_TEXT |
                                            DISALLOW_DATA |
                                            DISALLOW_EMPTY |
                                            DISALLOW_GROUP |
                                            DISALLOW_INTERLEAVE |
                                            DISALLOW_ONE_OR_MORE;
  // Inside <list>: token-level content only — no elements/attributes/text.
  private static final int LIST_DISALLOW = DISALLOW_ATTRIBUTE |
                                           DISALLOW_ELEMENT |
                                           DISALLOW_TEXT |
                                           DISALLOW_LIST |
                                           DISALLOW_INTERLEAVE;
  // Inside a data/except: only simple value patterns remain legal.
  private static final int DATA_EXCEPT_DISALLOW = DISALLOW_ATTRIBUTE |
                                                  DISALLOW_ELEMENT |
                                                  DISALLOW_LIST |
                                                  DISALLOW_EMPTY |
                                                  DISALLOW_TEXT |
                                                  DISALLOW_GROUP |
                                                  DISALLOW_INTERLEAVE |
                                                  DISALLOW_ONE_OR_MORE;
  // Attribute content cannot contain elements or further attributes.
  private static final int ATTRIBUTE_DISALLOW = DISALLOW_ATTRIBUTE | DISALLOW_ELEMENT;

  // One visitor instance per context; the contextKey selects the localized
  // context name used in "illegal_contains" error messages.
  private final PatternVisitor <VoidValue> startVisitor = new Visitor ("start", START_DISALLOW);
  private final PatternVisitor <VoidValue> topLevelVisitor = new ListVisitor (null, 0);
  private final PatternVisitor <VoidValue> elementVisitor = new ElementVisitor ();
  private final PatternVisitor <VoidValue> elementRepeatVisitor = new ElementRepeatVisitor ();
  private final PatternVisitor <VoidValue> elementRepeatGroupVisitor = new Visitor ("element_repeat_group",
                                                                                    DISALLOW_ATTRIBUTE);
  private final PatternVisitor <VoidValue> elementRepeatInterleaveVisitor = new Visitor ("element_repeat_interleave",
                                                                                         DISALLOW_ATTRIBUTE);
  private final PatternVisitor <VoidValue> attributeVisitor = new Visitor ("attribute", ATTRIBUTE_DISALLOW);
  private final PatternVisitor <VoidValue> listVisitor = new ListVisitor ("list", LIST_DISALLOW);
  private final PatternVisitor <VoidValue> dataExceptVisitor = new Visitor ("data_except", DATA_EXCEPT_DISALLOW);

  /**
   * Base pattern visitor: reports an error whenever a visited pattern's kind
   * is present in this context's {@code flags} mask, and recurses into child
   * patterns with the visitor appropriate for the child context.
   */
  class Visitor extends AbstractVisitor
  {
    private final String contextKey;
    private final int flags;

    Visitor (final String contextKey, final int flags)
    {
      this.contextKey = contextKey;
      this.flags = flags;
    }

    // Returns false (and reports) when patternName is disallowed here.
    private boolean checkContext (final int flag, final String patternName, final Pattern p)
    {
      if ((flags & flag) != 0)
      {
        er.error ("illegal_contains", er.getLocalizer ().message (contextKey), patternName, p.getSourceLocation ());
        return false;
      }
      else
        return true;
    }

    @Override
    public VoidValue visitGroup (final GroupPattern p)
    {
      if (checkContext (DISALLOW_GROUP, "group", p))
      {
        checkGroup (p);
        super.visitGroup (p);
      }
      return VoidValue.VOID;
    }

    @Override
    public VoidValue visitInterleave (final InterleavePattern p)
    {
      if (checkContext (DISALLOW_INTERLEAVE, "interleave", p))
      {
        checkGroup (p);
        super.visitInterleave (p);
      }
      return VoidValue.VOID;
    }

    @Override
    public VoidValue visitElement (final ElementPattern p)
    {
      // Element content is checked at most once, in the element context.
      if (checkContext (DISALLOW_ELEMENT, "element", p) && !alreadyChecked (p))
        p.getChild ().accept (elementVisitor);
      return VoidValue.VOID;
    }

    @Override
    public VoidValue visitAttribute (final AttributePattern p)
    {
      if (checkContext (DISALLOW_ATTRIBUTE, "attribute", p) && !alreadyChecked (p))
        p.getChild ().accept (attributeVisitor);
      return VoidValue.VOID;
    }

    @Override
    public VoidValue visitData (final DataPattern p)
    {
      if (checkContext (DISALLOW_DATA, "data", p) && !alreadyChecked (p))
      {
        // The except branch is checked under the stricter data/except rules.
        final Pattern except = p.getExcept ();
        if (except != null)
          except.accept (dataExceptVisitor);
      }
      return VoidValue.VOID;
    }

    @Override
    public VoidValue visitValue (final ValuePattern p)
    {
      checkContext (DISALLOW_DATA, "value", p);
      return VoidValue.VOID;
    }

    @Override
    public VoidValue visitList (final ListPattern p)
    {
      if (checkContext (DISALLOW_LIST, "list", p) && !alreadyChecked (p))
        p.getChild ().accept (listVisitor);
      return VoidValue.VOID;
    }

    @Override
    public VoidValue visitEmpty (final EmptyPattern p)
    {
      checkContext (DISALLOW_EMPTY, "empty", p);
      return VoidValue.VOID;
    }

    @Override
    public VoidValue visitOptional (final OptionalPattern p)
    {
      // optional implies the possibility of empty, hence DISALLOW_EMPTY.
      if (checkContext (DISALLOW_EMPTY, "optional", p))
        super.visitOptional (p);
      return VoidValue.VOID;
    }

    @Override
    public VoidValue visitText (final TextPattern p)
    {
      checkContext (DISALLOW_TEXT, "text", p);
      return VoidValue.VOID;
    }

    @Override
    public VoidValue visitMixed (final MixedPattern p)
    {
      if (checkContext (DISALLOW_TEXT, "mixed", p))
      {
        // mixed content cannot also contain data patterns.
        if (si.getChildType (p.getChild ()).contains (ChildType.DATA))
          er.error ("mixed_data", p.getSourceLocation ());
        super.visitMixed (p);
      }
      return VoidValue.VOID;
    }

    @Override
    public VoidValue visitOneOrMore (final OneOrMorePattern p)
    {
      if (checkContext (DISALLOW_ONE_OR_MORE, "oneOrMore", p))
      {
        checkNoDataUnlessInList (p, "oneOrMore");
        super.visitOneOrMore (p);
      }
      return VoidValue.VOID;
    }

    @Override
    public VoidValue visitZeroOrMore (final ZeroOrMorePattern p)
    {
      // zeroOrMore shares the oneOrMore restriction flag.
      if (checkContext (DISALLOW_ONE_OR_MORE, "zeroOrMore", p))
      {
        checkNoDataUnlessInList (p, "zeroOrMore");
        super.visitZeroOrMore (p);
      }
      return VoidValue.VOID;
    }

    @Override
    public VoidValue visitRef (final RefPattern p)
    {
      // Follow references transparently, keeping the current context.
      return si.getBody (p).accept (this);
    }

    // Repeated data is only meaningful inside a list.
    void checkNoDataUnlessInList (final UnaryPattern p, final String patternName)
    {
      if (!inList () && si.getChildType (p.getChild ()).contains (ChildType.DATA))
        er.error ("not_in_list", patternName, p.getSourceLocation ());
    }

    // Enforces that simple (data) and complex (text/element) children are
    // not mixed within one group/interleave, and that multiple data
    // children only occur inside a list.
    void checkGroup (final CompositePattern p)
    {
      int simpleCount = 0;
      boolean hadComplex = false;
      for (final Pattern child : p.getChildren ())
      {
        final ChildType ct = si.getChildType (child);
        final boolean simple = ct.contains (ChildType.DATA);
        final boolean complex = ct.contains (ChildType.TEXT) || ct.contains (ChildType.ELEMENT);
        if ((complex && simpleCount > 0) || (simple && hadComplex))
        {
          er.error ("group_data_other_children",
                    p instanceof GroupPattern ? "group" : "interleave",
                    p.getSourceLocation ());
          return;
        }
        if (simple)
          simpleCount++;
        if (complex)
          hadComplex = true;
      }
      if (simpleCount > 1)
      {
        if (p instanceof InterleavePattern)
          er.error ("interleave_data", p.getSourceLocation ());
        else
          if (!inList ())
            er.error ("group_data", p.getSourceLocation ());
      }
    }

    // Overridden by ListVisitor; relaxes the repeated-data checks above.
    boolean inList ()
    {
      return false;
    }
  }

  /** Visitor for contexts that sit inside a list (or the top level). */
  class ListVisitor extends Visitor
  {
    public ListVisitor (final String contextKey, final int flags)
    {
      super (contextKey, flags);
    }

    @Override
    boolean inList ()
    {
      return true;
    }
  }

  /** Visitor for element content; also validates attribute name classes. */
  class ElementVisitor extends Visitor
  {
    ElementVisitor ()
    {
      super (null, 0);
    }

    @Override
    public VoidValue visitAttribute (final AttributePattern p)
    {
      // A non-repeated attribute may not use anyName/nsName name classes.
      p.getNameClass ().accept (this);
      return super.visitAttribute (p);
    }

    @Override
    public VoidValue visitZeroOrMore (final ZeroOrMorePattern p)
    {
      return elementRepeatVisitor.visitZeroOrMore (p);
    }

    @Override
    public VoidValue visitOneOrMore (final OneOrMorePattern p)
    {
      return elementRepeatVisitor.visitOneOrMore (p);
    }

    @Override
    public VoidValue visitAnyName (final AnyNameNameClass nc)
    {
      er.error ("any_name_attribute_not_repeated", nc.getSourceLocation ());
      return VoidValue.VOID;
    }

    @Override
    public VoidValue visitNsName (final NsNameNameClass nc)
    {
      er.error ("ns_name_attribute_not_repeated", nc.getSourceLocation ());
      return VoidValue.VOID;
    }
  }

  /** Visitor for the content of a repeated pattern inside an element. */
  class ElementRepeatVisitor extends Visitor
  {
    ElementRepeatVisitor ()
    {
      super (null, 0);
    }

    @Override
    public VoidValue visitGroup (final GroupPattern p)
    {
      return elementRepeatGroupVisitor.visitGroup (p);
    }

    @Override
    public VoidValue visitInterleave (final InterleavePattern p)
    {
      return elementRepeatInterleaveVisitor.visitInterleave (p);
    }
  }

  /** Walks the grammar's components, checking each non-start definition. */
  class GrammarVisitor implements ComponentVisitor <VoidValue>
  {
    public VoidValue visitDiv (final DivComponent c)
    {
      c.componentsAccept (this);
      return VoidValue.VOID;
    }

    public VoidValue visitDefine (final DefineComponent c)
    {
      // NOTE(review): identity (!=) comparison appears intentional —
      // DefineComponent.START looks like a unique sentinel constant; the
      // start pattern is checked separately with startVisitor.
      if (c.getName () != DefineComponent.START)
        c.getBody ().accept (topLevelVisitor);
      return VoidValue.VOID;
    }

    public VoidValue visitInclude (final IncludeComponent c)
    {
      si.getSchema (c.getUri ()).componentsAccept (this);
      return VoidValue.VOID;
    }
  }

  // The constructor performs the whole check; use the static check() entry.
  private RestrictionsChecker (final SchemaInfo si, final ErrorReporter er)
  {
    this.si = si;
    this.er = er;
    final Pattern start = si.getStart ();
    if (start != null)
      start.accept (startVisitor);
    si.getGrammar ().componentsAccept (new GrammarVisitor ());
  }

  /**
   * Checks the given schema, reporting all violations through {@code er}.
   */
  static void check (final SchemaInfo si, final ErrorReporter er)
  {
    new RestrictionsChecker (si, er);
  }

  // Marks p as checked; returns true if it had been checked before.
  private boolean alreadyChecked (final Pattern p)
  {
    if (checkedPatterns.contains (p))
      return true;
    else
    {
      checkedPatterns.add (p);
      return false;
    }
  }
}
| |
// **********************************************************************
//
// <copyright>
//
// BBN Technologies
// 10 Moulton Street
// Cambridge, MA 02138
// (617) 873-8000
//
// Copyright (C) BBNT Solutions LLC. All rights reserved.
//
// </copyright>
// **********************************************************************
//
// $Source: /cvs/distapps/openmap/src/openmap/com/bbn/openmap/omGraphics/OffsetGrabPoint.java,v $
// $RCSfile: OffsetGrabPoint.java,v $
// $Revision: 1.6 $
// $Date: 2004/10/14 18:06:14 $
// $Author: dietrick $
//
// **********************************************************************
package com.bbn.openmap.omGraphics;
import java.util.Hashtable;
/**
* An OffsetGrabPoint is one that manages other grab points. When it is moved,
* the other GrabPoints on its internal list are moved the same amount in pixel
* space.
*/
public class OffsetGrabPoint extends GrabPoint {
/** The list of GrabPoints to move when this point moves. */
protected Hashtable<GrabPoint, Offset> offsetPoints;
/**
* Create the OffsetGrabPoint at a certain window location.
*
* @param x horizontal pixel location from left side of window.
* @param y vertical pixel location from top side of window.
*/
public OffsetGrabPoint(int x, int y) {
this(x, y, DEFAULT_RADIUS);
}
/**
* Create the OffsetGrabPoint at a certain window location.
*
* @param x horizontal pixel location from left side of window.
* @param y vertical pixel location from top side of window.
* @param radius the pixel radius of the point.
*/
public OffsetGrabPoint(int x, int y, int radius) {
super(x, y, radius);
offsetPoints = new Hashtable<GrabPoint, Offset>();
}
/**
* Add a GrabPoint to the internal list.
*/
public GrabPoint addGrabPoint(GrabPoint gp) {
if (gp == null) {
com.bbn.openmap.util.Debug.error("OffsetGrabPoint: adding null grab point!");
return null;
}
if (offsetPoints == null) {
offsetPoints = new Hashtable<GrabPoint, Offset>();
}
offsetPoints.put(gp, new Offset(gp));
return gp;
}
/**
* Remove a GrabPoint to the internal list.
*/
public GrabPoint removeGrabPoint(GrabPoint rgp) {
if (offsetPoints != null) {
Offset offset = offsetPoints.remove(rgp);
if (offset != null) {
return offset.gp;
}
}
return null;
}
/**
* Called when the position of the OffsetGrabPoint has moved. Does not
* adjust the offsets.
*/
public void set(int x, int y) {
super.set(x, y);
}
/**
* Called when the X position of the OffsetGrabPoint has moved. Does not
* adjust the offsets.
*/
public void setX(int x) {
super.setX(x);
}
/**
* Called when the Y position of the OffsetGrabPoint has moved. Does not
* adjust the offsets.
*/
public void setY(int y) {
super.setY(y);
}
/**
* Called when the other grab points may have moved, and the offset
* distances should be changed internally for the Offset objects.
*/
public void set() {
updateOffsets();
}
/**
* Flag used as a lock to prevent StackOverflowErrors, in case this
* OffetGrabPoint is unwittingly a child of itself.
*/
protected boolean overflowLock = false;
/**
* Go through all the Offset elements and changes their position on the map.
* Should be called when the OffsetGrabPoint has been moved and you want to
* move all the GrabPoints in its list.
*/
public synchronized void moveOffsets() {
if (!overflowLock) {
overflowLock = true;
for (Offset offset : offsetPoints.values()) {
offset.move();
}
overflowLock = false;
}
}
/**
* Go through all the Offset elements and update the relative position to
* this grab point. Should be called when you set the position of the
* OffsetGrabPoint and you want to set the offset distances of all the
* GrabPoints in the internal list.
*/
public synchronized void updateOffsets() {
if (!overflowLock) {
overflowLock = true;
for (Offset offset : offsetPoints.values()) {
offset.update();
}
overflowLock = false;
}
}
public void clear() {
offsetPoints.clear();
}
protected void finalize() {
offsetPoints.clear();
}
/**
* A wrapper class of the internal GrabPoints. Contains their pixel offset
* distance from the OffsetGrabPoint.
*/
public class Offset {
public GrabPoint gp;
public int offsetX;
public int offsetY;
public Offset(GrabPoint grabPoint) {
gp = grabPoint;
update();
}
/**
* Update resets the pixel offsets from the OffsetGrabPoint, to the
* current distances between the GrabPoint and the OffsetGrabPoint.
*/
public void update() {
offsetX = gp.getX() - getX();
offsetY = gp.getY() - getY();
if (gp instanceof OffsetGrabPoint) {
((OffsetGrabPoint) gp).updateOffsets();
}
}
/**
* Move relocates the GrabPoint to the current position of the
* OffsetGrabPoint plus the offset distances.
*/
public void move() {
int newX = getX() + offsetX;
int newY = getY() + offsetY;
if (gp instanceof HorizontalGrabPoint) {
((HorizontalGrabPoint) gp).set(newX, newY, true);
} else if (gp instanceof VerticalGrabPoint) {
((VerticalGrabPoint) gp).set(newX, newY, true);
} else {
gp.set(newX, newY);
}
if (gp instanceof OffsetGrabPoint) {
((OffsetGrabPoint) gp).moveOffsets();
}
}
}
}
| |
package com.huffingtonpost.chronos.servlet;
import java.io.IOException;
import org.apache.zookeeper.KeeperException;
import java.sql.SQLException;
import java.util.*;
import java.util.Map.Entry;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.joda.time.DateTime;
import org.joda.time.DateTimeConstants;
import org.joda.time.DateTimeZone;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.apache.log4j.Logger;
import com.huffingtonpost.chronos.agent.*;
import com.huffingtonpost.chronos.model.*;
@Controller
@RequestMapping("/api")
public class ChronosController {
/** Symbolic identifiers for controller exception messages. */
enum ExMessages {
  NOT_FOUND
}
public static Logger LOG = Logger.getLogger(ChronosController.class);
private final JobDao jobDao;
private final AgentDriver agentDriver;
private final AgentConsumer agentConsumer;
private final ArrayList<SupportedDriver> drivers;
private static final Response SUCCESS = new Response("success");
/**
 * Wires the controller's collaborators.
 *
 * @param jobDao persistence layer for job specs and job runs
 * @param agentDriver agent that schedules jobs (held, not used directly here)
 * @param agentConsumer agent that executes jobs (held, not used directly here)
 * @param drivers the JDBC drivers/data sources this deployment supports
 */
@Autowired
public ChronosController(JobDao jobDao, AgentDriver agentDriver,
                         AgentConsumer agentConsumer,
                         ArrayList<SupportedDriver> drivers) {
  this.jobDao = jobDao;
  this.agentDriver = agentDriver;
  this.agentConsumer = agentConsumer;
  this.drivers = drivers;
}
/**
 * Maps any uncaught exception to a JSON {@link Response} body:
 * 404 for {@link NotFoundException}, 500 for everything else.
 *
 * @param exception the exception thrown by a handler method
 * @param request the current request (unused, kept for the handler signature)
 * @param response used to set the HTTP status code
 * @return a Response wrapping the exception message
 */
@ExceptionHandler(Exception.class)
@ResponseBody
public Response internalExceptionHandler(Exception exception,
    HttpServletRequest request, HttpServletResponse response) {
  String message = exception.getMessage();
  // Log the throwable itself, not just its message, so the stack trace
  // is preserved (the original LOG.error(message) discarded it).
  LOG.error(message, exception);
  if (exception instanceof NotFoundException) {
    response.setStatus(HttpServletResponse.SC_NOT_FOUND);
  } else {
    response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
  }
  return new Response(message);
}
/** GET /api/jobs — lists all configured job specs. */
@RequestMapping(value="/jobs", method=RequestMethod.GET)
public @ResponseBody List<JobSpec> getJobs() {
  return jobDao.getJobs();
}
/**
 * GET /api/jobs/history — past runs, optionally filtered to one job id,
 * capped at {@code limit} entries.
 */
@RequestMapping(value="/jobs/history", method=RequestMethod.GET)
public @ResponseBody List<CallableJob>
  history(@RequestParam(value="id", required=false) Long id,
          @RequestParam(value="limit", required=true, defaultValue="100") Integer limit) {
  // Flatten the run-id -> run map straight into a list.
  return new ArrayList<>(jobDao.getJobRuns(id, limit).values());
}
/**
 * GET /api/jobs/future — upcoming scheduled runs, optionally filtered to one
 * job id, trimmed to at most {@code limit} entries.
 */
@RequestMapping(value="/jobs/future", method=RequestMethod.GET)
public @ResponseBody List<FutureRunInfo>
  future(@RequestParam(value="id", required=false) Long id,
         @RequestParam(value="limit", required=true, defaultValue="100") Integer limit) {
  List<FutureRunInfo> toRet = getJobFuture(id, limit);
  // getJobFuture may overshoot limit by up to one round of jobs; clamp.
  int endIdx = Math.min(limit, toRet.size());
  return toRet.subList(0, endIdx);
}
/**
 * Computes the next run time for a job, strictly after {@code from},
 * according to the job's interval and configured start minute/hour/day.
 * Seconds and millis are always zeroed on the result.
 *
 * @param from the reference instant (expected UTC — see getJobFuture)
 * @param job the job whose schedule fields are read
 * @return the next scheduled run, strictly later than {@code from}
 */
public static DateTime calcNextRunTime(final DateTime from, JobSpec job) {
  DateTime toRet = from;
  switch (job.getInterval()) {
    case Hourly:
      toRet = toRet.withMinuteOfHour(job.getStartMinute());
      // <= ensures the result is strictly in the future.
      if (toRet.getMillis() <= from.getMillis()) {
        toRet = toRet.plusHours(1);
      }
      break;
    case Daily:
      toRet = toRet.withHourOfDay(job.getStartHour());
      toRet = toRet.withMinuteOfHour(job.getStartMinute());
      if (toRet.getMillis() <= from.getMillis()) {
        toRet = toRet.plusDays(1);
      }
      break;
    case Weekly:
      toRet = toRet.withHourOfDay(job.getStartHour());
      toRet = toRet.withMinuteOfHour(job.getStartMinute());
      toRet = toRet.withDayOfWeek(job.getStartDay());
      if (toRet.getMillis() <= from.getMillis()) {
        toRet = toRet.plusDays(7);
      }
      break;
    case Monthly:
      toRet = toRet.withHourOfDay(job.getStartHour());
      toRet = toRet.withMinuteOfHour(job.getStartMinute());
      // NOTE(review): when the time has already passed, the next run is
      // pinned to the 1st of next month; when it has NOT passed, the
      // day-of-month of `from` is kept as-is. Asymmetric — confirm this is
      // the intended monthly semantics.
      if (toRet.getMillis() <= from.getMillis()) {
        toRet = toRet.plusMonths(1).withDayOfMonth(1);
      }
      break;
  }
  return toRet.withMillisOfSecond(0).withSecondOfMinute(0);
}
/**
 * Builds at least {@code limit} upcoming runs for one job (when {@code id}
 * is given) or all jobs. Each loop round appends the next run of every job,
 * sorts, and advances the cursor to the latest run produced so far — so the
 * result may exceed {@code limit}; callers (see future()) trim it.
 *
 * @param id job id to restrict to, or null for all jobs
 * @param limit minimum number of future runs to produce
 * @return sorted future runs (possibly more than limit)
 */
public List<FutureRunInfo> getJobFuture(Long id, int limit) {
  List<FutureRunInfo> toRet = new ArrayList<>();
  List<JobSpec> iterJobs;
  if (id == null) {
    iterJobs = jobDao.getJobs();
  } else {
    iterJobs = Arrays.asList(new JobSpec[]{ jobDao.getJob(id)});
  }
  // Guard: with no jobs the while loop below would never terminate.
  if (iterJobs.size() == 0) {
    return toRet;
  }
  DateTime from = new DateTime().withZone(DateTimeZone.UTC);
  while (toRet.size() < limit) {
    for (JobSpec job : iterJobs) {
      String jobName = job.getName();
      DateTime nextRun = calcNextRunTime(from, job);
      FutureRunInfo fri =
          new FutureRunInfo(jobName, nextRun);
      toRet.add(fri);
    }
    Collections.sort(toRet);
    // Advance past everything generated so far; calcNextRunTime is strictly
    // increasing, so the loop makes progress each round.
    from = toRet.get(toRet.size() - 1).getTime();
  }
  return toRet;
}
/**
 * GET /api/job/{id} — fetches a single job spec.
 *
 * @throws NotFoundException when no job exists for the given id
 */
@RequestMapping(value="/job/{id}", method=RequestMethod.GET)
public @ResponseBody JobSpec getJob(@PathVariable("id") Long id)
    throws NotFoundException {
  final JobSpec found = jobDao.getJob(id);
  if (found != null) {
    return found;
  }
  throw new NotFoundException(
      String.format("Job with id \"%d\" was not found", id));
}
/**
 * GET /api/job/version/{id} — all stored versions of a job spec.
 *
 * @throws NotFoundException when the job has no versions (or is unknown)
 */
@RequestMapping(value="/job/version/{id}", method=RequestMethod.GET)
public @ResponseBody List<JobSpec> getJobVersions(@PathVariable("id") Long id)
    throws NotFoundException {
  final List<JobSpec> history = jobDao.getJobVersions(id);
  if (history == null || history.isEmpty()) {
    throw new NotFoundException(
        String.format("Job versions for id \"%d\" were not found", id));
  }
  return history;
}
/** GET /api/sources — the JDBC drivers/data sources this deployment supports. */
@RequestMapping(value="/sources", method=RequestMethod.GET)
public @ResponseBody ArrayList<SupportedDriver> getDataSources() {
  return drivers;
}
/**
 * Validates a job spec before create/update; throws RuntimeException with a
 * message from {@link Messages} on the first violated rule.
 */
private void verifyJob(JobSpec job) {
  final String resultQuery = job.getResultQuery();
  if (resultQuery != null) {
    // A non-empty result query must be bounded with a LIMIT clause.
    if (!resultQuery.isEmpty() && !resultQuery.toLowerCase().contains("limit")) {
      throw new RuntimeException(Messages.RESULTQUERY_MUST_HAVELIMIT);
    }
    // ...and must have at least one recipient for the results.
    if (job.getResultEmail() == null || job.getResultEmail().size() == 0) {
      throw new RuntimeException(Messages.RESULTQUERY_MUST_HAVE_RESULT_EMAILS);
    }
  }
  if (job.getName() == null || job.getName().isEmpty()) {
    throw new RuntimeException(Messages.JOB_NAME);
  }
  // Schedule fields must be within calendar bounds.
  if (job.getStartMinute() < 0 || job.getStartMinute() > 59) {
    throw new RuntimeException(Messages.START_MINUTE);
  }
  if (job.getStartHour() < 0 || job.getStartHour() > 23) {
    throw new RuntimeException(Messages.START_HOUR);
  }
  if (job.getStartDay() < DateTimeConstants.MONDAY ||
      job.getStartDay() > DateTimeConstants.SUNDAY) {
    throw new RuntimeException(Messages.START_DAY);
  }
}
/** Wraps a generated id into the single-entry JSON response map {"id": id}. */
public static Map<String, Long> assembleIdResp(long id) {
    final Map<String, Long> response = new HashMap<>();
    response.put("id", id);
    return response;
}
@RequestMapping(value="/job", method=RequestMethod.POST)
public @ResponseBody Map<String, Long> createJob(@RequestBody final JobSpec aJob) {
    // Validation throws before anything is persisted.
    verifyJob(aJob);
    final long newId = jobDao.createJob(aJob);
    return assembleIdResp(newId);
}
// Replaces an existing job definition.
@RequestMapping(value="/job/{id}", method=RequestMethod.PUT)
public @ResponseBody Response updateJob(@PathVariable("id") Long id,
        @RequestBody final JobSpec aJob) throws NotFoundException {
    // Existence check only — getJob throws NotFoundException for unknown ids.
    getJob(id);
    verifyJob(aJob);
    // NOTE(review): the path id is never copied onto aJob; presumably the DAO
    // keys the update off aJob's own id — confirm they cannot diverge.
    jobDao.updateJob(aJob);
    return SUCCESS;
}
@RequestMapping(value="/job/{id}", method=RequestMethod.DELETE)
public @ResponseBody Response deleteJob(@PathVariable("id") Long id)
        throws NotFoundException {
    // getJob throws NotFoundException when the id is unknown.
    final JobSpec existing = getJob(id);
    jobDao.deleteJob(existing.getId());
    return SUCCESS;
}
/**
 * Removed as an endpoint but leaving code here in case we need it again in
 * the future.
 *
 * Fetches up to {@code limit} result rows for the given job.
 */
public @ResponseBody List<Map<String,String>> getJobResults(
        @RequestParam("id") Long id,
        @RequestParam("limit") int limit) throws NotFoundException {
    JobSpec aJob = getJob(id);
    try {
        return jobDao.getJobResults(aJob, limit);
    } catch (ClassNotFoundException | InstantiationException |
             IllegalAccessException | SQLException ex) {
        LOG.error(ex);
        // Chain the original exception as the cause — the message alone loses
        // the stack trace and exception type for upstream handlers.
        throw new RuntimeException(ex.getMessage(), ex);
    }
}
// Lists planned jobs whose runs have started but not yet finished.
// The optional "id" request parameter is accepted but currently unused here.
@RequestMapping(value="/running", method=RequestMethod.GET)
public @ResponseBody List<PlannedJob> getRunning(
        @RequestParam(value="id", required=false) Long id)
        throws IOException, KeeperException, InterruptedException {
    final List<PlannedJob> running = new ArrayList<>();
    for (CallableJob run :
         jobDao.getJobRuns(null, AgentConsumer.LIMIT_JOB_RUNS).values()) {
        if (!run.isDone()) {
            running.add(run.getPlannedJob());
        }
    }
    return running;
}
// Returns the queued (not yet started) planned jobs.
@RequestMapping(value="/queue", method=RequestMethod.GET)
public @ResponseBody List<PlannedJob> getQueue(
        @RequestParam(value="id", required=false) Long id)
        throws IOException, KeeperException, InterruptedException {
    // Thin delegation; id may be null — presumably that returns the whole
    // queue, confirm against JobDao.getQueue.
    return jobDao.getQueue(id);
}
// Enqueues an already-planned job for execution. No validation happens here.
@RequestMapping(value="/queue", method=RequestMethod.POST)
public @ResponseBody Response queueJob(@RequestBody final PlannedJob aJob) {
    jobDao.addToQueue(aJob);
    return SUCCESS;
}
// Removes a planned job from the queue; 404-style error if it was not queued.
@RequestMapping(value="/queue", method=RequestMethod.DELETE)
public @ResponseBody Response cancelJob(@RequestBody final PlannedJob aJob) throws NotFoundException {
    // cancelJob reports how many queue entries were removed.
    final int removed = jobDao.cancelJob(aJob);
    if (removed != 1) {
        throw new NotFoundException("Job was not found in queue");
    }
    return SUCCESS;
}
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.refactoring.inheritanceToDelegation;
import com.intellij.codeInsight.NullableNotNullManager;
import com.intellij.codeInsight.daemon.impl.analysis.JavaHighlightUtil;
import com.intellij.codeInsight.generation.GenerateMembersUtil;
import com.intellij.codeInsight.generation.OverrideImplementExploreUtil;
import com.intellij.find.findUsages.PsiElement2UsageTargetAdapter;
import com.intellij.lang.findUsages.DescriptiveNameUtil;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.wm.WindowManager;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.codeStyle.VariableKind;
import com.intellij.psi.javadoc.PsiDocComment;
import com.intellij.psi.search.searches.ClassInheritorsSearch;
import com.intellij.psi.util.*;
import com.intellij.refactoring.BaseRefactoringProcessor;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.refactoring.inheritanceToDelegation.usageInfo.*;
import com.intellij.refactoring.ui.ConflictsDialog;
import com.intellij.refactoring.util.CommonRefactoringUtil;
import com.intellij.refactoring.util.ConflictsUtil;
import com.intellij.refactoring.util.RefactoringUIUtil;
import com.intellij.refactoring.util.classMembers.ClassMemberReferencesVisitor;
import com.intellij.refactoring.util.classRefs.ClassInstanceScanner;
import com.intellij.refactoring.util.classRefs.ClassReferenceScanner;
import com.intellij.refactoring.util.classRefs.ClassReferenceSearchingScanner;
import com.intellij.usageView.UsageInfo;
import com.intellij.usageView.UsageViewDescriptor;
import com.intellij.usages.UsageInfoToUsageConverter;
import com.intellij.usages.UsageTarget;
import com.intellij.usages.UsageViewManager;
import com.intellij.usages.UsageViewPresentation;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.VisibilityUtil;
import com.intellij.util.containers.HashMap;
import com.intellij.util.containers.MultiMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
/**
* @author dsl
*/
public class InheritanceToDelegationProcessor extends BaseRefactoringProcessor {
  private static final Logger LOG = Logger.getInstance("#com.intellij.refactoring.inheritanceToDelegation.InheritanceToDelegationProcessor");

  // The class whose base-class inheritance is being replaced by delegation.
  private final PsiClass myClass;
  // Name of the generated private inner class (used when one is needed).
  private final String myInnerClassName;
  private final boolean myIsDelegateOtherMembers;
  // Interfaces to keep "implementing" via generated delegating methods.
  private final Set<PsiClass> myDelegatedInterfaces;
  // Base-class methods the user chose to expose through delegation.
  private final Set<PsiMethod> myDelegatedMethods;
  // Visibility for a generated delegate, taken from an existing override in myClass.
  private final HashMap<PsiMethod,String> myDelegatedMethodsVisibility;
  private final Set<PsiMethod> myOverriddenMethods;

  // The superclass/interface the class currently inherits from.
  private final PsiClass myBaseClass;
  private final Set<PsiMember> myBaseClassMembers;
  // Name of the introduced delegate field and of its optional getter.
  private final String myFieldName;
  private final String myGetterName;
  private final boolean myGenerateGetter;
  private final Set<PsiClass> myBaseClassBases;
  // Interfaces myClass was observed being used as (filled in findUsages()).
  private Set<PsiClass> myClassImplementedInterfaces;
  private final PsiElementFactory myFactory;
  private final PsiClassType myBaseClassType;
  private final PsiManager myManager;
  private final boolean myIsInnerClassNeeded;
  // myClass plus all its inheritors (filled in findUsages()).
  private Set<PsiClass> myClassInheritors;
  // Delegated methods already handled via the inner-class machinery.
  private HashSet<PsiMethod> myAbstractDelegatedMethods;
  // Cache: superclass -> substitutor mapping its type parameters for myClass.
  private final Map<PsiClass, PsiSubstitutor> mySuperClassesToSubstitutors = new HashMap<PsiClass, PsiSubstitutor>();
  /**
   * Prepares the inheritance-to-delegation refactoring.
   *
   * @param aClass               class whose base-class inheritance is converted to delegation
   * @param targetBaseClass      the base class/interface to stop inheriting from
   * @param fieldName            name for the introduced delegate field
   * @param innerClassName       name for the generated inner class, if one is needed
   * @param delegatedInterfaces  interfaces to keep implementing via delegation
   * @param delegatedMethods     base-class methods to expose via delegating wrappers
   * @param delegateOtherMembers whether remaining member usages are delegated too
   * @param generateGetter       whether to generate a getter for the delegate field
   */
  public InheritanceToDelegationProcessor(Project project,
                                          PsiClass aClass,
                                          @NotNull PsiClass targetBaseClass,
                                          String fieldName,
                                          String innerClassName,
                                          PsiClass[] delegatedInterfaces,
                                          PsiMethod[] delegatedMethods,
                                          boolean delegateOtherMembers,
                                          boolean generateGetter) {
    super(project);

    myClass = aClass;
    myInnerClassName = innerClassName;
    myIsDelegateOtherMembers = delegateOtherMembers;

    myManager = myClass.getManager();
    myFactory = JavaPsiFacade.getInstance(myManager.getProject()).getElementFactory();

    myBaseClass = targetBaseClass;
    // Removing inheritance from java.lang.Object would be meaningless.
    LOG.assertTrue(
      //       && !myBaseClass.isInterface()
      myBaseClass.getQualifiedName() == null || !myBaseClass.getQualifiedName().equals(CommonClassNames.JAVA_LANG_OBJECT), myBaseClass);
    myBaseClassMembers = getAllBaseClassMembers();
    myBaseClassBases = getAllBases();
    myBaseClassType = myFactory.createType(myBaseClass, getSuperSubstitutor (myBaseClass));

    myIsInnerClassNeeded = InheritanceToDelegationUtil.isInnerClassNeeded(myClass, myBaseClass);

    myFieldName = fieldName;
    final String propertyName = JavaCodeStyleManager.getInstance(myProject).variableNameToPropertyName(myFieldName, VariableKind.FIELD);
    myGetterName = GenerateMembersUtil.suggestGetterName(propertyName, myBaseClassType, myProject);
    myGenerateGetter = generateGetter;

    myDelegatedInterfaces = new LinkedHashSet<PsiClass>();
    addAll(myDelegatedInterfaces, delegatedInterfaces);
    myDelegatedMethods = new LinkedHashSet<PsiMethod>();
    addAll(myDelegatedMethods, delegatedMethods);
    myDelegatedMethodsVisibility = new HashMap<PsiMethod, String>();
    // If myClass already overrides a delegated method, the generated delegate
    // keeps that override's visibility.
    for (PsiMethod method : myDelegatedMethods) {
      MethodSignature signature = method.getSignature(getSuperSubstitutor(method.getContainingClass()));
      PsiMethod overridingMethod = MethodSignatureUtil.findMethodBySignature(myClass, signature, false);
      if (overridingMethod != null) {
        myDelegatedMethodsVisibility.put(method,
                                         VisibilityUtil.getVisibilityModifier(overridingMethod.getModifierList()));
      }
    }

    myOverriddenMethods = getOverriddenMethods();
  }
private PsiSubstitutor getSuperSubstitutor(final PsiClass superClass) {
PsiSubstitutor result = mySuperClassesToSubstitutors.get(superClass);
if (result == null) {
result = TypeConversionUtil.getSuperClassSubstitutor(superClass, myClass, PsiSubstitutor.EMPTY);
mySuperClassesToSubstitutors.put(superClass, result);
}
return result;
}
  // Describes the refactored class for the usage-preview tree.
  @NotNull
  protected UsageViewDescriptor createUsageViewDescriptor(@NotNull UsageInfo[] usages) {
    return new InheritanceToDelegationViewDescriptor(myClass);
  }
  /**
   * Collects every usage affected by the refactoring: references to the class
   * itself, references from its own members to inherited base-class members,
   * and usages inside all inheritors.
   */
  @NotNull
  protected UsageInfo[] findUsages() {
    ArrayList<UsageInfo> usages = new ArrayList<UsageInfo>();
    final PsiClass[] inheritors = ClassInheritorsSearch.search(myClass, true).toArray(PsiClass.EMPTY_ARRAY);
    myClassInheritors = new HashSet<PsiClass>();
    myClassInheritors.add(myClass);
    addAll(myClassInheritors, inheritors);
    {
      // Scan references to myClass and member references within its body.
      ClassReferenceScanner scanner = new ClassReferenceSearchingScanner(myClass);
      final MyClassInstanceReferenceVisitor instanceReferenceVisitor = new MyClassInstanceReferenceVisitor(myClass, usages);
      scanner.processReferences(new ClassInstanceScanner(myClass, instanceReferenceVisitor));
      MyClassMemberReferencesVisitor visitor = new MyClassMemberReferencesVisitor(usages, instanceReferenceVisitor);
      myClass.accept(visitor);
      // Interfaces instances were observed being used as; consumed later by
      // addImplementingInterfaces() to avoid duplicate implements entries.
      myClassImplementedInterfaces = instanceReferenceVisitor.getImplementedInterfaces();
    }

    for (PsiClass inheritor : inheritors) {
      processClass(inheritor, usages);
    }

    return usages.toArray(new UsageInfo[usages.size()]);
  }
private FieldAccessibility getFieldAccessibility(PsiElement element) {
for (PsiClass aClass : myClassInheritors) {
if (PsiTreeUtil.isAncestor(aClass, element, false)) {
return new FieldAccessibility(true, aClass);
}
}
return FieldAccessibility.INVISIBLE;
}
  /**
   * Pre-flight before the refactoring runs: shows conflicts when interactive,
   * surfaces Object-upcast usages in a separate usage view, and filters the
   * usage list down to the ones that will actually be rewritten.
   *
   * @return true to proceed with the refactoring
   */
  protected boolean preprocessUsages(@NotNull Ref<UsageInfo[]> refUsages) {
    final UsageInfo[] usagesIn = refUsages.get();
    ArrayList<UsageInfo> oldUsages = new ArrayList<UsageInfo>();
    addAll(oldUsages, usagesIn);
    final ObjectUpcastedUsageInfo[] objectUpcastedUsageInfos = objectUpcastedUsages(usagesIn);
    // A non-null callback means we run interactively and may show dialogs.
    if (myPrepareSuccessfulSwingThreadCallback != null) {
      MultiMap<PsiElement, String> conflicts = new MultiMap<PsiElement, String>();
      if (objectUpcastedUsageInfos.length > 0) {
        // Upcasts to java.lang.Object cannot be rewritten automatically.
        final String message = RefactoringBundle.message("instances.of.0.upcasted.to.1.were.found",
                                                         RefactoringUIUtil.getDescription(myClass, true), CommonRefactoringUtil.htmlEmphasize(
          CommonClassNames.JAVA_LANG_OBJECT));

        conflicts.putValue(myClass, message);
      }

      analyzeConflicts(usagesIn, conflicts);
      if (!conflicts.isEmpty()) {
        ConflictsDialog conflictsDialog = prepareConflictsDialog(conflicts, usagesIn);
        if (!conflictsDialog.showAndGet()) {
          if (conflictsDialog.isShowConflicts()) prepareSuccessful();
          return false;
        }
      }

      if (objectUpcastedUsageInfos.length > 0) {
        showObjectUpcastedUsageView(objectUpcastedUsageInfos);
        setPreviewUsages(true);
      }
    }
    // Drop usages that are not rewritten (Object upcasts, inaccessible field usages).
    ArrayList<UsageInfo> filteredUsages = filterUsages(oldUsages);
    refUsages.set(filteredUsages.toArray(new UsageInfo[filteredUsages.size()]));
    prepareSuccessful();
    return true;
  }
  /**
   * Reports conflicts for the preview dialog: usages of non-delegated members
   * or upcasts from places where the delegate field will not be accessible,
   * plus subclass methods that stop overriding. Each conflict is reported at
   * most once per (member, containing element) pair.
   */
  private void analyzeConflicts(UsageInfo[] usage, MultiMap<PsiElement, String> conflicts) {
    // member -> containers already reported for that member
    HashMap<PsiElement,HashSet<PsiElement>> reportedNonDelegatedUsages = new HashMap<PsiElement, HashSet<PsiElement>>();
    // upcast target class -> containers already reported for that target
    HashMap<PsiClass,HashSet<PsiElement>> reportedUpcasts = new HashMap<PsiClass, HashSet<PsiElement>>();
//    HashSet reportedObjectUpcasts = new HashSet();

//    final String nameJavaLangObject = ConflictsUtil.htmlEmphasize("java.lang.Object");
    final String classDescription = RefactoringUIUtil.getDescription(myClass, false);

    for (UsageInfo aUsage : usage) {
      final PsiElement element = aUsage.getElement();
      if (aUsage instanceof InheritanceToDelegationUsageInfo) {
        InheritanceToDelegationUsageInfo usageInfo = (InheritanceToDelegationUsageInfo)aUsage;
        /*if (usageInfo instanceof ObjectUpcastedUsageInfo) {
          PsiElement container = ConflictsUtil.getContainer(usageInfo.element);
          if (!reportedObjectUpcasts.contains(container)) {
            String message = "An instance of " + classDescription + " is upcasted to "
                    + nameJavaLangObject + " in " + ConflictsUtil.getDescription(container, true) + ".";
            conflicts.add(message);
            reportedObjectUpcasts.add(container);
          }
        } else*/
        // Only a conflict when other-member delegation is off and the delegate
        // field will not be visible from the usage site.
        if (!myIsDelegateOtherMembers && !usageInfo.getDelegateFieldAccessible().isAccessible()) {
          if (usageInfo instanceof NonDelegatedMemberUsageInfo) {
            final PsiElement nonDelegatedMember = ((NonDelegatedMemberUsageInfo)usageInfo).nonDelegatedMember;
            HashSet<PsiElement> reportedContainers = reportedNonDelegatedUsages.get(nonDelegatedMember);
            if (reportedContainers == null) {
              reportedContainers = new HashSet<PsiElement>();
              reportedNonDelegatedUsages.put(nonDelegatedMember, reportedContainers);
            }
            final PsiElement container = ConflictsUtil.getContainer(element);
            if (!reportedContainers.contains(container)) {
              String message = RefactoringBundle.message("0.uses.1.of.an.instance.of.a.2", RefactoringUIUtil.getDescription(container, true),
                                                         RefactoringUIUtil.getDescription(nonDelegatedMember, true), classDescription);
              conflicts.putValue(container, CommonRefactoringUtil.capitalize(message));
              reportedContainers.add(container);
            }
          }
          else if (usageInfo instanceof UpcastedUsageInfo) {
            final PsiClass upcastedTo = ((UpcastedUsageInfo)usageInfo).upcastedTo;
            HashSet<PsiElement> reportedContainers = reportedUpcasts.get(upcastedTo);
            if (reportedContainers == null) {
              reportedContainers = new HashSet<PsiElement>();
              reportedUpcasts.put(upcastedTo, reportedContainers);
            }
            final PsiElement container = ConflictsUtil.getContainer(element);
            if (!reportedContainers.contains(container)) {
              String message = RefactoringBundle.message("0.upcasts.an.instance.of.1.to.2",
                                                         RefactoringUIUtil.getDescription(container, true), classDescription,
                                                         RefactoringUIUtil.getDescription(upcastedTo, false));
              conflicts.putValue(container, CommonRefactoringUtil.capitalize(message));
              reportedContainers.add(container);
            }
          }
        }
      }
      else if (aUsage instanceof NoLongerOverridingSubClassMethodUsageInfo) {
        NoLongerOverridingSubClassMethodUsageInfo info = (NoLongerOverridingSubClassMethodUsageInfo)aUsage;
        String message = RefactoringBundle.message("0.will.no.longer.override.1",
                                                   RefactoringUIUtil.getDescription(info.getSubClassMethod(), true),
                                                   RefactoringUIUtil.getDescription(info.getOverridenMethod(), true));
        conflicts.putValue(info.getSubClassMethod(), message);
      }
    }
  }
private static ObjectUpcastedUsageInfo[] objectUpcastedUsages(UsageInfo[] usages) {
ArrayList<ObjectUpcastedUsageInfo> result = new ArrayList<ObjectUpcastedUsageInfo>();
for (UsageInfo usage : usages) {
if (usage instanceof ObjectUpcastedUsageInfo) {
result.add(((ObjectUpcastedUsageInfo)usage));
}
}
return result.toArray(new ObjectUpcastedUsageInfo[result.size()]);
}
private ArrayList<UsageInfo> filterUsages(ArrayList<UsageInfo> usages) {
ArrayList<UsageInfo> result = new ArrayList<UsageInfo>();
for (UsageInfo usageInfo : usages) {
if (!(usageInfo instanceof InheritanceToDelegationUsageInfo)) {
continue;
}
if (usageInfo instanceof ObjectUpcastedUsageInfo) {
continue;
}
if (!myIsDelegateOtherMembers) {
final FieldAccessibility delegateFieldAccessible = ((InheritanceToDelegationUsageInfo)usageInfo).getDelegateFieldAccessible();
if (!delegateFieldAccessible.isAccessible()) continue;
}
result.add(usageInfo);
}
return result;
}
  /**
   * Collects usages inside one inheritor of the refactored class: instance
   * usages, member references, and subclass methods that will no longer
   * override a base-class method once inheritance is removed.
   */
  private void processClass(PsiClass inheritor, ArrayList<UsageInfo> usages) {
    ClassReferenceScanner scanner = new ClassReferenceSearchingScanner(inheritor);
    final MyClassInstanceReferenceVisitor instanceVisitor = new MyClassInstanceReferenceVisitor(inheritor, usages);
    scanner.processReferences(
            new ClassInstanceScanner(inheritor,
                                     instanceVisitor)
    );
    MyClassInheritorMemberReferencesVisitor classMemberVisitor = new MyClassInheritorMemberReferencesVisitor(inheritor, usages, instanceVisitor);
    inheritor.accept(classMemberVisitor);

    PsiSubstitutor inheritorSubstitutor = TypeConversionUtil.getSuperClassSubstitutor(myClass, inheritor, PsiSubstitutor.EMPTY);
    PsiMethod[] methods = inheritor.getMethods();
    for (PsiMethod method : methods) {
      final PsiMethod baseMethod = findSuperMethodInBaseClass(method);
      if (baseMethod != null) {
        if (!baseMethod.hasModifierProperty(PsiModifier.ABSTRACT)) {
          // Overrides a concrete base-class method: that override link is lost.
          usages.add(new NoLongerOverridingSubClassMethodUsageInfo(method, baseMethod));
        }
        else {
          // Abstract base method: only a conflict if myClass itself provides a
          // concrete method with the same signature.
          final PsiMethod[] methodsByName = myClass.findMethodsByName(method.getName(), false);
          for (final PsiMethod classMethod : methodsByName) {
            final MethodSignature signature = classMethod.getSignature(inheritorSubstitutor);
            if (signature.equals(method.getSignature(PsiSubstitutor.EMPTY))) {
              if (!classMethod.hasModifierProperty(PsiModifier.ABSTRACT)) {
                usages.add(new NoLongerOverridingSubClassMethodUsageInfo(method, baseMethod));
                break;
              }
            }
          }
        }
      }
    }
  }
  /**
   * Applies the refactoring: rewrites the collected usages, then generates the
   * inner class (if needed), the delegate field, the delegating methods, and
   * updates the implements/extends lists.
   */
  protected void performRefactoring(@NotNull UsageInfo[] usages) {
    try {
      for (UsageInfo aUsage : usages) {
        InheritanceToDelegationUsageInfo usage = (InheritanceToDelegationUsageInfo)aUsage;
        if (usage instanceof UnqualifiedNonDelegatedMemberUsageInfo) {
          // Unqualified reference to an inherited member: qualify it with the
          // delegate field/getter or the base class name.
          delegateUsageFromClass(usage.getElement(), ((NonDelegatedMemberUsageInfo)usage).nonDelegatedMember,
                                 usage.getDelegateFieldAccessible());
        }
        else {
          // Instance used as a base-class value: route through the delegate.
          upcastToDelegation(usage.getElement(), usage.getDelegateFieldAccessible());
        }
      }
      myAbstractDelegatedMethods = new HashSet<PsiMethod>();
      addInnerClass();
      addField(usages);
      delegateMethods();
      addImplementingInterfaces();
    } catch (IncorrectOperationException e) {
      LOG.error(e);
    }
  }
  /**
   * Creates the private inner class that extends/implements the former base
   * class and populates it with the required constructors and methods.
   * No-op when no inner class is needed.
   */
  private void addInnerClass() throws IncorrectOperationException {
    if (!myIsInnerClassNeeded) return;

    PsiClass innerClass = myFactory.createClass(myInnerClassName);
    final PsiSubstitutor superClassSubstitutor = TypeConversionUtil.getSuperClassSubstitutor(myBaseClass, myClass, PsiSubstitutor.EMPTY);
    final PsiClassType superClassType = myFactory.createType(myBaseClass, superClassSubstitutor);
    final PsiJavaCodeReferenceElement baseClassReferenceElement = myFactory.createReferenceElementByType(superClassType);
    if (!myBaseClass.isInterface()) {
      innerClass.getExtendsList().add(baseClassReferenceElement);
    } else {
      innerClass.getImplementsList().add(baseClassReferenceElement);
    }
    PsiUtil.setModifierProperty(innerClass, PsiModifier.PRIVATE, true);
    innerClass = (PsiClass) myClass.add(innerClass);

    List<InnerClassMethod> innerClassMethods = getInnerClassMethods();
    for (InnerClassMethod innerClassMethod : innerClassMethods) {
      innerClassMethod.createMethod(innerClass);
    }
  }
  /**
   * Rewrites a reference to an inherited (non-delegated) member so that it goes
   * through the delegate field, the getter, or the base class name (statics).
   */
  private void delegateUsageFromClass(PsiElement element, PsiElement nonDelegatedMember,
                                      FieldAccessibility fieldAccessibility) throws IncorrectOperationException {
    if (element instanceof PsiReferenceExpression) {
      PsiReferenceExpression referenceExpression = (PsiReferenceExpression) element;
      if (referenceExpression.getQualifierExpression() != null) {
        // Already qualified: only the qualifier expression needs rewriting.
        upcastToDelegation(referenceExpression.getQualifierExpression(), fieldAccessibility);
      } else {
        final String name = ((PsiNamedElement) nonDelegatedMember).getName();
        final String qualifier;
        if (isStatic (nonDelegatedMember)) {
          // Static members are addressed via the base class, not the delegate.
          qualifier = myBaseClass.getName();
        }
        else if (!fieldAccessibility.isAccessible() && myGenerateGetter) {
          // Field not visible here: go through the generated getter.
          qualifier = myGetterName + "()";
        }
        else {
          qualifier = myFieldName;
        }
        PsiExpression newExpr = myFactory.createExpressionFromText(qualifier + "." + name, element);
        newExpr = (PsiExpression) CodeStyleManager.getInstance(myProject).reformat(newExpr);
        element.replace(newExpr);
      }
    }
    else if (element instanceof PsiJavaCodeReferenceElement) {
      final String name = ((PsiNamedElement) nonDelegatedMember).getName();
      PsiElement parent = element.getParent ();
      if (!isStatic (nonDelegatedMember) && parent instanceof PsiNewExpression) {
        // Constructor reference inside a new-expression.
        final PsiNewExpression newExpr = (PsiNewExpression) parent;
        if (newExpr.getQualifier() != null) {
          upcastToDelegation(newExpr.getQualifier(), fieldAccessibility);
        } else {
          final String qualifier;
          if (!fieldAccessibility.isAccessible() && myGenerateGetter) {
            qualifier = myGetterName + "()";
          }
          else {
            qualifier = myFieldName;
          }
          newExpr.replace(myFactory.createExpressionFromText(qualifier + "." + newExpr.getText(), parent));
        }
      }
      else {
        // Plain type/member reference: qualify with the base class name.
        final String qualifier = myBaseClass.getName();
        PsiJavaCodeReferenceElement newRef = myFactory.createFQClassNameReferenceElement(qualifier + "." + name, element.getResolveScope ());
        //newRef = (PsiJavaCodeReferenceElement) CodeStyleManager.getInstance(myProject).reformat(newRef);
        element.replace(newRef);
      }
    } else {
      // Unexpected element kind — indicates a bug in usage collection.
      LOG.assertTrue(false);
    }
  }
private static boolean isStatic(PsiElement member) {
if (member instanceof PsiModifierListOwner) {
final PsiModifierListOwner method = (PsiModifierListOwner) member;
return method.hasModifierProperty (PsiModifier.STATIC);
}
return false;
}
  /**
   * Replaces an expression that is used as a base-class instance with an access
   * to the delegate field (or its getter). For a general expression the result
   * is {@code <expr>.field}; for this/super it is a bare field access unless a
   * variable with the field's name is in scope.
   */
  private void upcastToDelegation(PsiElement element, FieldAccessibility fieldAccessibility) throws IncorrectOperationException {
    final PsiExpression expression = (PsiExpression) element;
    final PsiExpression newExpr;
    final PsiReferenceExpression ref;

    final String delegateQualifier;
    // "a." is a placeholder qualifier; it is swapped for the original
    // expression further down (see ref.replace(expression)).
    if (!(expression instanceof PsiThisExpression || expression instanceof PsiSuperExpression)) {
      delegateQualifier = "a.";
    } else {
      // this/super: qualify only when some variable named like the field is in
      // scope and would otherwise shadow it.
      PsiResolveHelper resolveHelper = JavaPsiFacade.getInstance(myProject).getResolveHelper();
      final PsiVariable psiVariable = resolveHelper.resolveReferencedVariable(myFieldName, element);
      if (psiVariable == null) {
        delegateQualifier = "";
      } else {
        delegateQualifier = "a.";
      }
    }
    if (!fieldAccessibility.isAccessible() && myGenerateGetter) {
      newExpr = myFactory.createExpressionFromText(delegateQualifier + myGetterName + "()", expression);
      ref = (PsiReferenceExpression) ((PsiMethodCallExpression) newExpr).getMethodExpression().getQualifierExpression();
    } else {
      newExpr = myFactory.createExpressionFromText(delegateQualifier + myFieldName, expression);
      ref = (PsiReferenceExpression) ((PsiReferenceExpression) newExpr).getQualifierExpression();
    }
    //    LOG.debug("upcastToDelegation:" + element + ":newExpr = " + newExpr);
    //    LOG.debug("upcastToDelegation:" + element + ":ref = " + ref);
    if (ref != null) {
      // Substitute the "a" placeholder with the original expression.
      ref.replace(expression);
    }
    expression.replace(newExpr);
    //    LOG.debug("upcastToDelegation:" + element + ":replaced = " + replaced);
  }
private void delegateMethods() throws IncorrectOperationException {
for (PsiMethod method : myDelegatedMethods) {
if (!myAbstractDelegatedMethods.contains(method)) {
PsiMethod methodToAdd = delegateMethod(myFieldName, method, getSuperSubstitutor(method.getContainingClass()));
@PsiModifier.ModifierConstant String visibility = myDelegatedMethodsVisibility.get(method);
if (visibility != null) {
PsiUtil.setModifierProperty(methodToAdd, visibility, true);
}
myClass.add(methodToAdd);
}
}
}
  /**
   * Builds a non-abstract method with {@code method}'s (substituted) signature
   * whose body forwards the call to {@code delegationTarget}.
   */
  private PsiMethod delegateMethod(String delegationTarget,
                                   PsiMethod method,
                                   PsiSubstitutor substitutor) throws IncorrectOperationException {
    substitutor = OverrideImplementExploreUtil.correctSubstitutor(method, substitutor);
    PsiMethod methodToAdd = GenerateMembersUtil.substituteGenericMethod(method, substitutor);
    methodToAdd.getModifierList().setModifierProperty(PsiModifier.ABSTRACT, false);
    NullableNotNullManager.getInstance(myProject).copyNullableOrNotNullAnnotation(method, methodToAdd);

    final String delegationBody = getDelegationBody(methodToAdd, delegationTarget);
    PsiCodeBlock newBody = myFactory.createCodeBlockFromText(delegationBody, method);

    PsiCodeBlock oldBody = methodToAdd.getBody();
    if (oldBody != null) {
      oldBody.replace(newBody);
    }
    else {
      // Source method was abstract (no body): append the generated body.
      methodToAdd.addBefore(newBody, null);
    }

    // The generated delegate should not carry the original's javadoc.
    if (methodToAdd.getDocComment() != null) methodToAdd.getDocComment().delete();
    methodToAdd = (PsiMethod)CodeStyleManager.getInstance(myProject).reformat(methodToAdd);
    methodToAdd = (PsiMethod)JavaCodeStyleManager.getInstance(myProject).shortenClassReferences(methodToAdd);
    return methodToAdd;
  }
private static String getDelegationBody(PsiMethod methodToAdd, String delegationTarget) {
StringBuilder buffer = new StringBuilder();
buffer.append("{\n");
if (!PsiType.VOID.equals(methodToAdd.getReturnType())) {
buffer.append("return ");
}
buffer.append(delegationTarget);
buffer.append(".");
buffer.append(methodToAdd.getName());
buffer.append("(");
PsiParameter[] params = methodToAdd.getParameterList().getParameters();
for (int i = 0; i < params.length; i++) {
PsiParameter param = params[i];
if (i > 0) {
buffer.append(",");
}
buffer.append(param.getName());
}
buffer.append(");\n}");
return buffer.toString();
}
  /**
   * Updates the class's heritage lists: adds delegated interfaces that are not
   * already implemented and removes the former base class reference.
   */
  private void addImplementingInterfaces() throws IncorrectOperationException {
    final PsiReferenceList implementsList = myClass.getImplementsList();
    LOG.assertTrue(implementsList != null);
    for (PsiClass delegatedInterface : myDelegatedInterfaces) {
      if (!myClassImplementedInterfaces.contains(delegatedInterface)) {
        implementsList.add(myFactory.createClassReferenceElement(delegatedInterface));
      }
    }
    if (!myBaseClass.isInterface()) {
      // Former base is a class: drop the single reference in the extends list.
      final PsiReferenceList extendsList = myClass.getExtendsList();
      LOG.assertTrue(extendsList != null);
      extendsList.getReferenceElements()[0].delete();
    } else {
      // Former base is an interface: locate and remove it from implements.
      final PsiJavaCodeReferenceElement[] interfaceRefs = implementsList.getReferenceElements();
      for (PsiJavaCodeReferenceElement interfaceRef : interfaceRefs) {
        final PsiElement resolved = interfaceRef.resolve();
        if (myManager.areElementsEquivalent(myBaseClass, resolved)) {
          interfaceRef.delete();
          break;
        }
      }
    }
  }
private void addField(UsageInfo[] usages) throws IncorrectOperationException {
final String fieldVisibility = getFieldVisibility(usages);
final boolean fieldInitializerNeeded = isFieldInitializerNeeded();
PsiField field = createField(fieldVisibility, fieldInitializerNeeded, defaultClassFieldType());
if (!myIsInnerClassNeeded) {
field.getTypeElement().replace(myFactory.createTypeElement(myBaseClassType));
if (fieldInitializerNeeded) {
final PsiJavaCodeReferenceElement classReferenceElement = myFactory.createReferenceElementByType(myBaseClassType);
PsiNewExpression newExpression = (PsiNewExpression) field.getInitializer();
newExpression.getClassReference().replace(classReferenceElement);
}
}
field = (PsiField) CodeStyleManager.getInstance(myProject).reformat(field);
myClass.add(field);
if (!fieldInitializerNeeded) {
fixConstructors();
}
if (myGenerateGetter) {
final String getterVisibility = PsiModifier.PUBLIC;
StringBuffer getterBuffer = new StringBuffer();
getterBuffer.append(getterVisibility);
getterBuffer.append(" Object ");
getterBuffer.append(myGetterName);
getterBuffer.append("() {\n return ");
getterBuffer.append(myFieldName);
getterBuffer.append(";\n}");
PsiMethod getter = myFactory.createMethodFromText(getterBuffer.toString(), myClass);
getter.getReturnTypeElement().replace(myFactory.createTypeElement(myBaseClassType));
getter = (PsiMethod) CodeStyleManager.getInstance(myProject).reformat(getter);
myClass.add(getter);
}
}
private String getFieldVisibility(UsageInfo[] usages) {
if (myIsDelegateOtherMembers && !myGenerateGetter) {
return PsiModifier.PUBLIC;
}
for (UsageInfo aUsage : usages) {
InheritanceToDelegationUsageInfo usage = (InheritanceToDelegationUsageInfo)aUsage;
final FieldAccessibility delegateFieldAccessible = usage.getDelegateFieldAccessible();
if (delegateFieldAccessible.isAccessible() && delegateFieldAccessible.getContainingClass() != myClass) {
return PsiModifier.PROTECTED;
}
}
return PsiModifier.PRIVATE;
}
private String defaultClassFieldType() {
return (myIsInnerClassNeeded ? myInnerClassName : "Object");
}
private PsiField createField(final String fieldVisibility, final boolean fieldInitializerNeeded, String defaultTypeName) throws IncorrectOperationException {
StringBuffer buffer = new StringBuffer();
buffer.append(fieldVisibility);
buffer.append(" final " + defaultTypeName + " ");
buffer.append(myFieldName);
if (fieldInitializerNeeded) {
buffer.append(" = new " + defaultTypeName + "()");
}
buffer.append(";");
return myFactory.createFieldFromText(buffer.toString(), myClass);
}
  /**
   * Ensures every constructor initializes the delegate field: an explicit
   * super(...) call is converted into {@code field = new Delegate(<same args>)};
   * otherwise a no-arg initialization statement is inserted.
   */
  private void fixConstructors() throws IncorrectOperationException {
    if (myBaseClass.isInterface()) return;
    final PsiJavaCodeReferenceElement baseClassReference = myFactory.createClassReferenceElement(myBaseClass);

    PsiMethod[] constructors = myClass.getConstructors();
    for (PsiMethod constructor : constructors) {
      PsiCodeBlock body = constructor.getBody();
      final PsiStatement[] statements = body.getStatements();

      // Qualify with "this." when a constructor parameter shadows the field name.
      String fieldQualifier = "";
      PsiParameter[] constructorParams = constructor.getParameterList().getParameters();
      for (PsiParameter constructorParam : constructorParams) {
        if (myFieldName.equals(constructorParam.getName())) {
          fieldQualifier = "this.";
          break;
        }
      }

      final String assignmentText = fieldQualifier + myFieldName + "= new " + defaultClassFieldType() + "()";
      if (statements.length < 1 || !JavaHighlightUtil.isSuperOrThisCall(statements[0], true, true) || myBaseClass.isInterface()) {
        // No super()/this() chaining call: insert a plain no-arg initialization.
        PsiExpressionStatement assignmentStatement =
          (PsiExpressionStatement)myFactory.createStatementFromText(
            assignmentText, body
          );
        if (!myIsInnerClassNeeded) {
          // Replace the placeholder type in "new X()" with the real base class.
          final PsiAssignmentExpression assignmentExpr = (PsiAssignmentExpression)assignmentStatement.getExpression();
          final PsiNewExpression newExpression = (PsiNewExpression)assignmentExpr.getRExpression();
          assert newExpression != null;
          final PsiJavaCodeReferenceElement classRef = newExpression.getClassReference();
          assert classRef != null;
          classRef.replace(baseClassReference);
        }
        assignmentStatement = (PsiExpressionStatement)CodeStyleManager.getInstance(myProject).reformat(assignmentStatement);
        if (statements.length > 0) {
          if (!JavaHighlightUtil.isSuperOrThisCall(statements[0], true, false)) {
            body.addBefore(assignmentStatement, statements[0]);
          }
          else {
            body.addAfter(assignmentStatement, statements[0]);
          }
        }
        else {
          body.add(assignmentStatement);
        }
      }
      else {
        // First statement is a chaining call.
        final PsiExpressionStatement callStatement = ((PsiExpressionStatement)statements[0]);
        if (!JavaHighlightUtil.isSuperOrThisCall(callStatement, false, true)) {
          // It is super(...): rewrite it into "field = new Delegate(<same args>)".
          final PsiMethodCallExpression superConstructorCall =
            (PsiMethodCallExpression)callStatement.getExpression();
          PsiAssignmentExpression assignmentExpression =
            (PsiAssignmentExpression)myFactory.createExpressionFromText(
              assignmentText, superConstructorCall
            );
          PsiNewExpression newExpression =
            (PsiNewExpression)assignmentExpression.getRExpression();
          if (!myIsInnerClassNeeded) {
            newExpression.getClassReference().replace(baseClassReference);
          }
          assignmentExpression = (PsiAssignmentExpression)CodeStyleManager.getInstance(myProject).reformat(assignmentExpression);
          // Reuse the original super(...) argument list for the new expression.
          newExpression.getArgumentList().replace(superConstructorCall.getArgumentList());
          superConstructorCall.replace(assignmentExpression);
        }
      }
    }
  }
private boolean isFieldInitializerNeeded() {
if (myBaseClass.isInterface()) return true;
PsiMethod[] constructors = myClass.getConstructors();
for (PsiMethod constructor : constructors) {
final PsiStatement[] statements = constructor.getBody().getStatements();
if (statements.length > 0 && JavaHighlightUtil.isSuperOrThisCall(statements[0], true, false)) return false;
}
return true;
}
/**
 * Collects the methods that must be created in the generated inner class:
 * constructors mirroring explicit super()/this() calls, overriding/implementing
 * methods that are moved into the inner class, and delegates for abstract
 * base-class methods.
 */
private List<InnerClassMethod> getInnerClassMethods() {
  ArrayList<InnerClassMethod> result = new ArrayList<InnerClassMethod>();
  // find all necessary constructors
  if (!myBaseClass.isInterface()) {
    PsiMethod[] constructors = myClass.getConstructors();
    for (PsiMethod constructor : constructors) {
      final PsiStatement[] statements = constructor.getBody().getStatements();
      // A constructor whose first statement is an explicit super()/this() call
      // needs a matching constructor in the inner class.
      if (statements.length > 0 && JavaHighlightUtil.isSuperOrThisCall(statements[0], true, false)) {
        final PsiMethodCallExpression superConstructorCall =
          (PsiMethodCallExpression)((PsiExpressionStatement)statements[0]).getExpression();
        PsiElement superConstructor = superConstructorCall.getMethodExpression().resolve();
        if (superConstructor instanceof PsiMethod && ((PsiMethod)superConstructor).isConstructor()) {
          result.add(new InnerClassConstructor((PsiMethod)superConstructor));
        }
      }
    }
  }
  // find overriding/implementing method
  {
    class InnerClassOverridingMethod extends InnerClassMethod {
      public InnerClassOverridingMethod(PsiMethod method) {
        super(method);
      }

      public void createMethod(PsiClass innerClass)
        throws IncorrectOperationException {
        // Re-qualify references inside the class before physically moving the
        // method, then copy it into the inner class and remove the original.
        OverriddenMethodClassMemberReferencesVisitor visitor = new OverriddenMethodClassMemberReferencesVisitor();
        myClass.accept(visitor);
        final List<PsiAction> actions = visitor.getPsiActions();
        for (PsiAction action : actions) {
          action.run();
        }
        innerClass.add(myMethod);
        myMethod.delete();
        // myMethod.replace(delegateMethod(myMethod));
      }
    }
    for (PsiMethod method : myOverriddenMethods) {
      result.add(new InnerClassOverridingMethod(method));
    }
  }
  // fix abstract methods
  {
    class InnerClassAbstractMethod extends InnerClassMethod {
      // true when the class already has a concrete implementation in its own
      // hierarchy, so no new outer-class method needs to be introduced
      private final boolean myImplicitImplementation;

      public InnerClassAbstractMethod(PsiMethod method, final boolean implicitImplementation) {
        super(method);
        myImplicitImplementation = implicitImplementation;
      }

      public void createMethod(PsiClass innerClass)
        throws IncorrectOperationException {
        // The inner class implements the abstract method by delegating back to
        // the enclosing instance ("Outer.this.method(...)").
        PsiSubstitutor substitutor = getSuperSubstitutor(myMethod.getContainingClass());
        PsiMethod method = delegateMethod(myClass.getName() + ".this", myMethod, substitutor);
        final PsiClass containingClass = myMethod.getContainingClass();
        if (myBaseClass.isInterface() || containingClass.isInterface()) {
          PsiUtil.setModifierProperty(method, PsiModifier.PUBLIC, true);
        }
        innerClass.add(method);
        if (!myImplicitImplementation) {
          final MethodSignature signature = myMethod.getSignature(substitutor);
          PsiMethod outerMethod = MethodSignatureUtil.findMethodBySignature(myClass, signature, false);
          if (outerMethod == null) {
            // Introduce the delegation target on the outer class; its visibility
            // is widened to match already-delegated methods of equal signature.
            String visibility = checkOuterClassAbstractMethod(signature);
            PsiMethod newOuterMethod = (PsiMethod)myClass.add(myMethod);
            PsiUtil.setModifierProperty(newOuterMethod, visibility, true);
            final PsiDocComment docComment = newOuterMethod.getDocComment();
            if (docComment != null) {
              docComment.delete();
            }
          }
        }
      }
    }
    PsiMethod[] methods = myBaseClass.getAllMethods();
    for (PsiMethod method : methods) {
      if (method.hasModifierProperty(PsiModifier.ABSTRACT)) {
        final MethodSignature signature = method.getSignature(getSuperSubstitutor(method.getContainingClass()));
        PsiMethod classMethod = MethodSignatureUtil.findMethodBySignature(myClass, signature, true);
        if (classMethod == null || classMethod.hasModifierProperty(PsiModifier.ABSTRACT)) {
          result.add(new InnerClassAbstractMethod(method, false));
        }
        else if ((myBaseClass.isInterface() && classMethod.getContainingClass() != myClass)) { // IDEADEV-19675
          result.add(new InnerClassAbstractMethod(method, true));
        }
      }
    }
  }
  return result;
}
/**
 * Opens a usage view listing the places where instances of the class are
 * upcasted to java.lang.Object, and posts a status-bar notification.
 */
private void showObjectUpcastedUsageView(final ObjectUpcastedUsageInfo[] usages) {
  final String upcastedString = RefactoringBundle.message("instances.upcasted.to.object");
  final UsageViewPresentation presentation = new UsageViewPresentation();
  presentation.setTargetsNodeText(RefactoringBundle.message("replacing.inheritance.with.delegation"));
  presentation.setCodeUsagesString(RefactoringBundle.message("instances.casted.to.java.lang.object"));
  presentation.setUsagesString(upcastedString);
  presentation.setTabText(upcastedString);

  final UsageTarget[] targets = {new PsiElement2UsageTargetAdapter(myClass)};
  UsageViewManager.getInstance(myProject).showUsages(
    targets,
    UsageInfoToUsageConverter.convert(new PsiElement[]{myClass}, usages),
    presentation
  );
  WindowManager.getInstance().getStatusBar(myProject)
    .setInfo(RefactoringBundle.message("instances.upcasted.to.java.lang.object.found"));
}
/**
 * Computes the visibility for the outer-class delegation target of an abstract
 * base-class method: at least {@code protected}, widened to the highest
 * visibility among already-delegated methods with an equal signature. Matching
 * delegated methods are additionally recorded in {@code myAbstractDelegatedMethods}.
 *
 * @param methodSignature signature of the abstract method being implemented
 * @return the visibility modifier to apply to the new outer-class method
 */
@PsiModifier.ModifierConstant
private String checkOuterClassAbstractMethod(MethodSignature methodSignature) {
  String visibility = PsiModifier.PROTECTED;
  for (PsiMethod method : myDelegatedMethods) {
    MethodSignature otherSignature = method.getSignature(getSuperSubstitutor(method.getContainingClass()));
    if (MethodSignatureUtil.areSignaturesEqual(otherSignature, methodSignature)) {
      visibility = VisibilityUtil.getHighestVisibility(visibility,
                                                       VisibilityUtil.getVisibilityModifier(method.getModifierList()));
      myAbstractDelegatedMethods.add(method);
    }
  }
  return visibility;
}
/**
 * Returns, in declaration order, the methods of the class that override or
 * implement a method of the base class being removed.
 */
private Set<PsiMethod> getOverriddenMethods() {
  final LinkedHashSet<PsiMethod> overridden = new LinkedHashSet<PsiMethod>();
  for (PsiMethod candidate : myClass.getMethods()) {
    if (findSuperMethodInBaseClass(candidate) == null) continue;
    overridden.add(candidate);
  }
  return overridden;
}
/**
 * Finds a super method of {@code method} declared in the base class hierarchy,
 * skipping methods inherited from java.lang.Object.
 *
 * @return the matching super method, or null if there is none
 */
@Nullable
private PsiMethod findSuperMethodInBaseClass (PsiMethod method) {
  for (PsiMethod candidate : method.findSuperMethods()) {
    final PsiClass owner = candidate.getContainingClass();
    if (!InheritanceUtil.isInheritorOrSelf(myBaseClass, owner, true)) continue;
    final String ownerQName = owner.getQualifiedName();
    final boolean objectMethod =
      ownerQName != null && CommonClassNames.JAVA_LANG_OBJECT.equals(ownerQName);
    if (!objectMethod) {
      return candidate;
    }
  }
  return null;
}
/** Returns the undoable-command name shown for this refactoring. */
protected String getCommandName() {
  final String className = DescriptiveNameUtil.getDescriptiveName(myClass);
  return RefactoringBundle.message("replace.inheritance.with.delegation.command", className);
}
/**
 * Returns an unmodifiable set of every field, inner class and method inherited
 * through the base class, excluding members declared on java.lang.Object.
 */
private Set<PsiMember> getAllBaseClassMembers() {
  final HashSet<PsiMember> members = new HashSet<PsiMember>();
  addAll(members, myBaseClass.getAllFields());
  addAll(members, myBaseClass.getAllInnerClasses());
  addAll(members, myBaseClass.getAllMethods());
  // java.lang.Object members are never delegated, so drop them
  for (Iterator<PsiMember> it = members.iterator(); it.hasNext();) {
    final PsiClass owner = it.next().getContainingClass();
    if (CommonClassNames.JAVA_LANG_OBJECT.equals(owner.getQualifiedName())) {
      it.remove();
    }
  }
  return Collections.unmodifiableSet(members);
}
/**
 * Returns an unmodifiable set containing the base class itself plus all of its
 * superclasses and superinterfaces.
 */
private Set<PsiClass> getAllBases() {
  final HashSet<PsiClass> bases = new HashSet<PsiClass>();
  bases.add(myBaseClass);
  InheritanceUtil.getSuperClasses(myBaseClass, bases, true);
  return Collections.unmodifiableSet(bases);
}
/**
 * Adds every element of {@code objs} to {@code collection}.
 * Uses the standard-library bulk operation instead of a hand-rolled loop.
 *
 * @param collection destination collection (mutated in place)
 * @param objs       elements to add; may be empty
 */
private static <T> void addAll(Collection<T> collection, T[] objs) {
  Collections.addAll(collection, objs);
}
/**
 * Reports whether the given member is already covered by one of the delegated
 * methods. Only methods can be delegated; any other member kind returns false.
 */
private boolean isDelegated(PsiMember classMember) {
  if (!(classMember instanceof PsiMethod)) return false;
  // Both methods reside in the base class, so the raw (empty-substitutor)
  // signatures are directly comparable.
  final MethodSignature signature = ((PsiMethod)classMember).getSignature(PsiSubstitutor.EMPTY);
  for (PsiMethod delegated : myDelegatedMethods) {
    if (MethodSignatureUtil.areSignaturesEqual(signature, delegated.getSignature(PsiSubstitutor.EMPTY))) {
      return true;
    }
  }
  return false;
}
/**
 * Collects references to non-delegated base-class members made from the class
 * (or an inheritor) so they can later be re-routed through the delegate field.
 * Bare {@code this} expressions are forwarded to the supplied instance visitor.
 */
private class MyClassInheritorMemberReferencesVisitor extends ClassMemberReferencesVisitor {
  private final List<UsageInfo> myUsageInfoStorage;
  private final ClassInstanceScanner.ClassInstanceReferenceVisitor myInstanceVisitor;

  MyClassInheritorMemberReferencesVisitor(PsiClass aClass, List<UsageInfo> usageInfoStorage,
                                          ClassInstanceScanner.ClassInstanceReferenceVisitor instanceScanner) {
    super(aClass);
    myUsageInfoStorage = usageInfoStorage;
    myInstanceVisitor = instanceScanner;
  }

  protected void visitClassMemberReferenceElement(PsiMember classMember, PsiJavaCodeReferenceElement classMemberReference) {
    // Skip the bare "super" qualifier of super.foo() calls.
    if ("super".equals(classMemberReference.getText()) && classMemberReference.getParent() instanceof PsiMethodCallExpression) {
      return;
    }
    if (classMember != null && myBaseClassMembers.contains(classMember) && !isDelegated(classMember)) {
      final FieldAccessibility delegateFieldVisibility = new FieldAccessibility(true, getPsiClass());
      final InheritanceToDelegationUsageInfo usageInfo;
      if (classMemberReference instanceof PsiReferenceExpression) {
        if (((PsiReferenceExpression) classMemberReference).getQualifierExpression() == null) {
          // Unqualified access: the reference itself must gain the delegate qualifier.
          usageInfo = new UnqualifiedNonDelegatedMemberUsageInfo(classMemberReference, classMember,
                                                                 delegateFieldVisibility);
        } else {
          // Qualified access: the qualifier expression is what must be rewritten.
          usageInfo = new NonDelegatedMemberUsageInfo(
            ((PsiReferenceExpression) classMemberReference).getQualifierExpression(),
            classMember, delegateFieldVisibility
          );
        }
        myUsageInfoStorage.add(usageInfo);
      }
      else /*if (classMemberReference instanceof PsiJavaCodeReferenceElement)*/ {
        usageInfo = new UnqualifiedNonDelegatedMemberUsageInfo(classMemberReference, classMember,
                                                               delegateFieldVisibility);
        myUsageInfoStorage.add(usageInfo);
      }
    }
  }

  @Override public void visitThisExpression(PsiThisExpression expression) {
    ClassInstanceScanner.processNonArrayExpression(myInstanceVisitor, expression, null);
  }
}
/**
 * Variant of {@code MyClassInheritorMemberReferencesVisitor} rooted at the
 * refactored class itself; skips the methods that are about to be moved into
 * the generated inner class.
 */
private class MyClassMemberReferencesVisitor extends MyClassInheritorMemberReferencesVisitor {
  MyClassMemberReferencesVisitor(List<UsageInfo> usageInfoStorage,
                                 ClassInstanceScanner.ClassInstanceReferenceVisitor instanceScanner) {
    super(InheritanceToDelegationProcessor.this.myClass, usageInfoStorage, instanceScanner);
  }

  @Override public void visitMethod(PsiMethod method) {
    // Overridden methods move to the inner class; do not scan them here.
    if (myOverriddenMethods.contains(method)) {
      return;
    }
    super.visitMethod(method);
  }
}
/**
 * A deferred PSI mutation, recorded during analysis and executed later
 * (see {@code OverriddenMethodClassMemberReferencesVisitor#getPsiActions()}).
 */
interface PsiAction {
  void run() throws IncorrectOperationException;
}
/**
 * This visitor should be called for overridden methods before they are moved to an inner class.
 * It records deferred {@link PsiAction}s that re-qualify references which would
 * otherwise change meaning once the method body lives inside the inner class.
 */
private class OverriddenMethodClassMemberReferencesVisitor extends ClassMemberReferencesVisitor {
  private final ArrayList<PsiAction> myPsiActions;
  // Prototype "Outer.this" expression cloned by the qualifying actions below.
  private final PsiThisExpression myQualifiedThis;

  OverriddenMethodClassMemberReferencesVisitor() throws IncorrectOperationException {
    super(myClass);
    myPsiActions = new ArrayList<PsiAction>();
    final PsiJavaCodeReferenceElement classReferenceElement = myFactory.createClassReferenceElement(myClass);
    // Build "A.this" from a template, then swap "A" for the real class reference.
    myQualifiedThis = (PsiThisExpression) myFactory.createExpressionFromText("A.this", null);
    myQualifiedThis.getQualifier().replace(classReferenceElement);
  }

  public List<PsiAction> getPsiActions() {
    return myPsiActions;
  }

  /** Replaces a bare "this" with the qualified "Outer.this". */
  class QualifyThis implements PsiAction {
    private final PsiThisExpression myThisExpression;

    QualifyThis(PsiThisExpression thisExpression) {
      myThisExpression = thisExpression;
    }

    public void run() throws IncorrectOperationException {
      myThisExpression.replace(myQualifiedThis);
    }
  }

  /** Rewrites a reference "name" as "Outer.this.name". */
  class QualifyName implements PsiAction {
    private final PsiReferenceExpression myRef;
    private final String myReferencedName;

    QualifyName(PsiReferenceExpression ref, String name) {
      myRef = ref;
      myReferencedName = name;
    }

    public void run() throws IncorrectOperationException {
      PsiReferenceExpression newRef =
        (PsiReferenceExpression) myFactory.createExpressionFromText("a." + myReferencedName, null);
      newRef.getQualifierExpression().replace(myQualifiedThis);
      myRef.replace(newRef);
    }
  }

  /** Rewrites a reference "name" as "&lt;delegateField&gt;.name". */
  class QualifyWithField implements PsiAction {
    private final PsiReferenceExpression myReference;
    private final String myReferencedName;

    public QualifyWithField(final PsiReferenceExpression reference, final String name) {
      myReference = reference;
      myReferencedName = name;
    }

    public void run() throws IncorrectOperationException {
      PsiReferenceExpression newRef =
        (PsiReferenceExpression) myFactory.createExpressionFromText(myFieldName + "." + myReferencedName, null);
      myReference.replace(newRef);
    }
  }

  protected void visitClassMemberReferenceExpression(PsiMember classMember,
                                                     PsiReferenceExpression classMemberReference) {
    if (classMember instanceof PsiField) {
      final PsiField field = (PsiField) classMember;
      if (field.getContainingClass().equals(myClass)) {
        final String name = field.getName();
        final PsiField baseField = myBaseClass.findFieldByName(name, true);
        if (baseField != null) {
          // The base class declares a same-named field; presumably it would
          // shadow the outer field once the method moves into the inner class,
          // so the reference gets an explicit "Outer.this." qualifier.
          myPsiActions.add(new QualifyName(classMemberReference, name));
        } else if (classMemberReference.getQualifierExpression() instanceof PsiThisExpression) {
          myPsiActions.add(new QualifyThis((PsiThisExpression) classMemberReference.getQualifierExpression()));
        }
      }
    } else if (classMember instanceof PsiMethod) {
      final PsiMethod method = (PsiMethod) classMember;
      if (method.getContainingClass().equals(myClass)) {
        if (!myOverriddenMethods.contains(method)) {
          final PsiMethod baseMethod = findSuperMethodInBaseClass(method);
          if (baseMethod != null) {
            // Same concern as for fields, detected via the super-method lookup.
            myPsiActions.add(new QualifyName(classMemberReference, baseMethod.getName()));
          } else if (classMemberReference.getQualifierExpression() instanceof PsiThisExpression) {
            myPsiActions.add(new QualifyThis((PsiThisExpression) classMemberReference.getQualifierExpression()));
          }
        }
        else if (!myDelegatedMethods.contains(method)) {
          // The called method itself moves into the inner class; route the
          // call through the delegate field instead.
          myPsiActions.add(new QualifyWithField(classMemberReference, method.getName()));
        }
      }
    }
  }

  @Override public void visitThisExpression(final PsiThisExpression expression) {
    class Visitor implements ClassInstanceScanner.ClassInstanceReferenceVisitor {
      public void visitQualifier(PsiReferenceExpression qualified, PsiExpression instanceRef, PsiElement referencedInstance) {
        // Not expected for a bare "this" expression.
        LOG.assertTrue(false);
      }

      public void visitTypeCast(PsiTypeCastExpression typeCastExpression, PsiExpression instanceRef, PsiElement referencedInstance) {
        processType(typeCastExpression.getCastType().getType());
      }

      public void visitReadUsage(PsiExpression instanceRef, PsiType expectedType, PsiElement referencedInstance) {
        processType(expectedType);
      }

      public void visitWriteUsage(PsiExpression instanceRef, PsiType assignedType, PsiElement referencedInstance) {
        // Not expected: "this" cannot be assigned to.
        LOG.assertTrue(false);
      }

      private void processType(PsiType type) {
        // A "this" used where a type outside the removed base hierarchy is
        // expected still means the outer instance, so it must be qualified.
        final PsiClass resolved = PsiUtil.resolveClassInType(type);
        if (resolved != null && !myBaseClassBases.contains(resolved)) {
          myPsiActions.add(new QualifyThis(expression));
        }
      }
    }
    Visitor visitor = new Visitor();
    ClassInstanceScanner.processNonArrayExpression(visitor, expression, null);
  }

  protected void visitClassMemberReferenceElement(PsiMember classMember, PsiJavaCodeReferenceElement classMemberReference) {
    // Non-expression references need no re-qualification here.
  }
}
/**
 * Scans expressions whose value is an instance of the class and records usages
 * that will break once inheritance is replaced by delegation: accesses to
 * non-delegated base members, and upcasts to base types the class will no
 * longer be assignable to.
 */
private final class MyClassInstanceReferenceVisitor implements ClassInstanceScanner.ClassInstanceReferenceVisitor {
  private final PsiClass myClass;
  private final List<UsageInfo> myUsageInfoStorage;
  // Interfaces (transitively) implemented by myClass outside the removed base hierarchy.
  private final Set<PsiClass> myImplementedInterfaces;

  public MyClassInstanceReferenceVisitor(PsiClass aClass, List<UsageInfo> usageInfoStorage) {
    myClass = aClass;
    myUsageInfoStorage = usageInfoStorage;
    myImplementedInterfaces = getImplementedInterfaces();
  }

  public Set<PsiClass> getImplementedInterfaces() {
    // Walk the superclass chain up to (but excluding) myBaseClass, collecting
    // every implemented interface together with its super-interfaces.
    PsiClass aClass = myClass;
    HashSet<PsiClass> result = new HashSet<PsiClass>();
    while (aClass != null && !myManager.areElementsEquivalent(aClass, myBaseClass)) {
      final PsiClassType[] implementsTypes = aClass.getImplementsListTypes();
      for (PsiClassType implementsType : implementsTypes) {
        PsiClass resolved = implementsType.resolve();
        if (resolved != null && !myManager.areElementsEquivalent(resolved, myBaseClass)) {
          result.add(resolved);
          InheritanceUtil.getSuperClasses(resolved, result, true);
        }
      }
      aClass = aClass.getSuperClass();
    }
    return result;
  }

  public void visitQualifier(PsiReferenceExpression qualified, PsiExpression instanceRef, PsiElement referencedInstance) {
    final PsiExpression qualifierExpression = qualified.getQualifierExpression();
    // do not add usages inside a class
    if (qualifierExpression == null
        || qualifierExpression instanceof PsiThisExpression
        || qualifierExpression instanceof PsiSuperExpression) {
      return;
    }
    PsiElement resolved = qualified.resolve();
    if (resolved != null && (myBaseClassMembers.contains(resolved) || myOverriddenMethods.contains(resolved))
        && !isDelegated((PsiMember)resolved)) {
      myUsageInfoStorage.add(new NonDelegatedMemberUsageInfo(instanceRef, resolved, getFieldAccessibility(instanceRef)));
    }
  }

  public void visitTypeCast(PsiTypeCastExpression typeCastExpression, PsiExpression instanceRef, PsiElement referencedInstance) {
    processTypedUsage(typeCastExpression.getCastType().getType(), instanceRef);
  }

  public void visitReadUsage(PsiExpression instanceRef, PsiType expectedType, PsiElement referencedInstance) {
    processTypedUsage(expectedType, instanceRef);
  }

  public void visitWriteUsage(PsiExpression instanceRef, PsiType assignedType, PsiElement referencedInstance) {
    // Intentionally empty: write usages are not recorded.
  }

  private void processTypedUsage(PsiType type, PsiExpression instanceRef) {
    final PsiClass aClass = PsiUtil.resolveClassInType(type);
    if (aClass == null) return;
    String qName = aClass.getQualifiedName();
    if (qName != null && CommonClassNames.JAVA_LANG_OBJECT.equals(qName)) {
      // Object upcasts are collected separately (see showObjectUpcastedUsageView).
      myUsageInfoStorage.add(new ObjectUpcastedUsageInfo(instanceRef, aClass, getFieldAccessibility(instanceRef)));
    } else {
      // Only upcasts to base types that the class will no longer implement
      // (neither directly nor via a delegated interface) are conflicts.
      if (myBaseClassBases.contains(aClass)
          && !myImplementedInterfaces.contains(aClass) && !myDelegatedInterfaces.contains(aClass)) {
        myUsageInfoStorage.add(new UpcastedUsageInfo(instanceRef, aClass, getFieldAccessibility(instanceRef)));
      }
    }
  }
}
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3beta1/webhook.proto
package com.google.cloud.dialogflow.cx.v3beta1;
/**
*
*
* <pre>
* Represents session information communicated to and from the webhook.
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.SessionInfo}
*/
public final class SessionInfo extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3beta1.SessionInfo)
SessionInfoOrBuilder {
private static final long serialVersionUID = 0L;
// Use SessionInfo.newBuilder() to construct.
private SessionInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Default-instance state: empty session name, no parameters.
private SessionInfo() {
  session_ = "";
}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new SessionInfo();
}

@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
  return this.unknownFields;
}
// Wire-format parsing constructor (generated, pre-parser-delegation style):
// reads tag/value pairs until end of stream, preserving unknown fields.
private SessionInfo(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0: // end of stream
          done = true;
          break;
        case 10: // field 1 (session), length-delimited
          {
            java.lang.String s = input.readStringRequireUtf8();
            session_ = s;
            break;
          }
        case 18: // field 2 (parameters map entry), length-delimited
          {
            if (!((mutable_bitField0_ & 0x00000001) != 0)) {
              // lazily create the map field on the first entry
              parameters_ =
                  com.google.protobuf.MapField.newMapField(
                      ParametersDefaultEntryHolder.defaultEntry);
              mutable_bitField0_ |= 0x00000001;
            }
            com.google.protobuf.MapEntry<java.lang.String, com.google.protobuf.Value>
                parameters__ =
                    input.readMessage(
                        ParametersDefaultEntryHolder.defaultEntry.getParserForType(),
                        extensionRegistry);
            parameters_.getMutableMap().put(parameters__.getKey(), parameters__.getValue());
            break;
          }
        default:
          {
            if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
  } finally {
    // Always seal what was read so far, even on a parse error.
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.dialogflow.cx.v3beta1.WebhookProto
      .internal_static_google_cloud_dialogflow_cx_v3beta1_SessionInfo_descriptor;
}

@SuppressWarnings({"rawtypes"})
@java.lang.Override
protected com.google.protobuf.MapField internalGetMapField(int number) {
  switch (number) {
    case 2: // the only map field of this message: parameters
      return internalGetParameters();
    default:
      throw new RuntimeException("Invalid map field number: " + number);
  }
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.dialogflow.cx.v3beta1.WebhookProto
      .internal_static_google_cloud_dialogflow_cx_v3beta1_SessionInfo_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.dialogflow.cx.v3beta1.SessionInfo.class,
          com.google.cloud.dialogflow.cx.v3beta1.SessionInfo.Builder.class);
}
public static final int SESSION_FIELD_NUMBER = 1;
// Holds either a java.lang.String or a ByteString; decoded lazily on access.
private volatile java.lang.Object session_;
/**
 *
 *
 * <pre>
 * Always present for [WebhookRequest][google.cloud.dialogflow.cx.v3beta1.WebhookRequest]. Ignored for [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse].
 * The unique identifier of the [session][google.cloud.dialogflow.cx.v3beta1.DetectIntentRequest.session]. This
 * field can be used by the webhook to identify a session.
 * Format: `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agents/&lt;Agent
 * ID&gt;/sessions/&lt;Session ID&gt;` or `projects/&lt;Project ID&gt;/locations/&lt;Location
 * ID&gt;/agents/&lt;Agent ID&gt;/environments/&lt;Environment ID&gt;/sessions/&lt;Session ID&gt;`
 * if environment is specified.
 * </pre>
 *
 * <code>string session = 1 [(.google.api.resource_reference) = { ... }</code>
 *
 * @return The session.
 */
@java.lang.Override
public java.lang.String getSession() {
  java.lang.Object ref = session_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    session_ = s; // cache the decoded string for subsequent calls
    return s;
  }
}
/**
 *
 *
 * <pre>
 * Always present for [WebhookRequest][google.cloud.dialogflow.cx.v3beta1.WebhookRequest]. Ignored for [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse].
 * The unique identifier of the [session][google.cloud.dialogflow.cx.v3beta1.DetectIntentRequest.session]. This
 * field can be used by the webhook to identify a session.
 * Format: `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agents/&lt;Agent
 * ID&gt;/sessions/&lt;Session ID&gt;` or `projects/&lt;Project ID&gt;/locations/&lt;Location
 * ID&gt;/agents/&lt;Agent ID&gt;/environments/&lt;Environment ID&gt;/sessions/&lt;Session ID&gt;`
 * if environment is specified.
 * </pre>
 *
 * <code>string session = 1 [(.google.api.resource_reference) = { ... }</code>
 *
 * @return The bytes for session.
 */
@java.lang.Override
public com.google.protobuf.ByteString getSessionBytes() {
  java.lang.Object ref = session_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    session_ = b; // cache the encoded bytes for subsequent calls
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int PARAMETERS_FIELD_NUMBER = 2;

private static final class ParametersDefaultEntryHolder {
  // Template map entry (string key -> google.protobuf.Value) used for
  // (de)serializing the parameters map.
  static final com.google.protobuf.MapEntry<java.lang.String, com.google.protobuf.Value>
      defaultEntry =
          com.google.protobuf.MapEntry
              .<java.lang.String, com.google.protobuf.Value>newDefaultInstance(
                  com.google.cloud.dialogflow.cx.v3beta1.WebhookProto
                      .internal_static_google_cloud_dialogflow_cx_v3beta1_SessionInfo_ParametersEntry_descriptor,
                  com.google.protobuf.WireFormat.FieldType.STRING,
                  "",
                  com.google.protobuf.WireFormat.FieldType.MESSAGE,
                  com.google.protobuf.Value.getDefaultInstance());
}

private com.google.protobuf.MapField<java.lang.String, com.google.protobuf.Value> parameters_;

private com.google.protobuf.MapField<java.lang.String, com.google.protobuf.Value>
    internalGetParameters() {
  // parameters_ stays null until the first entry is parsed; expose a shared
  // empty map field in that case.
  if (parameters_ == null) {
    return com.google.protobuf.MapField.emptyMapField(ParametersDefaultEntryHolder.defaultEntry);
  }
  return parameters_;
}

public int getParametersCount() {
  return internalGetParameters().getMap().size();
}
/**
 *
 *
 * <pre>
 * Optional for [WebhookRequest][google.cloud.dialogflow.cx.v3beta1.WebhookRequest]. Optional for [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse].
 * All parameters collected from forms and intents during the session.
 * Parameters can be created, updated, or removed by the webhook. To remove a
 * parameter from the session, the webhook should explicitly set the parameter
 * value to null in [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse]. The map is keyed by parameters'
 * display names.
 * </pre>
 *
 * <code>map&lt;string, .google.protobuf.Value&gt; parameters = 2;</code>
 */
@java.lang.Override
public boolean containsParameters(java.lang.String key) {
  if (key == null) {
    throw new java.lang.NullPointerException();
  }
  return internalGetParameters().getMap().containsKey(key);
}
/** Use {@link #getParametersMap()} instead. */
@java.lang.Override
@java.lang.Deprecated
public java.util.Map<java.lang.String, com.google.protobuf.Value> getParameters() {
  return getParametersMap();
}
/**
 *
 *
 * <pre>
 * Optional for [WebhookRequest][google.cloud.dialogflow.cx.v3beta1.WebhookRequest]. Optional for [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse].
 * All parameters collected from forms and intents during the session.
 * Parameters can be created, updated, or removed by the webhook. To remove a
 * parameter from the session, the webhook should explicitly set the parameter
 * value to null in [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse]. The map is keyed by parameters'
 * display names.
 * </pre>
 *
 * <code>map&lt;string, .google.protobuf.Value&gt; parameters = 2;</code>
 */
@java.lang.Override
public java.util.Map<java.lang.String, com.google.protobuf.Value> getParametersMap() {
  return internalGetParameters().getMap();
}
/**
 *
 *
 * <pre>
 * Optional for [WebhookRequest][google.cloud.dialogflow.cx.v3beta1.WebhookRequest]. Optional for [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse].
 * All parameters collected from forms and intents during the session.
 * Parameters can be created, updated, or removed by the webhook. To remove a
 * parameter from the session, the webhook should explicitly set the parameter
 * value to null in [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse]. The map is keyed by parameters'
 * display names.
 * </pre>
 *
 * <code>map&lt;string, .google.protobuf.Value&gt; parameters = 2;</code>
 */
@java.lang.Override
public com.google.protobuf.Value getParametersOrDefault(
    java.lang.String key, com.google.protobuf.Value defaultValue) {
  if (key == null) {
    throw new java.lang.NullPointerException();
  }
  java.util.Map<java.lang.String, com.google.protobuf.Value> map =
      internalGetParameters().getMap();
  return map.containsKey(key) ? map.get(key) : defaultValue;
}
/**
 *
 *
 * <pre>
 * Optional for [WebhookRequest][google.cloud.dialogflow.cx.v3beta1.WebhookRequest]. Optional for [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse].
 * All parameters collected from forms and intents during the session.
 * Parameters can be created, updated, or removed by the webhook. To remove a
 * parameter from the session, the webhook should explicitly set the parameter
 * value to null in [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse]. The map is keyed by parameters'
 * display names.
 * </pre>
 *
 * <code>map&lt;string, .google.protobuf.Value&gt; parameters = 2;</code>
 */
@java.lang.Override
public com.google.protobuf.Value getParametersOrThrow(java.lang.String key) {
  if (key == null) {
    throw new java.lang.NullPointerException();
  }
  java.util.Map<java.lang.String, com.google.protobuf.Value> map =
      internalGetParameters().getMap();
  if (!map.containsKey(key)) {
    throw new java.lang.IllegalArgumentException();
  }
  return map.get(key);
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // No required fields: the message is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Field 1 (session) is skipped when empty, per proto3 default-value rules.
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(session_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, session_);
  }
  // Field 2 (parameters) is written one map entry message at a time.
  com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
      output, internalGetParameters(), ParametersDefaultEntryHolder.defaultEntry, 2);
  unknownFields.writeTo(output);
}

@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize; // -1 means not computed yet
  if (size != -1) return size;

  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(session_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, session_);
  }
  // Each map entry is sized as a nested message with field number 2.
  for (java.util.Map.Entry<java.lang.String, com.google.protobuf.Value> entry :
      internalGetParameters().getMap().entrySet()) {
    com.google.protobuf.MapEntry<java.lang.String, com.google.protobuf.Value> parameters__ =
        ParametersDefaultEntryHolder.defaultEntry
            .newBuilderForType()
            .setKey(entry.getKey())
            .setValue(entry.getValue())
            .build();
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, parameters__);
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.dialogflow.cx.v3beta1.SessionInfo)) {
    return super.equals(obj);
  }
  com.google.cloud.dialogflow.cx.v3beta1.SessionInfo other =
      (com.google.cloud.dialogflow.cx.v3beta1.SessionInfo) obj;

  // Field-by-field comparison: session, parameters map, unknown fields.
  if (!getSession().equals(other.getSession())) return false;
  if (!internalGetParameters().equals(other.internalGetParameters())) return false;
  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}

@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + SESSION_FIELD_NUMBER;
  hash = (53 * hash) + getSession().hashCode();
  // Empty maps are skipped so that absent and empty parameters hash the same.
  if (!internalGetParameters().getMap().isEmpty()) {
    hash = (37 * hash) + PARAMETERS_FIELD_NUMBER;
    hash = (53 * hash) + internalGetParameters().hashCode();
  }
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.cloud.dialogflow.cx.v3beta1.SessionInfo parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3beta1.SessionInfo parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.SessionInfo parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3beta1.SessionInfo parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.SessionInfo parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3beta1.SessionInfo parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.SessionInfo parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3beta1.SessionInfo parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.SessionInfo parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
/** Length-delimited variant of {@code parseFrom} that also resolves extensions. */
public static com.google.cloud.dialogflow.cx.v3beta1.SessionInfo parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}
/** Parses a {@code SessionInfo} directly from a {@link com.google.protobuf.CodedInputStream}. */
public static com.google.cloud.dialogflow.cx.v3beta1.SessionInfo parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
/** Parses a {@code SessionInfo} from a CodedInputStream, resolving extensions via the registry. */
public static com.google.cloud.dialogflow.cx.v3beta1.SessionInfo parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Instance hook used by the protobuf runtime; delegates to the static factory.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}
/** Creates a new {@link Builder} with every field at its default value. */
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
/** Creates a new {@link Builder} pre-populated with {@code prototype}'s fields. */
public static Builder newBuilder(com.google.cloud.dialogflow.cx.v3beta1.SessionInfo prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
// Avoids a needless mergeFrom when this is the (empty) default instance.
@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
// Runtime hook: creates a Builder tied to a parent for nested-builder change
// propagation.
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 *
 *
 * <pre>
 * Represents session information communicated to and from the webhook.
 * </pre>
 *
 * Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.SessionInfo}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3beta1.SessionInfo)
    com.google.cloud.dialogflow.cx.v3beta1.SessionInfoOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dialogflow.cx.v3beta1.WebhookProto
        .internal_static_google_cloud_dialogflow_cx_v3beta1_SessionInfo_descriptor;
  }

  // Reflection hook: maps the proto field number of each map field to its
  // MapField storage (field 2 = `parameters`).
  @SuppressWarnings({"rawtypes"})
  protected com.google.protobuf.MapField internalGetMapField(int number) {
    switch (number) {
      case 2:
        return internalGetParameters();
      default:
        throw new RuntimeException("Invalid map field number: " + number);
    }
  }

  // Mutable counterpart of internalGetMapField, used when reflection writes
  // map entries.
  @SuppressWarnings({"rawtypes"})
  protected com.google.protobuf.MapField internalGetMutableMapField(int number) {
    switch (number) {
      case 2:
        return internalGetMutableParameters();
      default:
        throw new RuntimeException("Invalid map field number: " + number);
    }
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dialogflow.cx.v3beta1.WebhookProto
        .internal_static_google_cloud_dialogflow_cx_v3beta1_SessionInfo_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dialogflow.cx.v3beta1.SessionInfo.class,
            com.google.cloud.dialogflow.cx.v3beta1.SessionInfo.Builder.class);
  }

  // Construct using com.google.cloud.dialogflow.cx.v3beta1.SessionInfo.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }

  // This message has no singular message-typed fields, so there are no nested
  // field builders to eagerly initialize; the body is intentionally empty.
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
  }

  // Resets every field to its default value (empty session, empty map).
  @java.lang.Override
  public Builder clear() {
    super.clear();
    session_ = "";
    internalGetMutableParameters().clear();
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.dialogflow.cx.v3beta1.WebhookProto
        .internal_static_google_cloud_dialogflow_cx_v3beta1_SessionInfo_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.dialogflow.cx.v3beta1.SessionInfo getDefaultInstanceForType() {
    return com.google.cloud.dialogflow.cx.v3beta1.SessionInfo.getDefaultInstance();
  }

  // Builds the message, failing if required invariants are not met (proto3:
  // isInitialized() is always true for this message, so this never throws).
  @java.lang.Override
  public com.google.cloud.dialogflow.cx.v3beta1.SessionInfo build() {
    com.google.cloud.dialogflow.cx.v3beta1.SessionInfo result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  // Copies the builder state into a new message; the parameters map is frozen
  // so the built message is immutable even if this builder is reused.
  @java.lang.Override
  public com.google.cloud.dialogflow.cx.v3beta1.SessionInfo buildPartial() {
    com.google.cloud.dialogflow.cx.v3beta1.SessionInfo result =
        new com.google.cloud.dialogflow.cx.v3beta1.SessionInfo(this);
    int from_bitField0_ = bitField0_;
    result.session_ = session_;
    result.parameters_ = internalGetParameters();
    result.parameters_.makeImmutable();
    onBuilt();
    return result;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  // Dispatches to the type-specific merge when possible; falls back to the
  // reflective merge for other Message implementations.
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.dialogflow.cx.v3beta1.SessionInfo) {
      return mergeFrom((com.google.cloud.dialogflow.cx.v3beta1.SessionInfo) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  // Field-by-field merge: non-empty scalar fields in `other` overwrite ours;
  // map entries from `other` are merged into our map.
  public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3beta1.SessionInfo other) {
    if (other == com.google.cloud.dialogflow.cx.v3beta1.SessionInfo.getDefaultInstance())
      return this;
    if (!other.getSession().isEmpty()) {
      session_ = other.session_;
      onChanged();
    }
    internalGetMutableParameters().mergeFrom(other.internalGetParameters());
    this.mergeUnknownFields(other.unknownFields);
    onChanged();
    return this;
  }

  // proto3 message with no required fields: always initialized.
  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  // Parses from the stream and merges into this builder. On a parse error the
  // partially-parsed message is still merged (in the finally block) before the
  // exception is rethrown unwrapped as an IOException.
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    com.google.cloud.dialogflow.cx.v3beta1.SessionInfo parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      parsedMessage =
          (com.google.cloud.dialogflow.cx.v3beta1.SessionInfo) e.getUnfinishedMessage();
      throw e.unwrapIOException();
    } finally {
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }

  // Presence bits for optional fields (unused by this message's fields but
  // emitted by the generator).
  private int bitField0_;

  // Either a String or a ByteString; lazily converted on access (see
  // getSession / getSessionBytes).
  private java.lang.Object session_ = "";
  /**
   *
   *
   * <pre>
   * Always present for [WebhookRequest][google.cloud.dialogflow.cx.v3beta1.WebhookRequest]. Ignored for [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse].
   * The unique identifier of the [session][google.cloud.dialogflow.cx.v3beta1.DetectIntentRequest.session]. This
   * field can be used by the webhook to identify a session.
   * Format: `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agents/&lt;Agent
   * ID&gt;/sessions/&lt;Session ID&gt;` or `projects/&lt;Project ID&gt;/locations/&lt;Location
   * ID&gt;/agents/&lt;Agent ID&gt;/environments/&lt;Environment ID&gt;/sessions/&lt;Session ID&gt;`
   * if environment is specified.
   * </pre>
   *
   * <code>string session = 1 [(.google.api.resource_reference) = { ... }</code>
   *
   * @return The session.
   */
  public java.lang.String getSession() {
    java.lang.Object ref = session_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent reads skip the UTF-8 decode.
      session_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Always present for [WebhookRequest][google.cloud.dialogflow.cx.v3beta1.WebhookRequest]. Ignored for [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse].
   * The unique identifier of the [session][google.cloud.dialogflow.cx.v3beta1.DetectIntentRequest.session]. This
   * field can be used by the webhook to identify a session.
   * Format: `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agents/&lt;Agent
   * ID&gt;/sessions/&lt;Session ID&gt;` or `projects/&lt;Project ID&gt;/locations/&lt;Location
   * ID&gt;/agents/&lt;Agent ID&gt;/environments/&lt;Environment ID&gt;/sessions/&lt;Session ID&gt;`
   * if environment is specified.
   * </pre>
   *
   * <code>string session = 1 [(.google.api.resource_reference) = { ... }</code>
   *
   * @return The bytes for session.
   */
  public com.google.protobuf.ByteString getSessionBytes() {
    java.lang.Object ref = session_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString so subsequent reads skip the encode.
      session_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Always present for [WebhookRequest][google.cloud.dialogflow.cx.v3beta1.WebhookRequest]. Ignored for [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse].
   * The unique identifier of the [session][google.cloud.dialogflow.cx.v3beta1.DetectIntentRequest.session]. This
   * field can be used by the webhook to identify a session.
   * Format: `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agents/&lt;Agent
   * ID&gt;/sessions/&lt;Session ID&gt;` or `projects/&lt;Project ID&gt;/locations/&lt;Location
   * ID&gt;/agents/&lt;Agent ID&gt;/environments/&lt;Environment ID&gt;/sessions/&lt;Session ID&gt;`
   * if environment is specified.
   * </pre>
   *
   * <code>string session = 1 [(.google.api.resource_reference) = { ... }</code>
   *
   * @param value The session to set.
   * @return This builder for chaining.
   */
  public Builder setSession(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    session_ = value;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Always present for [WebhookRequest][google.cloud.dialogflow.cx.v3beta1.WebhookRequest]. Ignored for [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse].
   * The unique identifier of the [session][google.cloud.dialogflow.cx.v3beta1.DetectIntentRequest.session]. This
   * field can be used by the webhook to identify a session.
   * Format: `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agents/&lt;Agent
   * ID&gt;/sessions/&lt;Session ID&gt;` or `projects/&lt;Project ID&gt;/locations/&lt;Location
   * ID&gt;/agents/&lt;Agent ID&gt;/environments/&lt;Environment ID&gt;/sessions/&lt;Session ID&gt;`
   * if environment is specified.
   * </pre>
   *
   * <code>string session = 1 [(.google.api.resource_reference) = { ... }</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearSession() {
    session_ = getDefaultInstance().getSession();
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Always present for [WebhookRequest][google.cloud.dialogflow.cx.v3beta1.WebhookRequest]. Ignored for [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse].
   * The unique identifier of the [session][google.cloud.dialogflow.cx.v3beta1.DetectIntentRequest.session]. This
   * field can be used by the webhook to identify a session.
   * Format: `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agents/&lt;Agent
   * ID&gt;/sessions/&lt;Session ID&gt;` or `projects/&lt;Project ID&gt;/locations/&lt;Location
   * ID&gt;/agents/&lt;Agent ID&gt;/environments/&lt;Environment ID&gt;/sessions/&lt;Session ID&gt;`
   * if environment is specified.
   * </pre>
   *
   * <code>string session = 1 [(.google.api.resource_reference) = { ... }</code>
   *
   * @param value The bytes for session to set.
   * @return This builder for chaining.
   */
  public Builder setSessionBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    session_ = value;
    onChanged();
    return this;
  }

  // Backing storage for the `parameters` map; null until first mutation.
  private com.google.protobuf.MapField<java.lang.String, com.google.protobuf.Value> parameters_;

  // Read-only view; returns the shared empty MapField while unset.
  private com.google.protobuf.MapField<java.lang.String, com.google.protobuf.Value>
      internalGetParameters() {
    if (parameters_ == null) {
      return com.google.protobuf.MapField.emptyMapField(
          ParametersDefaultEntryHolder.defaultEntry);
    }
    return parameters_;
  }

  // Mutable view; lazily allocates and copy-on-writes the MapField.
  private com.google.protobuf.MapField<java.lang.String, com.google.protobuf.Value>
      internalGetMutableParameters() {
    onChanged();
    ;
    if (parameters_ == null) {
      parameters_ =
          com.google.protobuf.MapField.newMapField(ParametersDefaultEntryHolder.defaultEntry);
    }
    if (!parameters_.isMutable()) {
      parameters_ = parameters_.copy();
    }
    return parameters_;
  }

  /** Returns the number of entries in the {@code parameters} map. */
  public int getParametersCount() {
    return internalGetParameters().getMap().size();
  }
  /**
   *
   *
   * <pre>
   * Optional for [WebhookRequest][google.cloud.dialogflow.cx.v3beta1.WebhookRequest]. Optional for [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse].
   * All parameters collected from forms and intents during the session.
   * Parameters can be created, updated, or removed by the webhook. To remove a
   * parameter from the session, the webhook should explicitly set the parameter
   * value to null in [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse]. The map is keyed by parameters'
   * display names.
   * </pre>
   *
   * <code>map&lt;string, .google.protobuf.Value&gt; parameters = 2;</code>
   */
  @java.lang.Override
  public boolean containsParameters(java.lang.String key) {
    if (key == null) {
      throw new java.lang.NullPointerException();
    }
    return internalGetParameters().getMap().containsKey(key);
  }
  /** Use {@link #getParametersMap()} instead. */
  @java.lang.Override
  @java.lang.Deprecated
  public java.util.Map<java.lang.String, com.google.protobuf.Value> getParameters() {
    return getParametersMap();
  }
  /**
   *
   *
   * <pre>
   * Optional for [WebhookRequest][google.cloud.dialogflow.cx.v3beta1.WebhookRequest]. Optional for [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse].
   * All parameters collected from forms and intents during the session.
   * Parameters can be created, updated, or removed by the webhook. To remove a
   * parameter from the session, the webhook should explicitly set the parameter
   * value to null in [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse]. The map is keyed by parameters'
   * display names.
   * </pre>
   *
   * <code>map&lt;string, .google.protobuf.Value&gt; parameters = 2;</code>
   */
  @java.lang.Override
  public java.util.Map<java.lang.String, com.google.protobuf.Value> getParametersMap() {
    return internalGetParameters().getMap();
  }
  /**
   *
   *
   * <pre>
   * Optional for [WebhookRequest][google.cloud.dialogflow.cx.v3beta1.WebhookRequest]. Optional for [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse].
   * All parameters collected from forms and intents during the session.
   * Parameters can be created, updated, or removed by the webhook. To remove a
   * parameter from the session, the webhook should explicitly set the parameter
   * value to null in [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse]. The map is keyed by parameters'
   * display names.
   * </pre>
   *
   * <code>map&lt;string, .google.protobuf.Value&gt; parameters = 2;</code>
   */
  @java.lang.Override
  public com.google.protobuf.Value getParametersOrDefault(
      java.lang.String key, com.google.protobuf.Value defaultValue) {
    if (key == null) {
      throw new java.lang.NullPointerException();
    }
    java.util.Map<java.lang.String, com.google.protobuf.Value> map =
        internalGetParameters().getMap();
    return map.containsKey(key) ? map.get(key) : defaultValue;
  }
  /**
   *
   *
   * <pre>
   * Optional for [WebhookRequest][google.cloud.dialogflow.cx.v3beta1.WebhookRequest]. Optional for [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse].
   * All parameters collected from forms and intents during the session.
   * Parameters can be created, updated, or removed by the webhook. To remove a
   * parameter from the session, the webhook should explicitly set the parameter
   * value to null in [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse]. The map is keyed by parameters'
   * display names.
   * </pre>
   *
   * <code>map&lt;string, .google.protobuf.Value&gt; parameters = 2;</code>
   */
  @java.lang.Override
  public com.google.protobuf.Value getParametersOrThrow(java.lang.String key) {
    if (key == null) {
      throw new java.lang.NullPointerException();
    }
    java.util.Map<java.lang.String, com.google.protobuf.Value> map =
        internalGetParameters().getMap();
    if (!map.containsKey(key)) {
      throw new java.lang.IllegalArgumentException();
    }
    return map.get(key);
  }

  /** Removes every entry from the {@code parameters} map. */
  public Builder clearParameters() {
    internalGetMutableParameters().getMutableMap().clear();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Optional for [WebhookRequest][google.cloud.dialogflow.cx.v3beta1.WebhookRequest]. Optional for [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse].
   * All parameters collected from forms and intents during the session.
   * Parameters can be created, updated, or removed by the webhook. To remove a
   * parameter from the session, the webhook should explicitly set the parameter
   * value to null in [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse]. The map is keyed by parameters'
   * display names.
   * </pre>
   *
   * <code>map&lt;string, .google.protobuf.Value&gt; parameters = 2;</code>
   */
  public Builder removeParameters(java.lang.String key) {
    if (key == null) {
      throw new java.lang.NullPointerException();
    }
    internalGetMutableParameters().getMutableMap().remove(key);
    return this;
  }
  /** Use alternate mutation accessors instead. */
  @java.lang.Deprecated
  public java.util.Map<java.lang.String, com.google.protobuf.Value> getMutableParameters() {
    return internalGetMutableParameters().getMutableMap();
  }
  /**
   *
   *
   * <pre>
   * Optional for [WebhookRequest][google.cloud.dialogflow.cx.v3beta1.WebhookRequest]. Optional for [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse].
   * All parameters collected from forms and intents during the session.
   * Parameters can be created, updated, or removed by the webhook. To remove a
   * parameter from the session, the webhook should explicitly set the parameter
   * value to null in [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse]. The map is keyed by parameters'
   * display names.
   * </pre>
   *
   * <code>map&lt;string, .google.protobuf.Value&gt; parameters = 2;</code>
   */
  public Builder putParameters(java.lang.String key, com.google.protobuf.Value value) {
    if (key == null) {
      throw new java.lang.NullPointerException();
    }
    if (value == null) {
      throw new java.lang.NullPointerException();
    }
    internalGetMutableParameters().getMutableMap().put(key, value);
    return this;
  }
  /**
   *
   *
   * <pre>
   * Optional for [WebhookRequest][google.cloud.dialogflow.cx.v3beta1.WebhookRequest]. Optional for [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse].
   * All parameters collected from forms and intents during the session.
   * Parameters can be created, updated, or removed by the webhook. To remove a
   * parameter from the session, the webhook should explicitly set the parameter
   * value to null in [WebhookResponse][google.cloud.dialogflow.cx.v3beta1.WebhookResponse]. The map is keyed by parameters'
   * display names.
   * </pre>
   *
   * <code>map&lt;string, .google.protobuf.Value&gt; parameters = 2;</code>
   */
  public Builder putAllParameters(
      java.util.Map<java.lang.String, com.google.protobuf.Value> values) {
    internalGetMutableParameters().getMutableMap().putAll(values);
    return this;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3beta1.SessionInfo)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3beta1.SessionInfo)
// Singleton default instance; doubles as the canonical "empty" message.
private static final com.google.cloud.dialogflow.cx.v3beta1.SessionInfo DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3beta1.SessionInfo();
}
/** Returns the shared immutable default (empty) instance. */
public static com.google.cloud.dialogflow.cx.v3beta1.SessionInfo getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Stateless parser shared by all the static parseFrom/parseDelimitedFrom
// overloads above.
private static final com.google.protobuf.Parser<SessionInfo> PARSER =
    new com.google.protobuf.AbstractParser<SessionInfo>() {
      @java.lang.Override
      public SessionInfo parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new SessionInfo(input, extensionRegistry);
      }
    };
/** Exposes the shared parser for generic protobuf plumbing. */
public static com.google.protobuf.Parser<SessionInfo> parser() {
  return PARSER;
}
// Instance hook required by the Message interface; same shared parser.
@java.lang.Override
public com.google.protobuf.Parser<SessionInfo> getParserForType() {
  return PARSER;
}
// Instance hook required by the MessageOrBuilder interface.
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.SessionInfo getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| |
/*
Derby - Class org.apache.derby.impl.io.vfmem.DataStore
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.impl.io.vfmem;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Iterator;
import org.apache.derby.io.StorageFile;
/**
* A virtual data store, keeping track of all the virtual files existing and
* offering a set of high-level operations on virtual files.
* <p>
* A newly created data store doesn't contain a single existing directory.
*/
public final class DataStore {

    /** The path separator used. */
    private static final char SEP = PathUtil.SEP;

    /** Constant for the empty String array. */
    private static final String[] EMPTY_STR_ARR = new String[0];

    /** Lock object for the file map. */
    private final Object LOCK = new Object();

    /** Lock object for the temporary file counter. */
    private final Object TMP_COUNTER_LOCK = new Object();

    /**
     * The files existing in the store.
     * <p>
     * The initial size is set to the number of initial files of a Derby
     * database, plus a few more.
     */
    // Maps normalized path (String) -> DataStoreEntry. Raw Map matches the
    // file's pre-generics style; all access is guarded by LOCK.
    private final Map files = new HashMap(80);

    /**
     * The name of the database this store serves, expected to be the absolute
     * path of the service root (i.e. /tmp/myDB if the database myDB is created
     * in /tmp).
     */
    private final String databaseName;

    /** Counter used to generate unique temporary file names. */
    private long tmpFileCounter = 0;

    /** Tells if this store is scheduled for deletion. */
    private boolean deleteMe;

    /**
     * Creates a new data store.
     *
     * @param databaseName the name of the associated database, expected to be
     *      the absolute path of the service root.
     */
    public DataStore(String databaseName) {
        this.databaseName = databaseName;
    }

    /**
     * Returns the database name, which is expected to equal the path of the
     * service root.
     *
     * @return The database name.
     */
    public String getDatabaseName() {
        return this.databaseName;
    }

    /**
     * Tells if this data store is scheduled for deletion.
     *
     * @return {@code true} if the store is awaiting deletion,
     *      {@code false} otherwise.
     */
    public boolean scheduledForDeletion() {
        return this.deleteMe;
    }

    /**
     * Creates a new entry in the data store.
     * <p>
     * This method returns {@code null} if the path already exists, if one of
     * the parent directories doesn't exist, or if one of the parents is a
     * file instead of a directory.
     *
     * @param iPath the path of the entry
     * @param isDir tells if the new entry shall be directory or a file
     * @return A {@code DataStoreEntry}-instance if the entry was successfully
     *      created, {@code null} otherwise
     */
    public DataStoreEntry createEntry(String iPath, boolean isDir) {
        // Normalize the path.
        final String nPath = new File(iPath).getPath();
        synchronized (LOCK) {
            if (files.containsKey(nPath)) {
                return null;
            }
            // Make sure the parent directories exist.
            // The list is walked from the root downwards (last element first).
            String[] parents = getParentList(nPath);
            for (int i=parents.length -1; i >= 0; i--) {
                DataStoreEntry entry = (DataStoreEntry)files.get(parents[i]);
                if (entry == null) {
                    return null;
                } else if (!entry.isDirectory()) {
                    return null;
                }
            }
            DataStoreEntry newEntry = new DataStoreEntry(nPath, isDir);
            files.put(nPath, newEntry);
            return newEntry;
        }
    }

    /**
     * Creates all the parents of the specified path.
     *
     * @return {@code true} if all parents either already existed as directories
     *      or were created, {@code false} otherwise
     */
    public boolean createAllParents(String path) {
        final String nPath = new File(path).getPath();
        // Iterate through the list and create the missing parents.
        String[] parents = getParentList(nPath);
        // Holding LOCK while calling createEntry is safe: Java monitors are
        // reentrant, so the nested synchronized (LOCK) simply re-enters.
        synchronized (LOCK) {
            for (int i=parents.length -1; i >= 0; i--) {
                String subPath = parents[i];
                DataStoreEntry entry = (DataStoreEntry)files.get(subPath);
                if (entry == null) {
                    createEntry(subPath, true);
                } else if (!entry.isDirectory()) {
                    // Fail if one of the parents is a regular file.
                    return false;
                }
            }
        }
        return true;
    }

    /**
     * Deletes the specified entry.
     * <p>
     * If the specified entry is a directory, it is only deleted if it is
     * empty. Read-only entries are deleted.
     *
     * @param iPath path of the entry to delete
     * @return {@code true} if the entry was deleted, {@code false} otherwise.
     */
    public boolean deleteEntry(String iPath) {
        final String nPath = new File(iPath).getPath();
        DataStoreEntry entry;
        synchronized (LOCK) {
            // Optimistically remove; re-added below if it turns out to be a
            // non-empty directory.
            entry = (DataStoreEntry)files.remove(nPath);
            if (entry != null) {
                if (entry.isDirectory()) {
                    String[] children = listChildren(nPath);
                    if (children.length > 0) {
                        // Re-add the entry.
                        files.put(nPath, entry);
                        return false;
                    }
                    // Check if we just deleted the service root. Normally the
                    // service would be deleted using deleteAll.
                    // NOTE(review): the second clause is always true here,
                    // since nPath was just removed from the map above.
                    if (nPath.equals(databaseName) &&
                            files.get(databaseName) == null) {
                        // Service root deleted, mark this store for removal.
                        deleteMe = true;
                    }
                }
                entry.release();
            }
        }
        return (entry != null);
    }

    /**
     * Returns the entry with the specified path.
     *
     * @param iPath path of the entry to fetch
     * @return {@code null} if the entry doesn't exist, the
     *      {@code DataStoreEntry}-object otherwise.
     */
    public DataStoreEntry getEntry(String iPath) {
        synchronized (LOCK) {
            // Use java.io.File to normalize the path.
            return (DataStoreEntry)files.get(new File(iPath).getPath());
        }
    }

    /**
     * Deletes the specified entry and all its children.
     *
     * @param iPath the root entry
     * @return {@code true} if the entry and all its children were deleted,
     *      {@code false} if the root doesn't exist.
     */
    public boolean deleteAll(String iPath) {
        final String nPath = new File(iPath).getPath();
        synchronized (LOCK) {
            DataStoreEntry entry = (DataStoreEntry)files.remove(nPath);
            if (entry == null) {
                // Delete root doesn't exist.
                return false;
            } else if (entry.isDirectory()) {
                // Delete root is a directory.
                boolean deleted = _deleteAll(nPath);
                if (files.get(databaseName) == null) {
                    // The service root has been deleted, which means that all
                    // the data has been deleted. Mark this store for removal.
                    deleteMe = true;
                }
                return deleted;
            } else {
                // Delete root is a file.
                entry.release();
                return true;
            }
        }
    }

    /**
     * Lists the children of the specified path.
     * <p>
     * NOTE(review): despite the name, this matches every path that starts
     * with {@code iPath + SEP}, i.e. all descendants, not only immediate
     * children; relative paths of grandchildren will contain separators.
     * Callers (e.g. {@code deleteEntry}'s emptiness check) work either way.
     *
     * @param iPath the directory to list the children of
     * @return An array with the relative paths of the children.
     */
    public String[] listChildren(String iPath) {
        // TODO: Disallow the empty string, or use databaseName?
        if (iPath.equals("")) {
            throw new IllegalArgumentException(
                    "The empty string is not a valid path");
        }
        String nPath = new File(iPath).getPath();
        // Make sure the search path ends with the separator.
        if (nPath.charAt(nPath.length() -1) != SEP) {
            nPath += SEP;
        }
        ArrayList children = new ArrayList();
        synchronized (LOCK) {
            Iterator paths = files.keySet().iterator();
            String candidate;
            while (paths.hasNext()) {
                candidate = (String)paths.next();
                if (candidate.startsWith(nPath)) {
                    children.add(candidate.substring(nPath.length()));
                }
            }
        }
        return (String[])children.toArray(EMPTY_STR_ARR);
    }

    /**
     * Moves / renames a file.
     *
     * @param currentFile the current file
     * @param newFile the new file
     * @return {@code true} if the file was moved, {@code false} if the new
     *      file already existed or the existing file doesn't exist.
     */
    public boolean move(StorageFile currentFile, StorageFile newFile) {
        final String currentPath = new File(currentFile.getPath()).getPath();
        final String newPath = new File(newFile.getPath()).getPath();
        synchronized (LOCK) {
            // Never overwrite an existing target.
            if (files.containsKey(newPath)) {
                return false;
            }
            DataStoreEntry current = (DataStoreEntry)
                    files.remove(currentPath);
            if (current == null) {
                return false;
            }
            files.put(newPath, current);
            return true;
        }
    }

    /**
     * Purges the database and releases all files associated with it.
     */
    public void purge() {
        synchronized (LOCK) {
            Iterator fileIter = files.values().iterator();
            while (fileIter.hasNext()) {
                ((DataStoreEntry)fileIter.next()).release();
            }
            // Clear all the mappings.
            files.clear();
        }
    }

    /**
     * Deletes every child of the root path specified.
     * <p>
     * Note that the root itself must be removed outside of this method.
     *
     * @param prefixPath the normalized root path to start deleting from
     * @return {@code true} if all children of the root path were deleted,
     *      {@code false} otherwise.
     */
    //@GuardedBy("LOCK")
    private boolean _deleteAll(String prefixPath) {
        // Make sure the search path ends with the separator.
        if (prefixPath.charAt(prefixPath.length() -1) != SEP) {
            prefixPath += SEP;
        }
        // Collect first, then remove: avoids mutating the map while its
        // key-set iterator is live.
        ArrayList toDelete = new ArrayList();
        Iterator paths = files.keySet().iterator();
        // Find all the entries to delete.
        while (paths.hasNext()) {
            String path = (String)paths.next();
            if (path.startsWith(prefixPath)) {
                toDelete.add(path);
            }
        }
        // Note that the root itself has already been removed before this
        // method was called. In this case, the root has to be a directory.
        // Iterate through all entries found and release them.
        Iterator keys = toDelete.iterator();
        while (keys.hasNext()) {
            DataStoreEntry entry = (DataStoreEntry)
                    files.remove((String)keys.next());
            entry.release();
        }
        return true;
    }

    /**
     * Returns an identifier for a temporary file.
     *
     * @return An integer uniquely identifying a temporary file.
     */
    public long getTempFileCounter() {
        synchronized (TMP_COUNTER_LOCK) {
            return ++tmpFileCounter;
        }
    }

    /**
     * Returns the list of parents for the specified path.
     * <p>
     * The lowest level parent is listed first in the list, so all absolute
     * paths will have the root listed as the last element.
     *
     * @param path the normalized path to create a parent list for
     * @return A list of parents.
     */
    private String[] getParentList(String path) {
        ArrayList parents = new ArrayList();
        String parent = path;
        // Build the list of parents.
        while ((parent = new File(parent).getParent()) != null) {
            parents.add(parent);
        }
        return (String[])parents.toArray(new String[parents.size()]);
    }
}
| |
/*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered.org <http://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.mod.event;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.base.Optional;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Multimap;
import com.google.common.reflect.TypeToken;
import com.google.inject.Inject;
import net.minecraftforge.fml.common.eventhandler.EventPriority;
import net.minecraftforge.fml.common.eventhandler.IEventListener;
import org.spongepowered.api.plugin.PluginContainer;
import org.spongepowered.api.plugin.PluginManager;
import org.spongepowered.api.service.event.EventManager;
import org.spongepowered.api.util.event.Cancellable;
import org.spongepowered.api.util.event.Event;
import org.spongepowered.api.util.event.Order;
import org.spongepowered.api.util.event.Subscribe;
import org.spongepowered.mod.SpongeMod;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import javax.annotation.Nullable;
/**
 * An {@link EventManager} implementation that registers Sponge {@code @Subscribe}
 * handlers and interleaves their invocation with Forge event listeners.
 *
 * <p>Thread-safety: mutation of the handler registry is guarded by {@link #lock};
 * the baked handler cache is invalidated whenever the registry changes.</p>
 */
public class SpongeEventBus implements EventManager {

    /** Guards {@link #handlersByEvent}; handler-cache baking re-acquires it. */
    private final Object lock = new Object();

    private final PluginManager pluginManager;
    private final HandlerFactory handlerFactory = new HandlerClassFactory("org.spongepowered.mod.event.handler");
    private final Multimap<Class<?>, RegisteredHandler> handlersByEvent = HashMultimap.create();

    /**
     * A cache of all the handlers for an event type for quick event posting.
     *
     * <p>
     * The cache is currently entirely invalidated if handlers are added or
     * removed.
     * </p>
     */
    private final LoadingCache<Class<?>, HandlerCache> handlersCache =
            CacheBuilder.newBuilder().build(new CacheLoader<Class<?>, HandlerCache>() {

                @Override
                public HandlerCache load(Class<?> type) throws Exception {
                    return bakeHandlers(type);
                }
            });

    /** Maps Forge priorities onto the Sponge orders they correspond to. */
    private final ImmutableMap<EventPriority, Order> priorityMappings = new ImmutableMap.Builder<EventPriority, Order>()
            .put(EventPriority.HIGHEST, Order.FIRST)
            .put(EventPriority.HIGH, Order.EARLY)
            .put(EventPriority.NORMAL, Order.DEFAULT)
            .put(EventPriority.LOW, Order.LATE)
            .put(EventPriority.LOWEST, Order.LAST)
            .build();

    /**
     * Creates a new event bus.
     *
     * @param pluginManager the plugin manager used to resolve plugin instances
     */
    @Inject
    public SpongeEventBus(PluginManager pluginManager) {
        checkNotNull(pluginManager, "pluginManager");
        this.pluginManager = pluginManager;
    }

    /**
     * Returns whether the given method has the shape of an event handler:
     * a non-static, non-abstract, concrete-class instance method returning
     * void and taking exactly one {@link Event} parameter.
     */
    private static boolean isValidHandler(Method method) {
        Class<?>[] paramTypes = method.getParameterTypes();
        return !Modifier.isStatic(method.getModifiers())
                && !Modifier.isAbstract(method.getModifiers())
                && !Modifier.isInterface(method.getDeclaringClass().getModifiers())
                && method.getReturnType() == void.class
                && paramTypes.length == 1
                && Event.class.isAssignableFrom(paramTypes[0]);
    }

    /**
     * Collects and sorts the handlers registered for the given event type and
     * all of its event supertypes.
     */
    @SuppressWarnings({"unchecked", "rawtypes"})
    private HandlerCache bakeHandlers(Class<?> rootType) {
        List<RegisteredHandler> registrations = Lists.newArrayList();
        Set<Class<?>> types = (Set) TypeToken.of(rootType).getTypes().rawTypes();
        synchronized (this.lock) {
            for (Class<?> type : types) {
                if (Event.class.isAssignableFrom(type)) {
                    registrations.addAll(this.handlersByEvent.get(type));
                }
            }
        }
        // Sorting happens outside the lock; RegisteredHandler is expected to
        // order by registration order/priority.
        Collections.sort(registrations);
        return new HandlerCache(registrations);
    }

    private HandlerCache getHandlerCache(Class<?> type) {
        return this.handlersCache.getUnchecked(type);
    }

    /**
     * Scans the given object for public methods annotated with
     * {@link Subscribe} and wraps each valid one in a {@link Subscriber}.
     * Invalid signatures are logged and skipped.
     */
    @SuppressWarnings("unchecked")
    private List<Subscriber> findAllSubscribers(Object object) {
        List<Subscriber> subscribers = Lists.newArrayList();
        Class<?> type = object.getClass();
        for (Method method : type.getMethods()) {
            @Nullable
            Subscribe subscribe = method.getAnnotation(Subscribe.class);
            if (subscribe != null) {
                Class<?>[] paramTypes = method.getParameterTypes();
                if (isValidHandler(method)) {
                    Class<Event> eventClass = (Class<Event>) paramTypes[0];
                    Handler handler = this.handlerFactory.createHandler(object, method, subscribe.ignoreCancelled());
                    subscribers.add(new Subscriber(eventClass, handler, subscribe.order()));
                } else {
                    SpongeMod.instance.getLogger().warn("The method {} on {} has @{} but has the wrong signature",
                            method, method.getDeclaringClass().getName(), Subscribe.class.getName());
                }
            }
        }
        return subscribers;
    }

    public boolean register(Class<?> type, Handler handler, Order order, PluginContainer container) {
        return register(new Subscriber(type, handler, order), container);
    }

    public boolean register(Subscriber subscriber, PluginContainer container) {
        return registerAll(Lists.newArrayList(subscriber), container);
    }

    /**
     * Registers all of the given subscribers for the given plugin, invalidating
     * the baked handler cache if anything actually changed.
     *
     * @return true if at least one handler was newly registered
     */
    private boolean registerAll(List<Subscriber> subscribers, PluginContainer container) {
        synchronized (this.lock) {
            boolean changed = false;
            for (Subscriber sub : subscribers) {
                if (this.handlersByEvent.put(sub.getEventClass(), new RegisteredHandler(sub.getHandler(), sub.getOrder(), container))) {
                    changed = true;
                }
            }
            if (changed) {
                this.handlersCache.invalidateAll();
            }
            return changed;
        }
    }

    public boolean unregister(Class<?> type, Handler handler) {
        return unregister(new Subscriber(type, handler));
    }

    public boolean unregister(Subscriber subscriber) {
        return unregisterAll(Lists.newArrayList(subscriber));
    }

    /**
     * Unregisters all of the given subscribers, invalidating the baked handler
     * cache if anything actually changed.
     *
     * @return true if at least one handler was removed
     */
    public boolean unregisterAll(List<Subscriber> subscribers) {
        synchronized (this.lock) {
            boolean changed = false;
            for (Subscriber sub : subscribers) {
                if (this.handlersByEvent.remove(sub.getEventClass(), RegisteredHandler.createForComparison(sub.getHandler()))) {
                    changed = true;
                }
            }
            if (changed) {
                this.handlersCache.invalidateAll();
            }
            return changed;
        }
    }

    /** Invokes a single handler, logging (not propagating) anything it throws. */
    private void callListener(Handler handler, Event event) {
        try {
            handler.handle(event);
        } catch (Throwable t) {
            SpongeMod.instance.getLogger().warn("A handler raised an error when handling an event", t);
        }
    }

    public void register(PluginContainer container, Object object) {
        checkNotNull(container, "plugin");
        checkNotNull(object, "object");
        registerAll(findAllSubscribers(object), container);
    }

    @Override
    public void register(Object plugin, Object object) {
        checkNotNull(plugin, "plugin");
        checkNotNull(object, "object");
        Optional<PluginContainer> container = this.pluginManager.fromInstance(plugin);
        if (!container.isPresent()) {
            throw new IllegalArgumentException("The specified object is not a plugin object");
        }
        registerAll(findAllSubscribers(object), container.get());
    }

    @Override
    public void unregister(Object object) {
        checkNotNull(object, "object");
        unregisterAll(findAllSubscribers(object));
    }

    /**
     * Posts a Forge event, interleaving Sponge handlers with the Forge
     * listeners according to the priority markers embedded in the listener
     * array (Forge inserts {@link EventPriority} sentinels between priority
    * groups).
     *
     * @return true if the event is cancelable and was cancelled
     */
    public boolean post(net.minecraftforge.fml.common.eventhandler.Event forgeEvent, IEventListener[] listeners) {
        checkNotNull(forgeEvent, "forgeEvent");
        Order orderStart = Order.PRE;
        HandlerCache handlerCache = getHandlerCache(forgeEvent.getClass());
        for (IEventListener listener : listeners) {
            if (listener instanceof EventPriority) {
                Order order = this.priorityMappings.get(listener);
                // Run the Sponge handlers whose order falls between the point the
                // previous marker left off and the order mapped to this priority.
                // Starting at orderStart (rather than 0) prevents earlier-order
                // handlers from being invoked once per priority marker.
                for (int orderIndex = orderStart.ordinal(); orderIndex <= order.ordinal(); orderIndex++) {
                    Order currentOrder = Order.values()[orderIndex];
                    for (Handler handler : handlerCache.getHandlersByOrder(currentOrder)) {
                        callListener(handler, (Event) forgeEvent);
                    }
                }
                orderStart = Order.values()[order.ordinal() + 1];
            }
            try {
                listener.invoke(forgeEvent);
            } catch (Throwable throwable) {
                SpongeMod.instance.getLogger().catching(throwable);
            }
        }
        // Flush any remaining Sponge handlers up to and including POST.
        for (int orderIndex = orderStart.ordinal(); orderIndex <= Order.POST.ordinal(); orderIndex++) {
            Order currentOrder = Order.values()[orderIndex];
            for (Handler handler : handlerCache.getHandlersByOrder(currentOrder)) {
                callListener(handler, (Event) forgeEvent);
            }
        }
        return (forgeEvent.isCancelable() ? forgeEvent.isCanceled() : false);
    }

    /**
     * Posts an event to every registered handler regardless of order.
     *
     * @return true if the event is cancellable and was cancelled
     */
    @Override
    public boolean post(Event event) {
        checkNotNull(event, "event");
        for (Handler handler : getHandlerCache(event.getClass()).getHandlers()) {
            callListener(handler, event);
        }
        return event instanceof Cancellable && ((Cancellable) event).isCancelled();
    }

    /**
     * Posts an event only to the handlers registered for the given order.
     *
     * @return true if the event is cancellable and was cancelled
     */
    public boolean post(Event event, Order order) {
        checkNotNull(event, "event");
        // Fixed: previously validated `event` twice, leaving `order` unchecked.
        checkNotNull(order, "order");
        for (Handler handler : getHandlerCache(event.getClass()).getHandlersByOrder(order)) {
            callListener(handler, event);
        }
        return event instanceof Cancellable && ((Cancellable) event).isCancelled();
    }
}
| |
package org.apache.maven.repository.internal;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.commons.lang3.Validate;
import org.apache.maven.artifact.repository.metadata.Snapshot;
import org.apache.maven.artifact.repository.metadata.SnapshotVersion;
import org.apache.maven.artifact.repository.metadata.Versioning;
import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Reader;
import org.codehaus.plexus.component.annotations.Component;
import org.codehaus.plexus.component.annotations.Requirement;
import org.codehaus.plexus.util.IOUtil;
import org.codehaus.plexus.util.StringUtils;
import org.eclipse.aether.RepositoryCache;
import org.eclipse.aether.RepositoryEvent;
import org.eclipse.aether.RepositoryEvent.EventType;
import org.eclipse.aether.RepositorySystemSession;
import org.eclipse.aether.RequestTrace;
import org.eclipse.aether.SyncContext;
import org.eclipse.aether.artifact.Artifact;
import org.eclipse.aether.impl.MetadataResolver;
import org.eclipse.aether.impl.RepositoryEventDispatcher;
import org.eclipse.aether.impl.SyncContextFactory;
import org.eclipse.aether.impl.VersionResolver;
import org.eclipse.aether.internal.impl.CacheUtils;
import org.eclipse.aether.metadata.DefaultMetadata;
import org.eclipse.aether.metadata.Metadata;
import org.eclipse.aether.repository.ArtifactRepository;
import org.eclipse.aether.repository.LocalRepository;
import org.eclipse.aether.repository.RemoteRepository;
import org.eclipse.aether.repository.WorkspaceReader;
import org.eclipse.aether.repository.WorkspaceRepository;
import org.eclipse.aether.resolution.MetadataRequest;
import org.eclipse.aether.resolution.MetadataResult;
import org.eclipse.aether.resolution.VersionRequest;
import org.eclipse.aether.resolution.VersionResolutionException;
import org.eclipse.aether.resolution.VersionResult;
import org.eclipse.aether.spi.locator.Service;
import org.eclipse.aether.spi.locator.ServiceLocator;
import org.eclipse.aether.spi.log.Logger;
import org.eclipse.aether.spi.log.LoggerFactory;
import org.eclipse.aether.spi.log.NullLoggerFactory;
import org.eclipse.aether.util.ConfigUtils;
import javax.inject.Inject;
import javax.inject.Named;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author Benjamin Bentmann
*/
/**
 * Resolves meta-versions ({@code RELEASE}, {@code LATEST}, {@code *-SNAPSHOT})
 * to concrete artifact versions using repository metadata, with optional
 * session-cache support.
 *
 * @author Benjamin Bentmann
 */
@Named
@Component( role = VersionResolver.class )
public class DefaultVersionResolver
    implements VersionResolver, Service
{

    private static final String MAVEN_METADATA_XML = "maven-metadata.xml";

    private static final String RELEASE = "RELEASE";

    private static final String LATEST = "LATEST";

    private static final String SNAPSHOT = "SNAPSHOT";

    @SuppressWarnings( "unused" )
    @Requirement( role = LoggerFactory.class )
    private Logger logger = NullLoggerFactory.LOGGER;

    @Requirement
    private MetadataResolver metadataResolver;

    @Requirement
    private SyncContextFactory syncContextFactory;

    @Requirement
    private RepositoryEventDispatcher repositoryEventDispatcher;

    public DefaultVersionResolver()
    {
        // enable no-arg constructor
    }

    @Inject
    DefaultVersionResolver( MetadataResolver metadataResolver, SyncContextFactory syncContextFactory,
                            RepositoryEventDispatcher repositoryEventDispatcher, LoggerFactory loggerFactory )
    {
        setMetadataResolver( metadataResolver );
        setSyncContextFactory( syncContextFactory );
        setLoggerFactory( loggerFactory );
        setRepositoryEventDispatcher( repositoryEventDispatcher );
    }

    public void initService( ServiceLocator locator )
    {
        setLoggerFactory( locator.getService( LoggerFactory.class ) );
        setMetadataResolver( locator.getService( MetadataResolver.class ) );
        setSyncContextFactory( locator.getService( SyncContextFactory.class ) );
        setRepositoryEventDispatcher( locator.getService( RepositoryEventDispatcher.class ) );
    }

    public DefaultVersionResolver setLoggerFactory( LoggerFactory loggerFactory )
    {
        this.logger = NullLoggerFactory.getSafeLogger( loggerFactory, getClass() );
        return this;
    }

    void setLogger( LoggerFactory loggerFactory )
    {
        // plexus support
        setLoggerFactory( loggerFactory );
    }

    public DefaultVersionResolver setMetadataResolver( MetadataResolver metadataResolver )
    {
        this.metadataResolver = Validate.notNull( metadataResolver, "metadataResolver cannot be null" );
        return this;
    }

    public DefaultVersionResolver setSyncContextFactory( SyncContextFactory syncContextFactory )
    {
        this.syncContextFactory = Validate.notNull( syncContextFactory, "syncContextFactory cannot be null" );
        return this;
    }

    public DefaultVersionResolver setRepositoryEventDispatcher( RepositoryEventDispatcher repositoryEventDispatcher )
    {
        this.repositoryEventDispatcher = Validate.notNull( repositoryEventDispatcher,
                                                           "repositoryEventDispatcher cannot be null" );
        return this;
    }

    /**
     * Resolves the request's version, translating RELEASE/LATEST/SNAPSHOT
     * meta-versions via repository metadata. Results for cacheable artifacts
     * are stored in the session cache unless disabled via
     * {@code aether.versionResolver.noCache}.
     *
     * @throws VersionResolutionException if no version could be determined
     */
    public VersionResult resolveVersion( RepositorySystemSession session, VersionRequest request )
        throws VersionResolutionException
    {
        RequestTrace trace = RequestTrace.newChild( request.getTrace(), request );

        Artifact artifact = request.getArtifact();

        String version = artifact.getVersion();

        VersionResult result = new VersionResult( request );

        Key cacheKey = null;
        RepositoryCache cache = session.getCache();
        if ( cache != null && !ConfigUtils.getBoolean( session, false, "aether.versionResolver.noCache" ) )
        {
            cacheKey = new Key( session, request );

            Object obj = cache.get( session, cacheKey );
            if ( obj instanceof Record )
            {
                Record record = (Record) obj;
                result.setVersion( record.version );
                result.setRepository(
                    CacheUtils.getRepository( session, request.getRepositories(), record.repoClass, record.repoId ) );
                return result;
            }
        }

        Metadata metadata;

        if ( RELEASE.equals( version ) )
        {
            metadata = new DefaultMetadata( artifact.getGroupId(), artifact.getArtifactId(), MAVEN_METADATA_XML,
                                            Metadata.Nature.RELEASE );
        }
        else if ( LATEST.equals( version ) )
        {
            metadata = new DefaultMetadata( artifact.getGroupId(), artifact.getArtifactId(), MAVEN_METADATA_XML,
                                            Metadata.Nature.RELEASE_OR_SNAPSHOT );
        }
        else if ( version.endsWith( SNAPSHOT ) )
        {
            // A workspace (reactor) copy of the snapshot takes precedence over
            // any repository metadata.
            WorkspaceReader workspace = session.getWorkspaceReader();
            if ( workspace != null && workspace.findVersions( artifact ).contains( version ) )
            {
                metadata = null;
                result.setRepository( workspace.getRepository() );
            }
            else
            {
                metadata =
                    new DefaultMetadata( artifact.getGroupId(), artifact.getArtifactId(), version, MAVEN_METADATA_XML,
                                        Metadata.Nature.SNAPSHOT );
            }
        }
        else
        {
            // Already a concrete version; no metadata lookup needed.
            metadata = null;
        }

        if ( metadata == null )
        {
            result.setVersion( version );
        }
        else
        {
            // Resolve metadata from the local repository plus each remote.
            List<MetadataRequest> metadataReqs = new ArrayList<>( request.getRepositories().size() );

            metadataReqs.add( new MetadataRequest( metadata, null, request.getRequestContext() ) );

            for ( RemoteRepository repository : request.getRepositories() )
            {
                MetadataRequest metadataRequest =
                    new MetadataRequest( metadata, repository, request.getRequestContext() );
                metadataRequest.setDeleteLocalCopyIfMissing( true );
                metadataRequest.setFavorLocalRepository( true );
                metadataRequest.setTrace( trace );
                metadataReqs.add( metadataRequest );
            }

            List<MetadataResult> metadataResults = metadataResolver.resolveMetadata( session, metadataReqs );

            Map<String, VersionInfo> infos = new HashMap<>();

            for ( MetadataResult metadataResult : metadataResults )
            {
                result.addException( metadataResult.getException() );

                ArtifactRepository repository = metadataResult.getRequest().getRepository();
                if ( repository == null )
                {
                    repository = session.getLocalRepository();
                }

                Versioning v = readVersions( session, trace, metadataResult.getMetadata(), repository, result );
                merge( artifact, infos, v, repository );
            }

            if ( RELEASE.equals( version ) )
            {
                resolve( result, infos, RELEASE );
            }
            else if ( LATEST.equals( version ) )
            {
                // Prefer the latest snapshot-or-release; fall back to release.
                if ( !resolve( result, infos, LATEST ) )
                {
                    resolve( result, infos, RELEASE );
                }

                if ( result.getVersion() != null && result.getVersion().endsWith( SNAPSHOT ) )
                {
                    // LATEST resolved to a snapshot base version; recurse to
                    // obtain the timestamped snapshot version from its own repo.
                    VersionRequest subRequest = new VersionRequest();
                    subRequest.setArtifact( artifact.setVersion( result.getVersion() ) );
                    if ( result.getRepository() instanceof RemoteRepository )
                    {
                        RemoteRepository r = (RemoteRepository) result.getRepository();
                        subRequest.setRepositories( Collections.singletonList( r ) );
                    }
                    else
                    {
                        subRequest.setRepositories( request.getRepositories() );
                    }
                    VersionResult subResult = resolveVersion( session, subRequest );
                    result.setVersion( subResult.getVersion() );
                    result.setRepository( subResult.getRepository() );
                    for ( Exception exception : subResult.getExceptions() )
                    {
                        result.addException( exception );
                    }
                }
            }
            else
            {
                String key = SNAPSHOT + getKey( artifact.getClassifier(), artifact.getExtension() );
                merge( infos, SNAPSHOT, key );
                if ( !resolve( result, infos, key ) )
                {
                    // No timestamped build found; keep the base -SNAPSHOT version.
                    result.setVersion( version );
                }
            }

            if ( StringUtils.isEmpty( result.getVersion() ) )
            {
                throw new VersionResolutionException( result );
            }
        }

        if ( cacheKey != null && metadata != null && isSafelyCacheable( session, artifact ) )
        {
            cache.put( session, cacheKey, new Record( result.getVersion(), result.getRepository() ) );
        }

        return result;
    }

    /**
     * Copies the {@link VersionInfo} stored under {@code key} into the result.
     *
     * @return true if an entry existed for the key
     */
    private boolean resolve( VersionResult result, Map<String, VersionInfo> infos, String key )
    {
        VersionInfo info = infos.get( key );
        if ( info != null )
        {
            result.setVersion( info.version );
            result.setRepository( info.repository );
        }
        return info != null;
    }

    /**
     * Reads the versioning section of the given resolved metadata file.
     * Any failure (including the deliberately thrown corruption error below)
     * is reported via {@link #invalidMetadata} and recorded on the result.
     *
     * @return the parsed versioning, never {@code null}
     */
    private Versioning readVersions( RepositorySystemSession session, RequestTrace trace, Metadata metadata,
                                     ArtifactRepository repository, VersionResult result )
    {
        Versioning versioning = null;

        try
        {
            if ( metadata != null )
            {
                try ( SyncContext syncContext = syncContextFactory.newInstance( session, true ) )
                {
                    syncContext.acquire( null, Collections.singleton( metadata ) );

                    if ( metadata.getFile() != null && metadata.getFile().exists() )
                    {
                        // try-with-resources replaces the former manual
                        // IOUtil.close( fis ) in a finally block.
                        try ( FileInputStream fis = new FileInputStream( metadata.getFile() ) )
                        {
                            org.apache.maven.artifact.repository.metadata.Metadata m =
                                new MetadataXpp3Reader().read( fis, false );
                            versioning = m.getVersioning();

                            /*
                             * NOTE: Users occasionally misuse the id "local" for remote repos which screws up the metadata
                             * of the local repository. This is especially troublesome during snapshot resolution so we try
                             * to handle that gracefully.
                             */
                            if ( versioning != null && repository instanceof LocalRepository )
                            {
                                if ( versioning.getSnapshot() != null && versioning.getSnapshot().getBuildNumber() > 0 )
                                {
                                    Versioning repaired = new Versioning();
                                    repaired.setLastUpdated( versioning.getLastUpdated() );
                                    Snapshot snapshot = new Snapshot();
                                    snapshot.setLocalCopy( true );
                                    repaired.setSnapshot( snapshot );
                                    versioning = repaired;
                                    // Intentionally thrown so the corruption is surfaced
                                    // through the catch block below while the repaired
                                    // versioning is still returned to the caller.
                                    throw new IOException( "Snapshot information corrupted with remote repository data"
                                        + ", please verify that no remote repository uses the id '"
                                        + repository.getId() + "'" );
                                }
                            }
                        }
                    }
                }
            }
        }
        catch ( Exception e )
        {
            invalidMetadata( session, trace, metadata, repository, e );
            result.addException( e );
        }

        return ( versioning != null ) ? versioning : new Versioning();
    }

    /** Fires a METADATA_INVALID repository event for the given failure. */
    private void invalidMetadata( RepositorySystemSession session, RequestTrace trace, Metadata metadata,
                                  ArtifactRepository repository, Exception exception )
    {
        RepositoryEvent.Builder event = new RepositoryEvent.Builder( session, EventType.METADATA_INVALID );
        event.setTrace( trace );
        event.setMetadata( metadata );
        event.setException( exception );
        event.setRepository( repository );

        repositoryEventDispatcher.dispatch( event.build() );
    }

    /**
     * Merges the versioning information from one repository's metadata into the
     * accumulated per-key {@link VersionInfo} map, keeping the newest entry.
     */
    private void merge( Artifact artifact, Map<String, VersionInfo> infos, Versioning versioning,
                        ArtifactRepository repository )
    {
        if ( StringUtils.isNotEmpty( versioning.getRelease() ) )
        {
            merge( RELEASE, infos, versioning.getLastUpdated(), versioning.getRelease(), repository );
        }

        if ( StringUtils.isNotEmpty( versioning.getLatest() ) )
        {
            merge( LATEST, infos, versioning.getLastUpdated(), versioning.getLatest(), repository );
        }

        for ( SnapshotVersion sv : versioning.getSnapshotVersions() )
        {
            if ( StringUtils.isNotEmpty( sv.getVersion() ) )
            {
                String key = getKey( sv.getClassifier(), sv.getExtension() );
                merge( SNAPSHOT + key, infos, sv.getUpdated(), sv.getVersion(), repository );
            }
        }

        Snapshot snapshot = versioning.getSnapshot();
        if ( snapshot != null && versioning.getSnapshotVersions().isEmpty() )
        {
            // Legacy metadata without per-classifier snapshot versions: derive
            // the timestamped version from the snapshot element directly.
            String version = artifact.getVersion();
            if ( snapshot.getTimestamp() != null && snapshot.getBuildNumber() > 0 )
            {
                String qualifier = snapshot.getTimestamp() + '-' + snapshot.getBuildNumber();
                version = version.substring( 0, version.length() - SNAPSHOT.length() ) + qualifier;
            }
            merge( SNAPSHOT, infos, versioning.getLastUpdated(), version, repository );
        }
    }

    /** Stores or refreshes the entry under {@code key} if the new data is newer. */
    private void merge( String key, Map<String, VersionInfo> infos, String timestamp, String version,
                        ArtifactRepository repository )
    {
        VersionInfo info = infos.get( key );
        if ( info == null )
        {
            info = new VersionInfo( timestamp, version, repository );
            infos.put( key, info );
        }
        else if ( info.isOutdated( timestamp ) )
        {
            info.version = version;
            info.repository = repository;
            info.timestamp = timestamp;
        }
    }

    /** Copies {@code srcKey}'s entry over {@code dstKey}'s when it is newer or missing. */
    private void merge( Map<String, VersionInfo> infos, String srcKey, String dstKey )
    {
        VersionInfo srcInfo = infos.get( srcKey );
        VersionInfo dstInfo = infos.get( dstKey );

        if ( dstInfo == null || ( srcInfo != null && dstInfo.isOutdated( srcInfo.timestamp )
            && srcInfo.repository != dstInfo.repository ) )
        {
            infos.put( dstKey, srcInfo );
        }
    }

    /** Builds the classifier:extension discriminator used for snapshot keys. */
    private String getKey( String classifier, String extension )
    {
        return StringUtils.clean( classifier ) + ':' + StringUtils.clean( extension );
    }

    /**
     * Determines whether a resolution result may safely be cached, i.e. the
     * artifact is not provided by the (mutable) workspace/reactor.
     */
    private boolean isSafelyCacheable( RepositorySystemSession session, Artifact artifact )
    {
        /*
         * The workspace/reactor is in flux so we better not assume definitive information for any of its
         * artifacts/projects.
         */

        WorkspaceReader workspace = session.getWorkspaceReader();
        if ( workspace == null )
        {
            return true;
        }

        Artifact pomArtifact = ArtifactDescriptorUtils.toPomArtifact( artifact );

        return workspace.findArtifact( pomArtifact ) == null;
    }

    /** Mutable holder for the newest known version under a given key. */
    private static class VersionInfo
    {

        String timestamp;

        String version;

        ArtifactRepository repository;

        public VersionInfo( String timestamp, String version, ArtifactRepository repository )
        {
            this.timestamp = ( timestamp != null ) ? timestamp : "";
            this.version = version;
            this.repository = repository;
        }

        public boolean isOutdated( String timestamp )
        {
            return timestamp != null && timestamp.compareTo( this.timestamp ) > 0;
        }

    }

    /** Session-cache key covering everything the resolution outcome depends on. */
    private static class Key
    {

        private final String groupId;

        private final String artifactId;

        private final String classifier;

        private final String extension;

        private final String version;

        private final String context;

        private final File localRepo;

        private final WorkspaceRepository workspace;

        private final List<RemoteRepository> repositories;

        private final int hashCode;

        public Key( RepositorySystemSession session, VersionRequest request )
        {
            Artifact artifact = request.getArtifact();
            groupId = artifact.getGroupId();
            artifactId = artifact.getArtifactId();
            classifier = artifact.getClassifier();
            extension = artifact.getExtension();
            version = artifact.getVersion();
            localRepo = session.getLocalRepository().getBasedir();
            workspace = CacheUtils.getWorkspace( session );
            repositories = new ArrayList<>( request.getRepositories().size() );
            boolean repoMan = false;
            for ( RemoteRepository repository : request.getRepositories() )
            {
                if ( repository.isRepositoryManager() )
                {
                    repoMan = true;
                    repositories.addAll( repository.getMirroredRepositories() );
                }
                else
                {
                    repositories.add( repository );
                }
            }
            // The request context only matters when a repository manager is
            // involved, since its contents can vary per context.
            context = repoMan ? request.getRequestContext() : "";

            int hash = 17;
            hash = hash * 31 + groupId.hashCode();
            hash = hash * 31 + artifactId.hashCode();
            hash = hash * 31 + classifier.hashCode();
            hash = hash * 31 + extension.hashCode();
            hash = hash * 31 + version.hashCode();
            hash = hash * 31 + localRepo.hashCode();
            hash = hash * 31 + CacheUtils.repositoriesHashCode( repositories );
            hashCode = hash;
        }

        @Override
        public boolean equals( Object obj )
        {
            if ( obj == this )
            {
                return true;
            }
            else if ( obj == null || !getClass().equals( obj.getClass() ) )
            {
                return false;
            }

            Key that = (Key) obj;
            return artifactId.equals( that.artifactId ) && groupId.equals( that.groupId ) && classifier.equals(
                that.classifier ) && extension.equals( that.extension ) && version.equals( that.version )
                && context.equals( that.context ) && localRepo.equals( that.localRepo )
                && CacheUtils.eq( workspace, that.workspace )
                && CacheUtils.repositoriesEquals( repositories, that.repositories );
        }

        @Override
        public int hashCode()
        {
            return hashCode;
        }

    }

    /** Compact cache value: the resolved version plus enough to re-find the repo. */
    private static class Record
    {
        final String version;

        final String repoId;

        final Class<?> repoClass;

        public Record( String version, ArtifactRepository repository )
        {
            this.version = version;
            if ( repository != null )
            {
                repoId = repository.getId();
                repoClass = repository.getClass();
            }
            else
            {
                repoId = null;
                repoClass = null;
            }
        }
    }

}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.lipstick;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.StringReader;
import java.text.ParseException;
import java.util.AbstractList;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.jar.Attributes;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import jline.ConsoleReader;
import jline.ConsoleReaderInputStream;
import jline.History;
import org.antlr.runtime.RecognitionException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;
import org.apache.pig.LoadFunc;
import org.apache.pig.PigException;
import org.apache.pig.PigConfiguration;
import org.apache.pig.PigRunner.ReturnCode;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.backend.hadoop.datastorage.ConfigurationUtil;
import org.apache.pig.classification.InterfaceAudience;
import org.apache.pig.classification.InterfaceStability;
import org.apache.pig.impl.PigContext;
import org.apache.pig.impl.PigImplConstants;
import org.apache.pig.impl.io.FileLocalizer;
import org.apache.pig.impl.util.JarManager;
import org.apache.pig.impl.util.LogUtils;
import org.apache.pig.impl.util.ObjectSerializer;
import org.apache.pig.impl.util.PropertiesUtil;
import org.apache.pig.impl.util.UDFContext;
import org.apache.pig.impl.util.Utils;
import org.apache.pig.parser.DryRunGruntParser;
import org.apache.pig.scripting.ScriptEngine;
import org.apache.pig.scripting.ScriptEngine.SupportedScriptLang;
import org.apache.pig.tools.cmdline.CmdLineParser;
import org.apache.pig.tools.grunt.LipstickGrunt;
import org.apache.pig.tools.pigstats.PigProgressNotificationListener;
import org.apache.pig.tools.pigstats.PigStats;
import org.apache.pig.tools.pigstats.PigStatsUtil;
import org.apache.pig.tools.pigstats.ScriptState;
import org.apache.pig.tools.timer.PerformanceTimerFactory;
import com.google.common.io.CharStreams;
/**
* Main class for Pig engine.
*/
@InterfaceAudience.LimitedPrivate({"Oozie"})
@InterfaceStability.Stable
public class Main {
private final static Log log = LogFactory.getLog(Main.class);
// Long-form names of command-line options (see the CmdLineParser registrations in run()).
private static final String LOG4J_CONF = "log4jconf";
private static final String BRIEF = "brief";
private static final String DEBUG = "debug";
private static final String VERBOSE = "verbose";
// Version/build metadata parsed from the jar manifest in the static
// initializer below; all remain null when no manifest is available.
private static final String version;
private static final String majorVersion;
private static final String minorVersion;
private static final String patchVersion;
private static final String svnRevision;
private static final String buildTime;
// How the Pig script to execute was supplied on the command line.
private enum ExecMode {STRING, FILE, SHELL, UNKNOWN}
// Property key toggling the filter logic expression simplifier.
private static final String PROP_FILT_SIMPL_OPT
= "pig.exec.filterLogicExpressionSimplifier";
// Prints the Lipstick build banner bundled with the jar at class-load time.
static {
try {
log.info("\n"
+ CharStreams.toString(new InputStreamReader(Main.class.getResourceAsStream("/lipstick_build.txt"),
"UTF-8")));
} catch (IOException e) {
// Intentionally ignored: the banner is purely informational and its
// absence must not prevent Pig from starting.
}
}
// Property keys used to attach a progress-notification listener (and its
// constructor argument) via the job configuration.
protected static final String PROGRESS_NOTIFICATION_LISTENER_KEY = "pig.notification.listener";
protected static final String PROGRESS_NOTIFICATION_LISTENER_ARG_KEY = "pig.notification.listener.arg";
// Populates the version/build fields from the org/apache/pig section of the
// containing jar's manifest; leaves them null when not running from a jar
// or when the manifest cannot be read.
static {
Attributes attr=null;
try {
String findContainingJar = JarManager.findContainingJar(Main.class);
if (findContainingJar != null) {
JarFile jar = new JarFile(findContainingJar);
final Manifest manifest = jar.getManifest();
final Map<String,Attributes> attrs = manifest.getEntries();
attr = attrs.get("org/apache/pig");
} else {
log.info("Unable to read pigs manifest file as we are not running from a jar, version information unavailable");
}
} catch (Exception e) {
log.warn("Unable to read pigs manifest file, version information unavailable", e);
}
if (attr!=null) {
version = attr.getValue("Implementation-Version");
svnRevision = attr.getValue("Svn-Revision");
buildTime = attr.getValue("Build-TimeStamp");
// Implementation-Version is expected to be "major.minor.patch".
String[] split = version.split("\\.");
majorVersion=split[0];
minorVersion=split[1];
patchVersion=split[2];
} else {
version=null;
majorVersion=null;
minorVersion=null;
patchVersion=null;
svnRevision=null;
buildTime=null;
}
}
/**
 * The Main-Class for the Pig Jar that will provide a shell and setup a classpath
 * appropriate for executing Jar files. Warning, this method calls System.exit().
 *
 * @param args
 *            -jar can be used to add additional jar files (colon separated). - will start a
 *            shell. -e will execute the rest of the command line as if it was input to the
 *            shell.
 */
public static void main(String args[]) {
    // Delegate to run() with no pre-built listener, then propagate its
    // return code as the process exit status.
    final int exitCode = run(args, null);
    System.exit(exitCode);
}
/**
 * Parses the Pig command line and executes the requested work: an inline
 * command string (-e), a script file (-f or a bare trailing argument), or an
 * interactive grunt shell. Also configures properties, log4j, parameter
 * substitution, and registers a progress listener (Lipstick's own listener
 * by default).
 *
 * @param args raw command-line arguments, as passed to {@link #main}
 * @param listener progress listener to register; when null one is created
 *        from the "pig.notification.listener" property via makeListener
 * @return one of the ReturnCode constants describing the outcome
 */
static int run(String args[], PigProgressNotificationListener listener) {
int rc = 1;
boolean verbose = false;
boolean gruntCalled = false;
boolean deleteTempFiles = true;
String logFileName = null;
try {
// Load Pig's default properties, then overlay Hadoop generic options.
Configuration conf = new Configuration(false);
GenericOptionsParser parser = new GenericOptionsParser(conf, args);
conf = parser.getConfiguration();
Properties properties = new Properties();
PropertiesUtil.loadDefaultProperties(properties);
properties.putAll(ConfigurationUtil.toProperties(conf));
if (listener == null) {
listener = makeListener(properties);
}
String[] pigArgs = parser.getRemainingArgs();
boolean userSpecifiedLog = false;
boolean checkScriptOnly = false;
BufferedReader pin = null;
boolean debug = false;
boolean dryrun = false;
boolean embedded = false;
List<String> params = new ArrayList<String>();
List<String> paramFiles = new ArrayList<String>();
HashSet<String> disabledOptimizerRules = new HashSet<String>();
// Register every supported option (short flag, long name, value arity).
CmdLineParser opts = new CmdLineParser(pigArgs);
opts.registerOpt('4', "log4jconf", CmdLineParser.ValueExpected.REQUIRED);
opts.registerOpt('b', "brief", CmdLineParser.ValueExpected.NOT_ACCEPTED);
opts.registerOpt('c', "check", CmdLineParser.ValueExpected.NOT_ACCEPTED);
opts.registerOpt('d', "debug", CmdLineParser.ValueExpected.REQUIRED);
opts.registerOpt('e', "execute", CmdLineParser.ValueExpected.NOT_ACCEPTED);
opts.registerOpt('f', "file", CmdLineParser.ValueExpected.REQUIRED);
opts.registerOpt('g', "embedded", CmdLineParser.ValueExpected.REQUIRED);
opts.registerOpt('h', "help", CmdLineParser.ValueExpected.OPTIONAL);
opts.registerOpt('i', "version", CmdLineParser.ValueExpected.OPTIONAL);
opts.registerOpt('l', "logfile", CmdLineParser.ValueExpected.REQUIRED);
opts.registerOpt('m', "param_file", CmdLineParser.ValueExpected.OPTIONAL);
opts.registerOpt('p', "param", CmdLineParser.ValueExpected.OPTIONAL);
opts.registerOpt('r', "dryrun", CmdLineParser.ValueExpected.NOT_ACCEPTED);
opts.registerOpt('t', "optimizer_off", CmdLineParser.ValueExpected.REQUIRED);
opts.registerOpt('v', "verbose", CmdLineParser.ValueExpected.NOT_ACCEPTED);
opts.registerOpt('w', "warning", CmdLineParser.ValueExpected.NOT_ACCEPTED);
opts.registerOpt('x', "exectype", CmdLineParser.ValueExpected.REQUIRED);
opts.registerOpt('F', "stop_on_failure", CmdLineParser.ValueExpected.NOT_ACCEPTED);
opts.registerOpt('M', "no_multiquery", CmdLineParser.ValueExpected.NOT_ACCEPTED);
opts.registerOpt('N', "no_fetch", CmdLineParser.ValueExpected.NOT_ACCEPTED);
opts.registerOpt('P', "propertyFile", CmdLineParser.ValueExpected.REQUIRED);
ExecMode mode = ExecMode.UNKNOWN;
String file = null;
String engine = null;
// set up client side system properties in UDF context
UDFContext.getUDFContext().setClientSystemProps(properties);
// Consume the options; mode/file/engine record what to execute afterwards.
char opt;
while ((opt = opts.getNextOpt()) != CmdLineParser.EndOfOpts) {
switch (opt) {
case '4':
String log4jconf = opts.getValStr();
if(log4jconf != null){
properties.setProperty(LOG4J_CONF, log4jconf);
}
break;
case 'b':
properties.setProperty(BRIEF, "true");
break;
case 'c':
checkScriptOnly = true;
break;
case 'd':
String logLevel = opts.getValStr();
if (logLevel != null) {
properties.setProperty(DEBUG, logLevel);
}
debug = true;
break;
case 'e':
mode = ExecMode.STRING;
break;
case 'f':
mode = ExecMode.FILE;
file = opts.getValStr();
break;
case 'g':
embedded = true;
engine = opts.getValStr();
break;
case 'F':
properties.setProperty("stop.on.failure", ""+true);
break;
case 'h':
// -h [topic]: print help (optionally for a topic) and exit successfully.
String topic = opts.getValStr();
if (topic != null)
if (topic.equalsIgnoreCase("properties"))
printProperties();
else{
System.out.println("Invalide help topic - " + topic);
usage();
}
else
usage();
return ReturnCode.SUCCESS;
case 'i':
System.out.println(getVersionString());
return ReturnCode.SUCCESS;
case 'l':
//call to method that validates the path to the log file
//and sets up the file to store the client side log file
String logFileParameter = opts.getValStr();
if (logFileParameter != null && logFileParameter.length() > 0) {
logFileName = validateLogFile(logFileParameter, null);
} else {
logFileName = validateLogFile(logFileName, null);
}
userSpecifiedLog = true;
properties.setProperty("pig.logfile", (logFileName == null? "": logFileName));
break;
case 'm':
paramFiles.add(opts.getValStr());
break;
case 'M':
// turns off multiquery optimization
properties.setProperty("opt.multiquery",""+false);
break;
case 'N':
properties.setProperty(PigConfiguration.OPT_FETCH,""+false);
break;
case 'p':
params.add(opts.getValStr());
break;
case 'r':
// currently only used for parameter substitution
// will be extended in the future
dryrun = true;
break;
case 't':
disabledOptimizerRules.add(opts.getValStr());
break;
case 'v':
properties.setProperty(VERBOSE, ""+true);
verbose = true;
break;
case 'w':
properties.setProperty("aggregate.warning", ""+false);
break;
case 'x':
properties.setProperty("exectype", opts.getValStr());
break;
case 'P':
{
// -P file: merge an extra properties file into the effective properties.
InputStream inputStream = null;
try {
FileLocalizer.FetchFileRet localFileRet = FileLocalizer.fetchFile(properties, opts.getValStr());
inputStream = new BufferedInputStream(new FileInputStream(localFileRet.file));
properties.load(inputStream) ;
} catch (IOException e) {
throw new RuntimeException("Unable to parse properties file '" + opts.getValStr() + "'");
} finally {
if (inputStream != null) {
try {
inputStream.close();
} catch (IOException e) {
}
}
}
}
break;
default: {
Character cc = Character.valueOf(opt);
throw new AssertionError("Unhandled option " + cc.toString());
}
}
}
// create the context with the parameter
PigContext pigContext = new PigContext(properties);
// create the static script state object
ScriptState scriptState = pigContext.getExecutionEngine().instantiateScriptState();
String commandLine = LoadFunc.join((AbstractList<String>)Arrays.asList(args), " ");
scriptState.setCommandLine(commandLine);
// Lipstick-specific: fall back to Lipstick's own progress listener.
if (listener != null) {
scriptState.registerListener(listener);
} else {
scriptState.registerListener(new com.netflix.lipstick.listeners.LipstickPPNL());
}
ScriptState.start(scriptState);
pigContext.getProperties().setProperty("pig.cmd.args", commandLine);
if(logFileName == null && !userSpecifiedLog) {
logFileName = validateLogFile(properties.getProperty("pig.logfile"), null);
}
pigContext.getProperties().setProperty("pig.logfile", (logFileName == null? "": logFileName));
// configure logging
configureLog4J(properties, pigContext);
log.info(getVersionString().replace("\n", ""));
if(logFileName != null) {
log.info("Logging error messages to: " + logFileName);
}
deleteTempFiles = Boolean.valueOf(properties.getProperty(
PigConfiguration.PIG_DELETE_TEMP_FILE, "true"));
if( ! Boolean.valueOf(properties.getProperty(PROP_FILT_SIMPL_OPT, "false"))){
//turn off if the user has not explicitly turned on this optimization
disabledOptimizerRules.add("FilterLogicExpressionSimplifier");
}
pigContext.getProperties().setProperty(PigImplConstants.PIG_OPTIMIZER_RULES_KEY,
ObjectSerializer.serialize(disabledOptimizerRules));
PigContext.setClassLoader(pigContext.createCl(null));
// construct the parameter substitution preprocessor
LipstickGrunt grunt = null;
BufferedReader in;
String substFile = null;
paramFiles = fetchRemoteParamFiles(paramFiles, properties);
pigContext.setParams(params);
pigContext.setParamFiles(paramFiles);
// Dispatch on how the script was supplied (-f file, -e string, or neither).
switch (mode) {
case FILE: {
String remainders[] = opts.getRemainingArgs();
if (remainders != null) {
pigContext.getProperties().setProperty(PigContext.PIG_CMD_ARGS_REMAINDERS,
ObjectSerializer.serialize(remainders));
}
FileLocalizer.FetchFileRet localFileRet = FileLocalizer.fetchFile(properties, file);
if (localFileRet.didFetch) {
properties.setProperty("pig.jars.relative.to.dfs", "true");
}
scriptState.setFileName(file);
// Embedded (jython/groovy/...) scripts take a separate execution path.
if (embedded) {
return runEmbeddedScript(pigContext, localFileRet.file.getPath(), engine);
} else {
SupportedScriptLang type = determineScriptType(localFileRet.file.getPath());
if (type != null) {
return runEmbeddedScript(pigContext, localFileRet.file
.getPath(), type.name().toLowerCase());
}
}
//Reader is created by first loading "pig.load.default.statements" or .pigbootup file if available
in = new BufferedReader(new InputStreamReader(Utils.getCompositeStream(new FileInputStream(localFileRet.file), properties)));
// run parameter substitution preprocessor first
substFile = file + ".substituted";
pin = runParamPreprocessor(pigContext, in, substFile, debug || dryrun || checkScriptOnly);
if (dryrun) {
if (dryrun(substFile, pigContext)) {
log.info("Dry run completed. Substituted pig script is at "
+ substFile
+ ". Expanded pig script is at "
+ file + ".expanded");
} else {
log.info("Dry run completed. Substituted pig script is at "
+ substFile);
}
return ReturnCode.SUCCESS;
}
logFileName = validateLogFile(logFileName, file);
pigContext.getProperties().setProperty("pig.logfile", (logFileName == null? "": logFileName));
// Set job name based on name of the script
pigContext.getProperties().setProperty(PigContext.JOB_NAME,
"PigLatin:" +new File(file).getName()
);
if (!debug) {
new File(substFile).deleteOnExit();
}
scriptState.setScript(new File(file));
grunt = new LipstickGrunt(pin, pigContext);
gruntCalled = true;
if(checkScriptOnly) {
grunt.checkScript(substFile);
System.err.println(file + " syntax OK");
rc = ReturnCode.SUCCESS;
} else {
int results[] = grunt.exec();
rc = getReturnCodeForStats(results);
}
return rc;
}
case STRING: {
if(checkScriptOnly) {
System.err.println("ERROR:" +
"-c (-check) option is only valid " +
"when executing pig with a pig script file)");
return ReturnCode.ILLEGAL_ARGS;
}
// Gather up all the remaining arguments into a string and pass them into
// grunt.
StringBuffer sb = new StringBuffer();
String remainders[] = opts.getRemainingArgs();
for (int i = 0; i < remainders.length; i++) {
if (i != 0) sb.append(' ');
sb.append(remainders[i]);
}
sb.append('\n');
scriptState.setScript(sb.toString());
in = new BufferedReader(new StringReader(sb.toString()));
grunt = new LipstickGrunt(in, pigContext);
gruntCalled = true;
int results[] = grunt.exec();
return getReturnCodeForStats(results);
}
default:
break;
}
// If we're here, we don't know yet what they want. They may have just
// given us a jar to execute, they might have given us a pig script to
// execute, or they might have given us a dash (or nothing) which means to
// run grunt interactive.
String remainders[] = opts.getRemainingArgs();
if (remainders == null) {
if(checkScriptOnly) {
System.err.println("ERROR:" +
"-c (-check) option is only valid " +
"when executing pig with a pig script file)");
return ReturnCode.ILLEGAL_ARGS;
}
// Interactive
mode = ExecMode.SHELL;
//Reader is created by first loading "pig.load.default.statements" or .pigbootup file if available
ConsoleReader reader = new ConsoleReader(Utils.getCompositeStream(System.in, properties), new OutputStreamWriter(System.out));
reader.setDefaultPrompt("grunt> ");
final String HISTORYFILE = ".pig_history";
String historyFile = System.getProperty("user.home") + File.separator + HISTORYFILE;
reader.setHistory(new History(new File(historyFile)));
ConsoleReaderInputStream inputStream = new ConsoleReaderInputStream(reader);
grunt = new LipstickGrunt(new BufferedReader(new InputStreamReader(inputStream)), pigContext);
grunt.setConsoleReader(reader);
gruntCalled = true;
grunt.run();
return ReturnCode.SUCCESS;
} else {
pigContext.getProperties().setProperty(PigContext.PIG_CMD_ARGS_REMAINDERS, ObjectSerializer.serialize(remainders));
// They have a pig script they want us to run.
mode = ExecMode.FILE;
FileLocalizer.FetchFileRet localFileRet = FileLocalizer.fetchFile(properties, remainders[0]);
if (localFileRet.didFetch) {
properties.setProperty("pig.jars.relative.to.dfs", "true");
}
scriptState.setFileName(remainders[0]);
if (embedded) {
return runEmbeddedScript(pigContext, localFileRet.file.getPath(), engine);
} else {
SupportedScriptLang type = determineScriptType(localFileRet.file.getPath());
if (type != null) {
return runEmbeddedScript(pigContext, localFileRet.file
.getPath(), type.name().toLowerCase());
}
}
//Reader is created by first loading "pig.load.default.statements" or .pigbootup file if available
InputStream seqInputStream = Utils.getCompositeStream(new FileInputStream(localFileRet.file), properties);
in = new BufferedReader(new InputStreamReader(seqInputStream));
// run parameter substitution preprocessor first
substFile = remainders[0] + ".substituted";
pin = runParamPreprocessor(pigContext, in, substFile, debug || dryrun || checkScriptOnly);
if (dryrun) {
if (dryrun(substFile, pigContext)) {
log.info("Dry run completed. Substituted pig script is at "
+ substFile
+ ". Expanded pig script is at "
+ remainders[0] + ".expanded");
} else {
log.info("Dry run completed. Substituted pig script is at "
+ substFile);
}
return ReturnCode.SUCCESS;
}
logFileName = validateLogFile(logFileName, remainders[0]);
pigContext.getProperties().setProperty("pig.logfile", (logFileName == null? "": logFileName));
if (!debug) {
new File(substFile).deleteOnExit();
}
// Set job name based on name of the script
pigContext.getProperties().setProperty(PigContext.JOB_NAME,
"PigLatin:" +new File(remainders[0]).getName()
);
scriptState.setScript(localFileRet.file);
grunt = new LipstickGrunt(pin, pigContext);
gruntCalled = true;
if(checkScriptOnly) {
grunt.checkScript(substFile);
System.err.println(remainders[0] + " syntax OK");
rc = ReturnCode.SUCCESS;
} else {
int results[] = grunt.exec();
rc = getReturnCodeForStats(results);
}
return rc;
}
// Per Utkarsh and Chris invocation of jar file via pig deprecated.
} catch (ParseException e) {
// Bad command line: show usage and report a parse failure.
usage();
rc = ReturnCode.PARSE_EXCEPTION;
PigStatsUtil.setErrorMessage(e.getMessage());
PigStatsUtil.setErrorThrowable(e);
} catch (org.apache.pig.tools.parameters.ParseException e) {
// usage();
rc = ReturnCode.PARSE_EXCEPTION;
PigStatsUtil.setErrorMessage(e.getMessage());
PigStatsUtil.setErrorThrowable(e);
} catch (IOException e) {
// PigException carries its own error code and retriability flag.
if (e instanceof PigException) {
PigException pe = (PigException)e;
rc = (pe.retriable()) ? ReturnCode.RETRIABLE_EXCEPTION
: ReturnCode.PIG_EXCEPTION;
PigStatsUtil.setErrorMessage(pe.getMessage());
PigStatsUtil.setErrorCode(pe.getErrorCode());
} else {
rc = ReturnCode.IO_EXCEPTION;
PigStatsUtil.setErrorMessage(e.getMessage());
}
PigStatsUtil.setErrorThrowable(e);
if(!gruntCalled) {
LogUtils.writeLog(e, logFileName, log, verbose, "Error before Pig is launched");
}
} catch (Throwable e) {
rc = ReturnCode.THROWABLE_EXCEPTION;
PigStatsUtil.setErrorMessage(e.getMessage());
PigStatsUtil.setErrorThrowable(e);
if(!gruntCalled) {
LogUtils.writeLog(e, logFileName, log, verbose, "Error before Pig is launched");
}
} finally {
if (deleteTempFiles) {
// clear temp files
FileLocalizer.deleteTempFiles();
}
PerformanceTimerFactory.getPerfTimerFactory().dumpTimers();
}
return rc;
}
/**
 * Builds the progress-notification listener configured via the
 * "pig.notification.listener" property (with its optional constructor
 * argument property).
 *
 * @param properties effective Pig properties
 * @return the instantiated listener, or whatever
 *         {@code PigContext.instantiateObjectFromParams} returns when the
 *         property is unset
 * @throws RuntimeException wrapping any ExecException raised during instantiation
 */
protected static PigProgressNotificationListener makeListener(Properties properties) {
    final Configuration conf = ConfigurationUtil.toConfiguration(properties);
    try {
        return PigContext.instantiateObjectFromParams(
                conf,
                PROGRESS_NOTIFICATION_LISTENER_KEY,
                PROGRESS_NOTIFICATION_LISTENER_ARG_KEY,
                PigProgressNotificationListener.class);
    } catch (ExecException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Maps a {succeeded, failed} job-count pair onto a process return code.
 *
 * @param stats index 0 = number of succeeded jobs, index 1 = number of failed jobs
 * @return SUCCESS when nothing failed, FAILURE when nothing succeeded,
 *         PARTIAL_FAILURE otherwise
 */
private static int getReturnCodeForStats(int[] stats) {
    final int succeeded = stats[0];
    final int failed = stats[1];
    if (failed == 0) {
        return ReturnCode.SUCCESS;
    }
    if (succeeded == 0) {
        return ReturnCode.FAILURE;
    }
    return ReturnCode.PARTIAL_FAILURE;
}
/**
 * Runs the -dryrun pass over an already parameter-substituted script and,
 * when the script contains macros, writes the fully expanded text next to it
 * as "&lt;script&gt;.expanded".
 *
 * @param scriptFile path of the substituted script to parse
 * @param pigContext context handed to the parser
 * @return true when macros were found (and an .expanded file was written)
 * @throws RecognitionException on parse errors
 * @throws IOException on read/write failures
 */
public static boolean dryrun(String scriptFile, PigContext pigContext)
        throws RecognitionException, IOException {
    BufferedReader rd = new BufferedReader(new FileReader(scriptFile));
    DryRunGruntParser parser = new DryRunGruntParser(rd, scriptFile, pigContext);
    boolean hasMacro;
    try {
        hasMacro = parser.parseStopOnError();
    } finally {
        // The original leaked this reader; close it even when parsing throws.
        rd.close();
    }
    if (hasMacro) {
        String expandedFile = scriptFile.replace(".substituted", ".expanded");
        BufferedWriter fw = new BufferedWriter(new FileWriter(expandedFile));
        try {
            fw.append(parser.getResult());
        } finally {
            // Likewise ensure the writer is flushed/closed on all paths.
            fw.close();
        }
    }
    return hasMacro;
}
//TODO jz: log4j.properties should be used instead
/**
 * Configures log4j for the client process. Precedence: an explicit file from
 * -4/-log4jconf wins; otherwise a default configuration is synthesized from
 * the "brief"/"debug"/"pig.logfile" properties. The resulting level and
 * properties are also pushed into the PigContext for the backend.
 */
private static void configureLog4J(Properties properties, PigContext pigContext) {
// TODO Add a file appender for the logs
// TODO Need to create a property in the properties file for it.
// sgroschupf, 25Feb2008: this method will be obsolete with PIG-115.
String log4jconf = properties.getProperty(LOG4J_CONF);
String trueString = "true";
boolean brief = trueString.equalsIgnoreCase(properties.getProperty(BRIEF));
Level logLevel = Level.INFO;
String logLevelString = properties.getProperty(DEBUG);
if (logLevelString != null){
logLevel = Level.toLevel(logLevelString, Level.INFO);
}
// Try the user-supplied log4j configuration file first (best effort).
Properties props = new Properties();
FileReader propertyReader = null;
if (log4jconf != null) {
try {
propertyReader = new FileReader(log4jconf);
props.load(propertyReader);
}
catch (IOException e)
{
System.err.println("Warn: Cannot open log4j properties file, use default");
}
finally
{
if (propertyReader != null) try {propertyReader.close();} catch(Exception e) {}
}
}
// No usable file: synthesize a console (and optionally rolling-file) setup.
if (props.size() == 0) {
props.setProperty("log4j.logger.org.apache.pig", logLevel.toString());
if((logLevelString = System.getProperty("pig.logfile.level")) == null){
props.setProperty("log4j.rootLogger", "INFO, PIGCONSOLE");
}
else{
// "pig.logfile.level" set: also append to the pig.logfile via appender F.
logLevel = Level.toLevel(logLevelString, Level.INFO);
props.setProperty("log4j.logger.org.apache.pig", logLevel.toString());
props.setProperty("log4j.rootLogger", "INFO, PIGCONSOLE, F");
props.setProperty("log4j.appender.F","org.apache.log4j.RollingFileAppender");
props.setProperty("log4j.appender.F.File",properties.getProperty("pig.logfile"));
props.setProperty("log4j.appender.F.layout","org.apache.log4j.PatternLayout");
props.setProperty("log4j.appender.F.layout.ConversionPattern", brief ? "%m%n" : "%d [%t] %-5p %c - %m%n");
}
props.setProperty("log4j.appender.PIGCONSOLE","org.apache.log4j.ConsoleAppender");
props.setProperty("log4j.appender.PIGCONSOLE.target", "System.err");
props.setProperty("log4j.appender.PIGCONSOLE.layout","org.apache.log4j.PatternLayout");
props.setProperty("log4j.appender.PIGCONSOLE.layout.ConversionPattern", brief ? "%m%n" : "%d [%t] %-5p %c - %m%n");
}
PropertyConfigurator.configure(props);
// Mirror the effective level into the PigContext so the backend matches.
logLevel = Logger.getLogger("org.apache.pig").getLevel();
if (logLevel==null) {
logLevel = Logger.getLogger("org.apache.pig").getEffectiveLevel();
}
Properties backendProps = pigContext.getLog4jProperties();
backendProps.setProperty("log4j.logger.org.apache.pig", logLevel.toString());
pigContext.setLog4jProperties(backendProps);
pigContext.setDefaultLogLevel(logLevel);
}
/**
 * Localizes every parameter file (which may live on a remote filesystem) and
 * returns the absolute local paths of the fetched copies.
 *
 * @param paramFiles paths as given on the command line (-m/-param_file)
 * @param properties effective Pig properties used for the fetch
 * @return absolute local paths, in the same order as the input
 * @throws IOException when a file cannot be fetched
 */
private static List<String> fetchRemoteParamFiles(List<String> paramFiles, Properties properties)
        throws IOException {
    final List<String> localPaths = new ArrayList<String>(paramFiles.size());
    for (String paramFile : paramFiles) {
        FileLocalizer.FetchFileRet fetched = FileLocalizer.fetchFile(properties, paramFile);
        localPaths.add(fetched.file.getAbsolutePath());
    }
    return localPaths;
}
/**
 * Runs parameter substitution over the original script and returns a reader
 * over the substituted text (the stream that is ultimately handed to Grunt).
 *
 * @param context       Pig context performing the substitution
 * @param origPigScript reader over the un-substituted script
 * @param scriptFile    destination path when the result must be persisted
 * @param createFile    true to write the result to scriptFile (used by
 *                      -debug / -dryrun / -check), false to substitute in memory
 */
private static BufferedReader runParamPreprocessor(PigContext context, BufferedReader origPigScript,
        String scriptFile, boolean createFile)
        throws org.apache.pig.tools.parameters.ParseException, IOException {
    if (!createFile) {
        // Substitute purely in memory and wrap the result in a reader.
        final String substituted = context.doParamSubstitution(origPigScript);
        return new BufferedReader(new StringReader(substituted));
    }
    return context.doParamSubstitutionOutputToFile(origPigScript, scriptFile);
}
/**
 * Returns the major version of Pig being run, or null when version
 * information could not be read from the jar manifest.
 */
public static String getMajorVersion() {
return majorVersion;
}
/**
 * Returns the minor version of the Pig build being run, or null when version
 * information could not be read from the jar manifest.
 */
public static String getMinorVersion() {
return minorVersion;
}
/**
 * Returns the patch version of the Pig build being run, or null when version
 * information could not be read from the jar manifest.
 */
public static String getPatchVersion() {
return patchVersion;
}
/**
 * Returns the svn revision number of the Pig build being run, or null when
 * version information could not be read from the jar manifest.
 */
public static String getSvnRevision() {
return svnRevision;
}
/**
 * Returns the build timestamp of the Pig build being run, or null when
 * version information could not be read from the jar manifest.
 */
public static String getBuildTime() {
return buildTime;
}
/**
 * Returns the Lipstick build banner from the /lipstick_build.txt classpath
 * resource, or a placeholder when the resource is missing or unreadable.
 */
private static String getVersionString() {
    // The original NPE'd when the resource was absent and never closed the stream.
    InputStream buildInfo = Main.class.getResourceAsStream("/lipstick_build.txt");
    if (buildInfo == null) {
        return "Lipstick - version unknown";
    }
    try {
        return CharStreams.toString(new InputStreamReader(buildInfo, "UTF-8"));
    } catch (IOException e) {
        return "Lipstick - version unknown";
    } finally {
        try {
            buildInfo.close();
        } catch (IOException ignored) {
            // best-effort close
        }
    }
}
/**
 * Print usage string.
 * Writes the version banner plus a description of every command-line option
 * registered in {@code run} to stdout.
 */
public static void usage()
{
System.out.println("\n"+getVersionString()+"\n");
System.out.println("USAGE: Pig [options] [-] : Run interactively in grunt shell.");
System.out.println("       Pig [options] -e[xecute] cmd [cmd ...] : Run cmd(s).");
System.out.println("       Pig [options] [-f[ile]] file : Run cmds found in file.");
System.out.println("  options include:");
System.out.println("    -4, -log4jconf - Log4j configuration file, overrides log conf");
System.out.println("    -b, -brief - Brief logging (no timestamps)");
System.out.println("    -c, -check - Syntax check");
System.out.println("    -d, -debug - Debug level, INFO is default");
System.out.println("    -e, -execute - Commands to execute (within quotes)");
System.out.println("    -f, -file - Path to the script to execute");
System.out.println("    -g, -embedded - ScriptEngine classname or keyword for the ScriptEngine");
System.out.println("    -h, -help - Display this message. You can specify topic to get help for that topic.");
System.out.println("        properties is the only topic currently supported: -h properties.");
System.out.println("    -i, -version - Display version information");
System.out.println("    -l, -logfile - Path to client side log file; default is current working directory.");
System.out.println("    -m, -param_file - Path to the parameter file");
System.out.println("    -p, -param - Key value pair of the form param=val");
System.out.println("    -r, -dryrun - Produces script with substituted parameters. Script is not executed.");
System.out.println("    -t, -optimizer_off - Turn optimizations off. The following values are supported:");
System.out.println("            SplitFilter - Split filter conditions");
System.out.println("            PushUpFilter - Filter as early as possible");
System.out.println("            MergeFilter - Merge filter conditions");
System.out.println("            PushDownForeachFlatten - Join or explode as late as possible");
System.out.println("            LimitOptimizer - Limit as early as possible");
System.out.println("            ColumnMapKeyPrune - Remove unused data");
System.out.println("            AddForEach - Add ForEach to remove unneeded columns");
System.out.println("            MergeForEach - Merge adjacent ForEach");
System.out.println("            GroupByConstParallelSetter - Force parallel 1 for \"group all\" statement");
System.out.println("            All - Disable all optimizations");
System.out.println("        All optimizations listed here are enabled by default. Optimization values are case insensitive.");
System.out.println("    -v, -verbose - Print all error messages to screen");
System.out.println("    -w, -warning - Turn warning logging on; also turns warning aggregation off");
System.out.println("    -x, -exectype - Set execution mode: local|mapreduce, default is mapreduce.");
System.out.println("    -F, -stop_on_failure - Aborts execution on the first failed job; default is off");
System.out.println("    -M, -no_multiquery - Turn multiquery optimization off; default is on");
System.out.println("    -N, -no_fetch - Turn fetch optimization off; default is on");
System.out.println("    -P, -propertyFile - Path to property file");
System.out.println("    -printCmdDebug - Overrides anything else and prints the actual command used to run Pig, including");
System.out.println("                     any environment variables that are set by the pig command.");
}
/**
 * Prints the documented Pig configuration properties to stdout; invoked by
 * {@code run} for "-h properties".
 */
public static void printProperties(){
System.out.println("The following properties are supported:");
System.out.println("    Logging:");
System.out.println("        verbose=true|false; default is false. This property is the same as -v switch");
System.out.println("        brief=true|false; default is false. This property is the same as -b switch");
System.out.println("        debug=OFF|ERROR|WARN|INFO|DEBUG; default is INFO. This property is the same as -d switch");
System.out.println("        aggregate.warning=true|false; default is true. If true, prints count of warnings");
System.out.println("            of each type rather than logging each warning.");
System.out.println("    Performance tuning:");
System.out.println("        pig.cachedbag.memusage=<mem fraction>; default is 0.2 (20% of all memory).");
System.out.println("            Note that this memory is shared across all large bags used by the application.");
System.out.println("        pig.skewedjoin.reduce.memusagea=<mem fraction>; default is 0.3 (30% of all memory).");
System.out.println("            Specifies the fraction of heap available for the reducer to perform the join.");
System.out.println("        pig.exec.nocombiner=true|false; default is false. ");
System.out.println("            Only disable combiner as a temporary workaround for problems.");
System.out.println("        opt.multiquery=true|false; multiquery is on by default.");
System.out.println("            Only disable multiquery as a temporary workaround for problems.");
System.out.println("        opt.fetch=true|false; fetch is on by default.");
System.out.println("            Scripts containing Filter, Foreach, Limit, Stream, and Union can be dumped without MR jobs.");
System.out.println("        pig.tmpfilecompression=true|false; compression is off by default.");
System.out.println("            Determines whether output of intermediate jobs is compressed.");
System.out.println("        pig.tmpfilecompression.codec=lzo|gzip; default is gzip.");
System.out.println("            Used in conjunction with pig.tmpfilecompression. Defines compression type.");
System.out.println("        pig.noSplitCombination=true|false. Split combination is on by default.");
System.out.println("            Determines if multiple small files are combined into a single map.");
System.out.println("        pig.exec.mapPartAgg=true|false. Default is false.");
System.out.println("            Determines if partial aggregation is done within map phase, ");
System.out.println("            before records are sent to combiner.");
System.out.println("        pig.exec.mapPartAgg.minReduction=<min aggregation factor>. Default is 10.");
System.out.println("            If the in-map partial aggregation does not reduce the output num records");
System.out.println("            by this factor, it gets disabled.");
System.out.println("    Miscellaneous:");
System.out.println("        exectype=mapreduce|local; default is mapreduce. This property is the same as -x switch");
System.out.println("        pig.additional.jars=<colon seperated list of jars>. Used in place of register command.");
System.out.println("        udf.import.list=<comma seperated list of imports>. Used to avoid package names in UDF.");
System.out.println("        stop.on.failure=true|false; default is false. Set to true to terminate on the first error.");
System.out.println("        pig.datetime.default.tz=<UTC time offset>. e.g. +08:00. Default is the default timezone of the host.");
System.out.println("            Determines the timezone used to handle datetime datatype and UDFs. ");
System.out.println("Additionally, any Hadoop property can be specified.");
}
/**
 * Resolves the client-side log file path, verifying write permission.
 * When logFileName is a directory, a generated file name (derived from the
 * script name when given, else "pig_&lt;timestamp&gt;.log") is appended; when
 * it is null, the current working directory is used. Returns the canonical
 * path to use, or null when no writable location could be determined.
 *
 * @param logFileName user-supplied log path (file or directory), may be null
 * @param scriptName  script being run, used to derive a default name; may be null
 */
private static String validateLogFile(String logFileName, String scriptName) {
String strippedDownScriptName = null;
if(scriptName != null) {
File scriptFile = new File(scriptName);
if(!scriptFile.isDirectory()) {
String scriptFileAbsPath;
try {
scriptFileAbsPath = scriptFile.getCanonicalPath();
strippedDownScriptName = getFileFromCanonicalPath(scriptFileAbsPath);
} catch (IOException ioe) {
log.warn("Could not compute canonical path to the script file " + ioe.getMessage());
strippedDownScriptName = null;
}
}
}
String defaultLogFileName = (strippedDownScriptName == null ? "pig_" : strippedDownScriptName) + new Date().getTime() + ".log";
File logFile;
if(logFileName != null) {
logFile = new File(logFileName);
//Check if the file name is a directory
//append the default file name to the file
if(logFile.isDirectory()) {
if(logFile.canWrite()) {
try {
logFileName = logFile.getCanonicalPath() + File.separator + defaultLogFileName;
} catch (IOException ioe) {
log.warn("Could not compute canonical path to the log file " + ioe.getMessage());
return null;
}
return logFileName;
} else {
log.warn("Need write permission in the directory: " + logFileName + " to create log file.");
return null;
}
} else {
//we have a relative path or an absolute path to the log file
//check if we can write to the directory where this file is/will be stored
if (logFile.exists()) {
if(logFile.canWrite()) {
try {
logFileName = new File(logFileName).getCanonicalPath();
} catch (IOException ioe) {
log.warn("Could not compute canonical path to the log file " + ioe.getMessage());
return null;
}
return logFileName;
} else {
//do not have write permissions for the log file
//bail out with an error message
log.warn("Cannot write to file: " + logFileName + ". Need write permission.");
return logFileName;
}
} else {
// Log file does not exist yet: check the parent directory instead.
logFile = logFile.getParentFile();
if(logFile != null) {
//if the directory is writable we are good to go
if(logFile.canWrite()) {
try {
logFileName = new File(logFileName).getCanonicalPath();
} catch (IOException ioe) {
log.warn("Could not compute canonical path to the log file " + ioe.getMessage());
return null;
}
return logFileName;
} else {
log.warn("Need write permission in the directory: " + logFile + " to create log file.");
return logFileName;
}
}//end if logFile != null else is the default in fall through
}//end else part of logFile.exists()
}//end else part of logFile.isDirectory()
}//end if logFileName != null
//file name is null or its in the current working directory
//revert to the current working directory
String currDir = System.getProperty("user.dir");
logFile = new File(currDir);
logFileName = currDir + File.separator + (logFileName == null? defaultLogFileName : logFileName);
if(logFile.canWrite()) {
return logFileName;
}
log.warn("Cannot write to log file: " + logFileName);
return null;
}
/**
 * Extracts the file-name component of a canonical path.
 *
 * <p>The original implementation started the substring AT the last
 * separator, so the returned "stripped down" name kept a leading
 * {@code File.separator} (e.g. "/script.pig"), which then leaked into the
 * generated default log-file name. The {@code + 1} below drops it.
 *
 * @param canonicalPath absolute canonical path (always contains a separator)
 * @return the portion after the last path separator
 */
private static String getFileFromCanonicalPath(String canonicalPath) {
    return canonicalPath.substring(canonicalPath.lastIndexOf(File.separator) + 1);
}
// Returns the embedded-script language detected for the given file, or null
// when it is not a supported embedded script (delegates to
// ScriptEngine.getSupportedScriptLang; callers treat null as plain Pig Latin).
private static SupportedScriptLang determineScriptType(String file)
throws IOException {
return ScriptEngine.getSupportedScriptLang(file);
}
/**
 * Executes an embedded (non-Pig-Latin) script through the named ScriptEngine
 * and folds the per-pipeline PigStats into a single return code.
 *
 * @param pigContext connected context the engine runs against
 * @param file       local path of the script
 * @param engine     ScriptEngine class name or keyword
 * @return FAILURE when every stats entry failed, PARTIAL_FAILURE when some
 *         failed, SUCCESS otherwise (including when no stats were produced)
 * @throws IOException on engine or connection failures
 */
private static int runEmbeddedScript(PigContext pigContext, String file, String engine)
        throws IOException {
    log.info("Run embedded script: " + engine);
    pigContext.connect();
    ScriptEngine scriptEngine = ScriptEngine.getInstance(engine);
    Map<String, List<PigStats>> statsMap = scriptEngine.run(pigContext, file);
    PigStatsUtil.setStatsMap(statsMap);
    int failed = 0;
    int total = 0;
    for (List<PigStats> statsList : statsMap.values()) {
        if (statsList == null || statsList.isEmpty()) {
            continue;
        }
        for (PigStats stats : statsList) {
            total++;
            if (!stats.isSuccessful()) {
                failed++;
            }
        }
    }
    if (total > 0 && failed == total) {
        return ReturnCode.FAILURE;
    }
    return failed > 0 ? ReturnCode.PARTIAL_FAILURE : ReturnCode.SUCCESS;
}
}
| |
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.logging.log4j.core.jmx;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.Reader;
import java.io.StringWriter;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.Charset;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicLong;
import javax.management.MBeanNotificationInfo;
import javax.management.Notification;
import javax.management.NotificationBroadcasterSupport;
import javax.management.ObjectName;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.config.ConfigurationFactory;
import org.apache.logging.log4j.core.config.ConfigurationFactory.ConfigurationSource;
import org.apache.logging.log4j.core.helpers.Assert;
import org.apache.logging.log4j.core.helpers.Charsets;
import org.apache.logging.log4j.core.helpers.Closer;
import org.apache.logging.log4j.status.StatusLogger;
/**
 * Implementation of the {@code LoggerContextAdminMBean} interface.
 * <p>
 * Exposes the state of one {@code LoggerContext} over JMX and supports remote
 * reconfiguration, either from a configuration location URI or from raw
 * configuration text. A JMX {@code Notification} of type
 * {@code NOTIF_TYPE_RECONFIGURED} is broadcast whenever the instrumented
 * context is reconfigured.
 */
public class LoggerContextAdmin extends NotificationBroadcasterSupport
        implements LoggerContextAdminMBean, PropertyChangeListener {
    /** Chunk size (in chars) used when reading configuration contents. */
    private static final int PAGE = 4 * 1024;
    /** Initial capacity of the builder that accumulates configuration text. */
    private static final int TEXT_BUFFER = 64 * 1024;
    /** Initial capacity of the writer used to render a stack trace as text. */
    private static final int BUFFER_SIZE = 2048;
    private static final StatusLogger LOGGER = StatusLogger.getLogger();
    /** Source of sequence numbers for outgoing notifications. */
    private final AtomicLong sequenceNo = new AtomicLong();
    private final ObjectName objectName;
    private final LoggerContext loggerContext;
    /**
     * Configuration text most recently set via
     * {@link #setConfigText(String, String)}; reset to {@code null} when a
     * configuration is subsequently read from a location.
     */
    private String customConfigText;

    /**
     * Constructs a new {@code LoggerContextAdmin} with the {@code Executor} to
     * be used for sending {@code Notification}s asynchronously to listeners.
     *
     * @param loggerContext the instrumented object
     * @param executor used to send notifications asynchronously
     * @throws IllegalStateException if the {@code ObjectName} for the context
     *             cannot be constructed
     */
    public LoggerContextAdmin(final LoggerContext loggerContext, final Executor executor) {
        super(executor, createNotificationInfo());
        this.loggerContext = Assert.isNotNull(loggerContext, "loggerContext");
        try {
            final String ctxName = Server.escape(loggerContext.getName());
            final String name = String.format(PATTERN, ctxName);
            objectName = new ObjectName(name);
        } catch (final Exception e) {
            throw new IllegalStateException(e);
        }
        // Observe reconfiguration events so JMX listeners can be notified.
        loggerContext.addPropertyChangeListener(this);
    }

    /** Describes the single notification type emitted by this MBean. */
    private static MBeanNotificationInfo createNotificationInfo() {
        final String[] notifTypes = new String[] {//
        NOTIF_TYPE_RECONFIGURED };
        final String name = Notification.class.getName();
        final String description = "Configuration reconfigured";
        return new MBeanNotificationInfo(notifTypes, name, description);
    }

    @Override
    public String getStatus() {
        return loggerContext.getStatus().toString();
    }

    @Override
    public String getName() {
        return loggerContext.getName();
    }

    /** Returns the context's currently active configuration. */
    private Configuration getConfig() {
        return loggerContext.getConfiguration();
    }

    @Override
    public String getConfigLocationURI() {
        if (loggerContext.getConfigLocation() != null) {
            return String.valueOf(loggerContext.getConfigLocation());
        }
        if (getConfigName() != null) {
            return String.valueOf(new File(getConfigName()).toURI());
        }
        return "";
    }

    @Override
    public void setConfigLocationURI(final String configLocation)
            throws URISyntaxException, IOException {
        LOGGER.debug("---------");
        LOGGER.debug("Remote request to reconfigure using location "
                + configLocation);
        final URI uri = new URI(configLocation);
        // validate the location first: invalid location will result in
        // default configuration being configured, try to avoid that...
        uri.toURL().openStream().close();
        loggerContext.setConfigLocation(uri);
        LOGGER.debug("Completed remote request to reconfigure.");
    }

    @Override
    public void propertyChange(final PropertyChangeEvent evt) {
        if (!LoggerContext.PROPERTY_CONFIG.equals(evt.getPropertyName())) {
            return;
        }
        // erase custom text if new configuration was read from a location
        if (loggerContext.getConfiguration().getName() != null) {
            customConfigText = null;
        }
        final Notification notif = new Notification(NOTIF_TYPE_RECONFIGURED,
                getObjectName(), nextSeqNo(), now(), null);
        sendNotification(notif);
    }

    @Override
    public String getConfigText() throws IOException {
        return getConfigText(Charsets.UTF_8.name());
    }

    @Override
    public String getConfigText(final String charsetName) throws IOException {
        if (customConfigText != null) {
            return customConfigText;
        }
        try {
            final Charset charset = Charset.forName(charsetName);
            return readContents(new URI(getConfigLocationURI()), charset);
        } catch (final Exception ex) {
            // NOTE: failures are reported to the remote caller as the
            // stack-trace text instead of being rethrown (existing contract).
            final StringWriter sw = new StringWriter(BUFFER_SIZE);
            ex.printStackTrace(new PrintWriter(sw));
            return sw.toString();
        }
    }

    @Override
    public void setConfigText(final String configText, final String charsetName) {
        final String old = customConfigText;
        customConfigText = Assert.isNotNull(configText, "configText");
        LOGGER.debug("---------");
        LOGGER.debug("Remote request to reconfigure from config text.");
        try {
            final InputStream in = new ByteArrayInputStream(
                    configText.getBytes(charsetName));
            final ConfigurationSource source = new ConfigurationSource(in);
            final Configuration updated = ConfigurationFactory.getInstance()
                    .getConfiguration(source);
            loggerContext.start(updated);
            LOGGER.debug("Completed remote request to reconfigure from config text.");
        } catch (final Exception ex) {
            // Restore the previous custom text so a failed reconfigure does
            // not leave stale text behind.
            customConfigText = old;
            final String msg = "Could not reconfigure from config text";
            LOGGER.error(msg, ex);
            throw new IllegalArgumentException(msg, ex);
        }
    }

    /**
     * Reads the full contents of the resource at the specified URI, decoded
     * with the specified charset.
     *
     * @param uri location of the configuration resource to read
     * @param charset charset used to decode the stream; MUST not be null
     * @return the decoded contents of the resource
     * @throws IOException if the resource cannot be opened or read
     */
    private String readContents(final URI uri, final Charset charset) throws IOException {
        InputStream in = null;
        Reader reader = null;
        try {
            in = uri.toURL().openStream();
            reader = new InputStreamReader(in, charset);
            final StringBuilder result = new StringBuilder(TEXT_BUFFER);
            final char[] buff = new char[PAGE];
            int count = -1;
            while ((count = reader.read(buff)) >= 0) {
                result.append(buff, 0, count);
            }
            return result.toString();
        } finally {
            // Close the wrapping reader before the underlying stream: closing
            // the reader also releases the stream, and closing the stream
            // first would make the reader's close a no-op on a dead stream.
            Closer.closeSilent(reader);
            Closer.closeSilent(in);
        }
    }

    @Override
    public String getConfigName() {
        return getConfig().getName();
    }

    @Override
    public String getConfigClassName() {
        return getConfig().getClass().getName();
    }

    @Override
    public String getConfigFilter() {
        return String.valueOf(getConfig().getFilter());
    }

    @Override
    public String getConfigMonitorClassName() {
        return getConfig().getConfigurationMonitor().getClass().getName();
    }

    @Override
    public Map<String, String> getConfigProperties() {
        return getConfig().getProperties();
    }

    /**
     * Returns the {@code ObjectName} of this mbean.
     *
     * @return the {@code ObjectName}
     * @see LoggerContextAdminMBean#PATTERN
     */
    public ObjectName getObjectName() {
        return objectName;
    }

    /** Returns the next notification sequence number. */
    private long nextSeqNo() {
        return sequenceNo.getAndIncrement();
    }

    /** Returns the current time in millis for notification timestamps. */
    private long now() {
        return System.currentTimeMillis();
    }
}
| |
package io.dropwizard.setup;
import static com.google.common.base.Preconditions.checkNotNull;
import com.codahale.metrics.JmxReporter;
import io.dropwizard.Application;
import io.dropwizard.Bundle;
import io.dropwizard.Configuration;
import io.dropwizard.ConfiguredBundle;
import io.dropwizard.cli.Command;
import io.dropwizard.cli.ConfiguredCommand;
import io.dropwizard.configuration.ConfigurationFactoryFactory;
import io.dropwizard.configuration.ConfigurationSourceProvider;
import io.dropwizard.configuration.DefaultConfigurationFactoryFactory;
import io.dropwizard.configuration.FileConfigurationSourceProvider;
import io.dropwizard.jackson.Jackson;
import java.lang.management.ManagementFactory;
import java.util.List;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.jvm.BufferPoolMetricSet;
import com.codahale.metrics.jvm.GarbageCollectorMetricSet;
import com.codahale.metrics.jvm.MemoryUsageGaugeSet;
import com.codahale.metrics.jvm.ThreadStatesGaugeSet;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import javax.validation.Validation;
import javax.validation.ValidatorFactory;
/**
 * The pre-start application environment, containing everything required to bootstrap a Dropwizard
 * command.
 *
 * @param <T> the configuration type
 */
public class Bootstrap<T extends Configuration> {
    private final Application<T> application;
    private final ObjectMapper objectMapper;
    private final List<Bundle> bundles;
    private final List<ConfiguredBundle<? super T>> configuredBundles;
    private final List<Command> commands;
    private final MetricRegistry metricRegistry;
    private final ValidatorFactory validatorFactory;
    private ConfigurationSourceProvider configurationSourceProvider;
    private ClassLoader classLoader;
    private ConfigurationFactoryFactory<T> configurationFactoryFactory;

    /**
     * Creates a new {@link Bootstrap} for the given application.
     * <p>
     * Registers the standard JVM metric sets (buffers, GC, memory, threads)
     * and starts a JMX reporter for the metric registry.
     *
     * @param application a Dropwizard {@link Application}
     */
    public Bootstrap(Application<T> application) {
        this.application = application;
        this.objectMapper = Jackson.newObjectMapper();
        this.bundles = Lists.newArrayList();
        this.configuredBundles = Lists.newArrayList();
        this.commands = Lists.newArrayList();
        this.metricRegistry = new MetricRegistry();
        this.validatorFactory = Validation.buildDefaultValidatorFactory();
        // Use the field directly rather than the overridable getMetricRegistry():
        // calling an overridable method from a constructor could dispatch to a
        // subclass override before the subclass is initialized.
        metricRegistry.register("jvm.buffers", new BufferPoolMetricSet(ManagementFactory
                .getPlatformMBeanServer()));
        metricRegistry.register("jvm.gc", new GarbageCollectorMetricSet());
        metricRegistry.register("jvm.memory", new MemoryUsageGaugeSet());
        metricRegistry.register("jvm.threads", new ThreadStatesGaugeSet());
        JmxReporter.forRegistry(metricRegistry).build().start();
        this.configurationSourceProvider = new FileConfigurationSourceProvider();
        this.classLoader = Thread.currentThread().getContextClassLoader();
        this.configurationFactoryFactory = new DefaultConfigurationFactoryFactory<T>();
    }

    /**
     * Returns the bootstrap's {@link Application}.
     */
    public Application<T> getApplication() {
        return application;
    }

    /**
     * Returns the bootstrap's {@link ConfigurationSourceProvider}.
     */
    public ConfigurationSourceProvider getConfigurationSourceProvider() {
        return configurationSourceProvider;
    }

    /**
     * Sets the bootstrap's {@link ConfigurationSourceProvider}.
     *
     * @param provider the provider; must not be null
     */
    public void setConfigurationSourceProvider(ConfigurationSourceProvider provider) {
        this.configurationSourceProvider = checkNotNull(provider);
    }

    /**
     * Returns the bootstrap's class loader.
     */
    public ClassLoader getClassLoader() {
        return classLoader;
    }

    /**
     * Sets the bootstrap's class loader.
     */
    public void setClassLoader(ClassLoader classLoader) {
        this.classLoader = classLoader;
    }

    /**
     * Adds the given bundle to the bootstrap. The bundle is initialized
     * immediately.
     *
     * @param bundle a {@link Bundle}
     */
    public void addBundle(Bundle bundle) {
        bundle.initialize(this);
        bundles.add(bundle);
    }

    /**
     * Adds the given bundle to the bootstrap. The bundle is initialized
     * immediately.
     *
     * @param bundle a {@link ConfiguredBundle}
     */
    public void addBundle(ConfiguredBundle<? super T> bundle) {
        bundle.initialize(this);
        configuredBundles.add(bundle);
    }

    /**
     * Adds the given command to the bootstrap.
     *
     * @param command a {@link Command}
     */
    public void addCommand(Command command) {
        commands.add(command);
    }

    /**
     * Adds the given command to the bootstrap.
     *
     * @param command a {@link ConfiguredCommand}
     */
    public void addCommand(ConfiguredCommand<T> command) {
        commands.add(command);
    }

    /**
     * Returns the bootstrap's {@link ObjectMapper}.
     */
    public ObjectMapper getObjectMapper() {
        return objectMapper;
    }

    /**
     * Runs the bootstrap's bundles with the given configuration and environment.
     * Plain bundles run first, then configured bundles, each in registration order.
     *
     * @param configuration the parsed configuration
     * @param environment the application environment
     * @throws Exception if a bundle throws an exception
     */
    public void run(T configuration, Environment environment) throws Exception {
        for (Bundle bundle : bundles) {
            bundle.run(environment);
        }
        for (ConfiguredBundle<? super T> bundle : configuredBundles) {
            bundle.run(configuration, environment);
        }
    }

    /**
     * Returns the application's commands as an immutable snapshot.
     */
    public ImmutableList<Command> getCommands() {
        return ImmutableList.copyOf(commands);
    }

    /**
     * Returns the application's metrics.
     */
    public MetricRegistry getMetricRegistry() {
        return metricRegistry;
    }

    /**
     * Returns the application's validator factory.
     */
    public ValidatorFactory getValidatorFactory() {
        return validatorFactory;
    }

    /**
     * Returns the factory used to create the application's
     * {@code ConfigurationFactory}.
     */
    public ConfigurationFactoryFactory<T> getConfigurationFactoryFactory() {
        return configurationFactoryFactory;
    }

    /**
     * Sets the factory used to create the application's
     * {@code ConfigurationFactory}.
     */
    public void setConfigurationFactoryFactory(ConfigurationFactoryFactory<T> configurationFactoryFactory) {
        this.configurationFactoryFactory = configurationFactoryFactory;
    }
}
| |
/*
* JBoss, Home of Professional Open Source
* Copyright 2012, Red Hat, Inc. and/or its affiliates, and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hibernate.validator.test.internal.constraintvalidators.hv;
import java.util.Set;
import javax.validation.ConstraintViolation;
import javax.validation.Validator;
import org.hibernate.validator.HibernateValidator;
import org.hibernate.validator.HibernateValidatorConfiguration;
import org.hibernate.validator.cfg.ConstraintMapping;
import org.hibernate.validator.cfg.defs.Mod10CheckDef;
import org.hibernate.validator.constraints.Mod10Check;
import org.hibernate.validator.internal.constraintvalidators.hv.Mod10CheckValidator;
import org.hibernate.validator.internal.util.annotationfactory.AnnotationDescriptor;
import org.hibernate.validator.internal.util.annotationfactory.AnnotationFactory;
import org.hibernate.validator.testutil.MyCustomStringImpl;
import org.hibernate.validator.testutil.TestForIssue;
import org.testng.annotations.Test;
import static java.lang.annotation.ElementType.FIELD;
import static org.hibernate.validator.testutil.ConstraintViolationAssert.assertNumberOfViolations;
import static org.hibernate.validator.testutil.ValidatorUtil.getConfiguration;
import static org.testng.Assert.assertFalse;
import static org.testng.AssertJUnit.assertTrue;
/**
 * Tests for the {@code Mod10CheckValidator}.
 *
 * @author Hardy Ferentschik
 * @author Victor Rezende dos Santos
 */
public class Mod10CheckValidatorTest {

    // ---- initialize() argument validation -------------------------------

    @Test(expectedExceptions = IllegalArgumentException.class)
    public void testInvalidStartIndex() {
        Mod10CheckValidator validator = new Mod10CheckValidator();
        Mod10Check modCheck = createMod10CheckAnnotation( -1, Integer.MAX_VALUE, -1, false );
        validator.initialize( modCheck );
    }

    @Test(expectedExceptions = IllegalArgumentException.class)
    public void testInvalidEndIndex() {
        Mod10CheckValidator validator = new Mod10CheckValidator();
        Mod10Check modCheck = createMod10CheckAnnotation( 0, -1, -1, false );
        validator.initialize( modCheck );
    }

    @Test(expectedExceptions = IllegalArgumentException.class)
    public void testEndIndexLessThanStartIndex() {
        Mod10CheckValidator validator = new Mod10CheckValidator();
        Mod10Check modCheck = createMod10CheckAnnotation( 5, 0, -1, false );
        validator.initialize( modCheck );
    }

    @Test(expectedExceptions = IllegalArgumentException.class)
    public void testInvalidCheckDigitIndex() {
        // check digit index must not fall inside the [start, end) range
        Mod10CheckValidator validator = new Mod10CheckValidator();
        Mod10Check modCheck = createMod10CheckAnnotation( 0, 10, 5, false );
        validator.initialize( modCheck );
    }

    // ---- isValid() behavior ---------------------------------------------

    @Test
    public void testFailOnNonNumeric() throws Exception {
        Mod10CheckValidator validator = new Mod10CheckValidator();
        Mod10Check modCheck = createMod10CheckAnnotation( 0, Integer.MAX_VALUE, -1, false );
        validator.initialize( modCheck );
        assertFalse( validator.isValid( new MyCustomStringImpl( "A79927398712" ), null ) );
    }

    @Test
    public void testIgnoreNonNumeric() throws Exception {
        Mod10CheckValidator validator = new Mod10CheckValidator();
        Mod10Check modCheck = createMod10CheckAnnotation( 0, Integer.MAX_VALUE, -1, true );
        validator.initialize( modCheck );
        assertTrue( validator.isValid( new MyCustomStringImpl( "A79927398712" ), null ) );
    }

    @Test
    public void testValidMod10() throws Exception {
        Mod10CheckValidator validator = new Mod10CheckValidator();
        Mod10Check modCheck = createMod10CheckAnnotation( 0, Integer.MAX_VALUE, -1, false );
        validator.initialize( modCheck );
        assertTrue( validator.isValid( "79927398712", null ) );
    }

    @Test
    public void testValidMod10WithGivenRange() throws Exception {
        Mod10CheckValidator validator = new Mod10CheckValidator();
        Mod10Check modCheck = createMod10CheckAnnotation( 3, 13, -1, true );
        validator.initialize( modCheck );
        assertTrue( validator.isValid( "xxx-7992739871-x", null ) );
    }

    @Test
    public void testValidMod10WithGivenRangeAndExplicitCheckDigit() throws Exception {
        Mod10CheckValidator validator = new Mod10CheckValidator();
        Mod10Check modCheck = createMod10CheckAnnotation( 3, 13, 15, true );
        validator.initialize( modCheck );
        assertTrue( validator.isValid( "xxx-799273987-x1x", null ) );
    }

    @Test
    public void testValidMod10WithGivenRangeAndCheckDigitIndex() throws Exception {
        Mod10CheckValidator validator = new Mod10CheckValidator();
        Mod10Check modCheck = createMod10CheckAnnotation( 3, 12, 13, true );
        validator.initialize( modCheck );
        assertTrue( validator.isValid( "123-7992739871-2-456", null ) );
    }

    @Test
    public void testInvalidMod10() throws Exception {
        Mod10CheckValidator validator = new Mod10CheckValidator();
        Mod10Check modCheck = createMod10CheckAnnotation( 0, Integer.MAX_VALUE, -1, false );
        validator.initialize( modCheck );
        assertFalse( validator.isValid( new MyCustomStringImpl( "79927398713" ), null ) );
    }

    @Test
    @TestForIssue(jiraKey = "HV-813")
    public void testValidEAN_GTIN_13() throws Exception {
        Mod10CheckValidator validator = new Mod10CheckValidator();
        Mod10Check modCheck = createMod10CheckAnnotation( 0, Integer.MAX_VALUE, -1, true );
        validator.initialize( modCheck );
        assertTrue( validator.isValid( new MyCustomStringImpl( "4 007630 00011 6" ), null ) );
        assertTrue( validator.isValid( new MyCustomStringImpl( "1 234567 89012 8" ), null ) );
    }

    @Test
    @TestForIssue(jiraKey = "HV-813")
    public void testValidEAN_GTIN_14() throws Exception {
        Mod10CheckValidator validator = new Mod10CheckValidator();
        Mod10Check modCheck = createMod10CheckAnnotation( 0, Integer.MAX_VALUE, -1, true );
        validator.initialize( modCheck );
        assertTrue( validator.isValid( new MyCustomStringImpl( "0 40 07630 00011 6" ), null ) );
        assertTrue( validator.isValid( new MyCustomStringImpl( "3 07 12345 00001 0" ), null ) );
    }

    @Test
    @TestForIssue(jiraKey = "HV-813")
    public void testValidEAN_14WithEAN_128() throws Exception {
        Mod10CheckValidator validator = new Mod10CheckValidator();
        Mod10Check modCheck = createMod10CheckAnnotation( 5, 22, -1, true );
        validator.initialize( modCheck );
        assertTrue( validator.isValid( new MyCustomStringImpl( "(01) 1 23 45678 90123 1" ), null ) );
    }

    @Test
    @TestForIssue(jiraKey = "HV-813")
    public void testValidISBN_13() throws Exception {
        Mod10CheckValidator validator = new Mod10CheckValidator();
        Mod10Check modCheck = createMod10CheckAnnotation( 0, Integer.MAX_VALUE, -1, true );
        validator.initialize( modCheck );
        assertTrue( validator.isValid( new MyCustomStringImpl( "978-85-61411-03-9" ), null ) );
        assertTrue( validator.isValid( new MyCustomStringImpl( "978-1-4302-1957-6" ), null ) );
    }

    @Test
    @TestForIssue(jiraKey = "HV-813")
    public void testValidIdentcode() throws Exception {
        // Deutsche Post Identcode uses non-default multiplier/weight (4/9)
        Mod10CheckValidator validator = new Mod10CheckValidator();
        Mod10Check modCheck = createMod10CheckAnnotation( 0, Integer.MAX_VALUE, -1, true, 4, 9 );
        validator.initialize( modCheck );
        assertTrue( validator.isValid( new MyCustomStringImpl( "56.310 243.031 3" ), null ) );
    }

    /**
     * Configures the Mod10 constraint through the programmatic API and
     * validates a bean against it.
     * <p>
     * Renamed from {@code testProgrammaticMod11Constraint}: the test exercises
     * {@link Mod10CheckDef} (Mod10), not a Mod11 constraint — the old name was
     * a copy-paste error.
     */
    @Test
    @TestForIssue(jiraKey = "HV-812")
    public void testProgrammaticMod10Constraint() {
        final HibernateValidatorConfiguration config = getConfiguration( HibernateValidator.class );
        ConstraintMapping mapping = config.createConstraintMapping();
        mapping.type( Product.class )
                .property( "productNumber", FIELD )
                .constraint(
                        new Mod10CheckDef()
                                .multiplier( 3 )
                                .weight( 1 )
                                .startIndex( 0 )
                                .endIndex( 12 )
                                .checkDigitIndex( -1 )
                                .ignoreNonDigitCharacters( true )
                );
        config.addMapping( mapping );
        Validator validator = config.buildValidatorFactory().getValidator();
        Product product = new Product( "P-79927398712" );
        Set<ConstraintViolation<Product>> constraintViolations = validator.validate( product );
        assertNumberOfViolations( constraintViolations, 0 );
    }

    /**
     * Builds a {@code @Mod10Check} annotation instance with the given
     * attribute values.
     */
    private Mod10Check createMod10CheckAnnotation(int start, int end, int checkDigitIndex, boolean ignoreNonDigits, int multiplier, int weight) {
        AnnotationDescriptor<Mod10Check> descriptor = new AnnotationDescriptor<Mod10Check>( Mod10Check.class );
        descriptor.setValue( "startIndex", start );
        descriptor.setValue( "endIndex", end );
        descriptor.setValue( "checkDigitIndex", checkDigitIndex );
        descriptor.setValue( "ignoreNonDigitCharacters", ignoreNonDigits );
        descriptor.setValue( "multiplier", multiplier );
        descriptor.setValue( "weight", weight );
        return AnnotationFactory.create( descriptor );
    }

    /**
     * Convenience overload using the annotation's default multiplier (3) and
     * weight (1).
     */
    private Mod10Check createMod10CheckAnnotation(int start, int end, int checkDigitIndex, boolean ignoreNonDigits) {
        return this.createMod10CheckAnnotation( start, end, checkDigitIndex, ignoreNonDigits, 3, 1 );
    }

    /** Simple bean whose {@code productNumber} field is validated by reflection. */
    private static class Product {
        private final String productNumber;

        private Product(String productNumber) {
            this.productNumber = productNumber;
        }
    }
}
| |
/*
* Copyright 2013 Gunnar Kappei.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.getty.cdwa.cdwaLite;
/**
* A document containing one administrativeMetadata(@http://www.getty.edu/CDWA/CDWALite) element.
*
* This is a complex type.
*/
public interface AdministrativeMetadataDocument extends org.apache.xmlbeans.XmlObject
{
public static final org.apache.xmlbeans.SchemaType type = (org.apache.xmlbeans.SchemaType)
org.apache.xmlbeans.XmlBeans.typeSystemForClassLoader(AdministrativeMetadataDocument.class.getClassLoader(), "schemaorg_apache_xmlbeans.system.s1F643FAF4399D1206A04583D585DB128").resolveHandle("administrativemetadata5112doctype");
/**
* Gets the "administrativeMetadata" element
*/
edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument.AdministrativeMetadata getAdministrativeMetadata();
/**
* Sets the "administrativeMetadata" element
*/
void setAdministrativeMetadata(edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument.AdministrativeMetadata administrativeMetadata);
/**
* Appends and returns a new empty "administrativeMetadata" element
*/
edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument.AdministrativeMetadata addNewAdministrativeMetadata();
/**
* An XML administrativeMetadata(@http://www.getty.edu/CDWA/CDWALite).
*
* This is a complex type.
*/
public interface AdministrativeMetadata extends org.apache.xmlbeans.XmlObject
{
public static final org.apache.xmlbeans.SchemaType type = (org.apache.xmlbeans.SchemaType)
org.apache.xmlbeans.XmlBeans.typeSystemForClassLoader(AdministrativeMetadata.class.getClassLoader(), "schemaorg_apache_xmlbeans.system.s1F643FAF4399D1206A04583D585DB128").resolveHandle("administrativemetadatad6abelemtype");
/**
* Gets a List of "rightsWork" elements
*/
java.util.List<edu.getty.cdwa.cdwaLite.RightsWorkDocument.RightsWork> getRightsWorkList();
/**
* Gets array of all "rightsWork" elements
* @deprecated
*/
@Deprecated
edu.getty.cdwa.cdwaLite.RightsWorkDocument.RightsWork[] getRightsWorkArray();
/**
* Gets ith "rightsWork" element
*/
edu.getty.cdwa.cdwaLite.RightsWorkDocument.RightsWork getRightsWorkArray(int i);
/**
* Returns number of "rightsWork" element
*/
int sizeOfRightsWorkArray();
/**
* Sets array of all "rightsWork" element
*/
void setRightsWorkArray(edu.getty.cdwa.cdwaLite.RightsWorkDocument.RightsWork[] rightsWorkArray);
/**
* Sets ith "rightsWork" element
*/
void setRightsWorkArray(int i, edu.getty.cdwa.cdwaLite.RightsWorkDocument.RightsWork rightsWork);
/**
* Inserts and returns a new empty value (as xml) as the ith "rightsWork" element
*/
edu.getty.cdwa.cdwaLite.RightsWorkDocument.RightsWork insertNewRightsWork(int i);
/**
* Appends and returns a new empty value (as xml) as the last "rightsWork" element
*/
edu.getty.cdwa.cdwaLite.RightsWorkDocument.RightsWork addNewRightsWork();
/**
* Removes the ith "rightsWork" element
*/
void removeRightsWork(int i);
/**
* Gets the "recordWrap" element
*/
edu.getty.cdwa.cdwaLite.RecordWrapDocument.RecordWrap getRecordWrap();
/**
* True if has "recordWrap" element
*/
boolean isSetRecordWrap();
/**
* Sets the "recordWrap" element
*/
void setRecordWrap(edu.getty.cdwa.cdwaLite.RecordWrapDocument.RecordWrap recordWrap);
/**
* Appends and returns a new empty "recordWrap" element
*/
edu.getty.cdwa.cdwaLite.RecordWrapDocument.RecordWrap addNewRecordWrap();
/**
* Unsets the "recordWrap" element
*/
void unsetRecordWrap();
/**
* Gets the "resourceWrap" element
*/
edu.getty.cdwa.cdwaLite.ResourceWrapDocument.ResourceWrap getResourceWrap();
/**
* True if has "resourceWrap" element
*/
boolean isSetResourceWrap();
/**
* Sets the "resourceWrap" element
*/
void setResourceWrap(edu.getty.cdwa.cdwaLite.ResourceWrapDocument.ResourceWrap resourceWrap);
/**
* Appends and returns a new empty "resourceWrap" element
*/
edu.getty.cdwa.cdwaLite.ResourceWrapDocument.ResourceWrap addNewResourceWrap();
/**
* Unsets the "resourceWrap" element
*/
void unsetResourceWrap();
/**
* A factory class with static methods for creating instances
* of this type.
*/
public static final class Factory
{
public static edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument.AdministrativeMetadata newInstance() {
return (edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument.AdministrativeMetadata) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, null ); }
public static edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument.AdministrativeMetadata newInstance(org.apache.xmlbeans.XmlOptions options) {
return (edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument.AdministrativeMetadata) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, options ); }
private Factory() { } // No instance of this class allowed
}
}
/**
* A factory class with static methods for creating instances
* of this type.
*/
public static final class Factory
{
public static edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument newInstance() {
return (edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, null ); }
public static edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument newInstance(org.apache.xmlbeans.XmlOptions options) {
return (edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, options ); }
/** @param xmlAsString the string value to parse */
public static edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument parse(java.lang.String xmlAsString) throws org.apache.xmlbeans.XmlException {
return (edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xmlAsString, type, null ); }
public static edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument parse(java.lang.String xmlAsString, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException {
return (edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xmlAsString, type, options ); }
/** @param file the file from which to load an xml document */
public static edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument parse(java.io.File file) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( file, type, null ); }
public static edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument parse(java.io.File file, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( file, type, options ); }
public static edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument parse(java.net.URL u) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( u, type, null ); }
public static edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument parse(java.net.URL u, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( u, type, options ); }
public static edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument parse(java.io.InputStream is) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( is, type, null ); }
public static edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument parse(java.io.InputStream is, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( is, type, options ); }
public static edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument parse(java.io.Reader r) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( r, type, null ); }
public static edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument parse(java.io.Reader r, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( r, type, options ); }
public static edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument parse(javax.xml.stream.XMLStreamReader sr) throws org.apache.xmlbeans.XmlException {
return (edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( sr, type, null ); }
public static edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument parse(javax.xml.stream.XMLStreamReader sr, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException {
return (edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( sr, type, options ); }
public static edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument parse(org.w3c.dom.Node node) throws org.apache.xmlbeans.XmlException {
return (edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( node, type, null ); }
public static edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument parse(org.w3c.dom.Node node, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException {
return (edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( node, type, options ); }
/**
 * Parses a document from a legacy XMLBeans stream using default XML options.
 *
 * @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} is deprecated; prefer the XMLStreamReader overload.
 */
@Deprecated
public static edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument parse(org.apache.xmlbeans.xml.stream.XMLInputStream xis) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
return (edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xis, type, null ); }
/**
 * Parses a document from a legacy XMLBeans stream with caller-supplied XML options.
 *
 * @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} is deprecated; prefer the XMLStreamReader overload.
 */
@Deprecated
public static edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument parse(org.apache.xmlbeans.xml.stream.XMLInputStream xis, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
return (edu.getty.cdwa.cdwaLite.AdministrativeMetadataDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xis, type, options ); }
/**
 * Wraps the given stream so that content is validated against this document's schema type as it is read.
 *
 * @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} is deprecated.
 */
@Deprecated
public static org.apache.xmlbeans.xml.stream.XMLInputStream newValidatingXMLInputStream(org.apache.xmlbeans.xml.stream.XMLInputStream xis) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
return org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newValidatingXMLInputStream( xis, type, null ); }
/**
 * Wraps the given stream so that content is validated against this document's schema type, honoring the supplied options.
 *
 * @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} is deprecated.
 */
@Deprecated
public static org.apache.xmlbeans.xml.stream.XMLInputStream newValidatingXMLInputStream(org.apache.xmlbeans.xml.stream.XMLInputStream xis, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
return org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newValidatingXMLInputStream( xis, type, options ); }
private Factory() { } // Static factory holder only; no instances are ever created
}
}
| |
package liquibase.integration.commandline;
import liquibase.CatalogAndSchema;
import liquibase.command.CommandExecutionException;
import liquibase.command.CommandFactory;
import liquibase.command.core.DiffCommand;
import liquibase.command.core.DiffToChangeLogCommand;
import liquibase.command.core.GenerateChangeLogCommand;
import liquibase.database.Database;
import liquibase.database.DatabaseFactory;
import liquibase.database.OfflineConnection;
import liquibase.database.core.*;
import liquibase.diff.DiffStatusListener;
import liquibase.diff.compare.CompareControl;
import liquibase.diff.output.DiffOutputControl;
import liquibase.exception.*;
import liquibase.executor.ExecutorService;
import liquibase.logging.LogFactory;
import liquibase.resource.ClassLoaderResourceAccessor;
import liquibase.resource.ResourceAccessor;
import liquibase.snapshot.InvalidExampleException;
import liquibase.statement.core.RawSqlStatement;
import liquibase.structure.DatabaseObject;
import liquibase.structure.core.Schema;
import liquibase.util.StringUtils;
import javax.xml.parsers.ParserConfigurationException;
import java.io.IOException;
/**
* Common Utility methods used in the CommandLine application and the Maven plugin.
* These methods were originally moved from {@link Main} so they could be shared.
*
* @author Peter Murray
*/
/**
 * Common utility methods shared by the Liquibase command-line application and
 * the Maven plugin: constructing configured {@link Database} instances,
 * initializing the database session's default schema, and running the
 * diff / diffChangeLog / generateChangeLog commands.
 */
public class CommandLineUtils {
/**
 * @deprecated Use ResourceAccessor version
 */
public static Database createDatabaseObject(ClassLoader classLoader,
String url,
String username,
String password,
String driver,
String defaultCatalogName,
String defaultSchemaName,
boolean outputDefaultCatalog,
boolean outputDefaultSchema,
String databaseClass,
String driverPropertiesFile,
String propertyProviderClass,
String liquibaseCatalogName,
String liquibaseSchemaName,
String databaseChangeLogTableName,
String databaseChangeLogLockTableName) throws DatabaseException {
// Delegates to the ResourceAccessor-based overload by wrapping the ClassLoader.
return createDatabaseObject(new ClassLoaderResourceAccessor(classLoader), url, username, password, driver, defaultCatalogName, defaultSchemaName, outputDefaultCatalog, outputDefaultSchema, databaseClass, driverPropertiesFile, propertyProviderClass, liquibaseCatalogName, liquibaseSchemaName, databaseChangeLogTableName, databaseChangeLogLockTableName);
}
/**
 * Opens a {@link Database} for the given connection settings, then applies
 * catalog/schema defaults, output flags, changelog table names, and runs
 * engine-specific session initialization.
 *
 * @return the fully configured Database
 * @throws DatabaseException wrapping any failure during open or configuration
 */
public static Database createDatabaseObject(ResourceAccessor resourceAccessor,
String url,
String username,
String password,
String driver,
String defaultCatalogName,
String defaultSchemaName,
boolean outputDefaultCatalog,
boolean outputDefaultSchema,
String databaseClass,
String driverPropertiesFile,
String propertyProviderClass,
String liquibaseCatalogName,
String liquibaseSchemaName,
String databaseChangeLogTableName,
String databaseChangeLogLockTableName) throws DatabaseException {
try {
// Blank strings are treated the same as "not provided".
liquibaseCatalogName = StringUtils.trimToNull(liquibaseCatalogName);
liquibaseSchemaName = StringUtils.trimToNull(liquibaseSchemaName);
defaultCatalogName = StringUtils.trimToNull(defaultCatalogName);
defaultSchemaName = StringUtils.trimToNull(defaultSchemaName);
databaseChangeLogTableName = StringUtils.trimToNull(databaseChangeLogTableName);
databaseChangeLogLockTableName = StringUtils.trimToNull(databaseChangeLogLockTableName);
Database database = DatabaseFactory.getInstance().openDatabase(url, username, password, driver, databaseClass, driverPropertiesFile, propertyProviderClass, resourceAccessor);
// Engines without schema support treat the schema name as the catalog name.
if (!database.supportsSchemas()) {
if (defaultSchemaName != null && defaultCatalogName == null) {
defaultCatalogName = defaultSchemaName;
}
if (liquibaseSchemaName != null && liquibaseCatalogName == null) {
liquibaseCatalogName = liquibaseSchemaName;
}
}
defaultCatalogName = StringUtils.trimToNull(defaultCatalogName);
defaultSchemaName = StringUtils.trimToNull(defaultSchemaName);
database.setDefaultCatalogName(defaultCatalogName);
database.setDefaultSchemaName(defaultSchemaName);
database.setOutputDefaultCatalog(outputDefaultCatalog);
database.setOutputDefaultSchema(outputDefaultSchema);
database.setLiquibaseCatalogName(liquibaseCatalogName);
database.setLiquibaseSchemaName(liquibaseSchemaName);
if (databaseChangeLogTableName!=null) {
database.setDatabaseChangeLogTableName(databaseChangeLogTableName);
// A custom changelog table without an explicit lock table gets "<table>LOCK".
if (databaseChangeLogLockTableName!=null) {
database.setDatabaseChangeLogLockTableName(databaseChangeLogLockTableName);
} else {
database.setDatabaseChangeLogLockTableName(databaseChangeLogTableName+"LOCK");
}
}
//Todo: move to database object methods in 4.0
initializeDatabase(username, defaultCatalogName, defaultSchemaName, database);
// ValidationErrors errors = database.validate();
// if (errors.hasErrors()) {
// throw new DatabaseException("Database validation failed: "+errors.toString());
// } else {
// for (String warning : errors.getWarningMessages()) {
// LogFactory.getInstance().getLog().warning(warning);
// }
// }
return database;
} catch (Exception e) {
throw new DatabaseException(e);
}
}
/**
 * Executes RawSqlStatements particular to each database engine to set the default schema for the given Database
 *
 * @param username The username used for the connection. Used with MSSQL databases
 * @param defaultCatalogName Catalog name and schema name are similar concepts. Used if defaultCatalogName is null.
 * @param defaultSchemaName Catalog name and schema name are similar concepts. Catalog is used with Oracle, DB2 and MySQL, and takes
 * precedence over the schema name.
 * @param database Which Database object is affected by the initialization.
 * @throws DatabaseException
 */
public static void initializeDatabase(String username, String defaultCatalogName, String defaultSchemaName, Database database) throws DatabaseException {
// Offline connections have no live session, so there is nothing to initialize.
if ((defaultCatalogName != null || defaultSchemaName != null) && !(database.getConnection() instanceof OfflineConnection)) {
if (database instanceof OracleDatabase) {
// Oracle: catalog name takes precedence over schema name.
String schema = defaultCatalogName;
if (schema == null) {
schema = defaultSchemaName;
}
ExecutorService.getInstance().getExecutor(database).execute(new RawSqlStatement("ALTER SESSION SET CURRENT_SCHEMA="+database.escapeObjectName(schema, Schema.class)));
} else if (database instanceof MSSQLDatabase && defaultSchemaName != null) {
boolean sql2005OrLater = true;
try {
sql2005OrLater = database.getDatabaseMajorVersion() >= 9;
} catch (DatabaseException e) {
// Assume SQL Server 2005 or later
}
// ALTER USER ... WITH DEFAULT_SCHEMA requires SQL Server 2005+ and a known user.
if (sql2005OrLater && username != null) {
ExecutorService.getInstance().getExecutor(database).execute(new RawSqlStatement(
"IF USER_NAME() <> N'dbo'\r\n" +
"BEGIN\r\n" +
"	DECLARE @sql [nvarchar](MAX)\r\n" +
"	SELECT @sql = N'ALTER USER ' + QUOTENAME(USER_NAME()) + N' WITH DEFAULT_SCHEMA = " + database.escapeStringForDatabase(database.escapeObjectName(username, DatabaseObject.class)) + "'\r\n" +
"	EXEC sp_executesql @sql\r\n" +
"END"));
} } else if (database instanceof PostgresDatabase && defaultSchemaName != null) {
ExecutorService.getInstance().getExecutor(database).execute(new RawSqlStatement("SET SEARCH_PATH TO " + database.escapeObjectName(defaultSchemaName, Schema.class)));
} else if (database instanceof AbstractDb2Database) {
// DB2: catalog name takes precedence over schema name.
String schema = defaultCatalogName;
if (schema == null) {
schema = defaultSchemaName;
}
ExecutorService.getInstance().getExecutor(database).execute(new RawSqlStatement("SET CURRENT SCHEMA "+schema));
} else if (database instanceof MySQLDatabase) {
// MySQL: "schema" and "database" are the same concept; switch with USE.
String schema = defaultCatalogName;
if (schema == null) {
schema = defaultSchemaName;
}
ExecutorService.getInstance().getExecutor(database).execute(new RawSqlStatement("USE "+schema));
}
}
}
/** Runs the diff command comparing all schemas of the two databases, printing results to stdout. */
public static void doDiff(Database referenceDatabase, Database targetDatabase, String snapshotTypes) throws LiquibaseException {
doDiff(referenceDatabase, targetDatabase, snapshotTypes, null);
}
/**
 * Runs the diff command comparing the given schema pairs of the two databases,
 * printing results to stdout.
 *
 * @throws LiquibaseException wrapping any command execution failure
 */
public static void doDiff(Database referenceDatabase, Database targetDatabase, String snapshotTypes, CompareControl.SchemaComparison[] schemaComparisons) throws LiquibaseException {
DiffCommand diffCommand = (DiffCommand) CommandFactory.getInstance().getCommand("diff");
diffCommand
.setReferenceDatabase(referenceDatabase)
.setTargetDatabase(targetDatabase)
.setCompareControl(new CompareControl(schemaComparisons, snapshotTypes))
.setSnapshotTypes(snapshotTypes)
.setOutputStream(System.out);
System.out.println("");
System.out.println("Diff Results:");
try {
diffCommand.execute();
} catch (CommandExecutionException e) {
throw new LiquibaseException(e);
}
}
/** Writes a changelog capturing the differences between the two databases (all schemas). */
public static void doDiffToChangeLog(String changeLogFile,
Database referenceDatabase,
Database targetDatabase,
DiffOutputControl diffOutputControl,
String snapshotTypes)
throws LiquibaseException, IOException, ParserConfigurationException {
doDiffToChangeLog(changeLogFile, referenceDatabase, targetDatabase, diffOutputControl, snapshotTypes, null);
}
/**
 * Writes a changelog capturing the differences between the two databases for
 * the given schema pairs. Progress output goes to stdout.
 *
 * @throws LiquibaseException wrapping any command execution failure
 */
public static void doDiffToChangeLog(String changeLogFile,
Database referenceDatabase,
Database targetDatabase,
DiffOutputControl diffOutputControl,
String snapshotTypes,
CompareControl.SchemaComparison[] schemaComparisons)
throws LiquibaseException, IOException, ParserConfigurationException {
DiffToChangeLogCommand command = (DiffToChangeLogCommand) CommandFactory.getInstance().getCommand("diffChangeLog");
command.setReferenceDatabase(referenceDatabase)
.setTargetDatabase(targetDatabase)
.setSnapshotTypes(snapshotTypes)
.setCompareControl(new CompareControl(schemaComparisons, snapshotTypes))
.setOutputStream(System.out);
command.setChangeLogFile(changeLogFile)
.setDiffOutputControl(diffOutputControl);
try {
command.execute();
} catch (CommandExecutionException e) {
throw new LiquibaseException(e);
}
}
/** Generates a changelog from a single catalog/schema of an existing database. */
public static void doGenerateChangeLog(String changeLogFile, Database originalDatabase, String catalogName, String schemaName, String snapshotTypes, String author, String context, String dataDir, DiffOutputControl diffOutputControl) throws DatabaseException, IOException, ParserConfigurationException, InvalidExampleException, LiquibaseException {
doGenerateChangeLog(changeLogFile, originalDatabase, new CatalogAndSchema[] {new CatalogAndSchema(catalogName, schemaName)}, snapshotTypes, author, context, dataDir, diffOutputControl);
}
/**
 * Generates a changelog from the given catalogs/schemas of an existing
 * database, attributing changesets to {@code author} under {@code context}.
 *
 * @throws LiquibaseException wrapping any command execution failure
 */
public static void doGenerateChangeLog(String changeLogFile, Database originalDatabase, CatalogAndSchema[] schemas, String snapshotTypes, String author, String context, String dataDir, DiffOutputControl diffOutputControl) throws DatabaseException, IOException, ParserConfigurationException, InvalidExampleException, LiquibaseException {
// Each schema is compared against itself so the snapshot covers it fully.
CompareControl.SchemaComparison[] comparisons = new CompareControl.SchemaComparison[schemas.length];
int i=0;
for (CatalogAndSchema schema : schemas) {
comparisons[i++] = new CompareControl.SchemaComparison(schema, schema);
}
CompareControl compareControl = new CompareControl(comparisons, snapshotTypes);
diffOutputControl.setDataDir(dataDir);
GenerateChangeLogCommand command = (GenerateChangeLogCommand) CommandFactory.getInstance().getCommand("generateChangeLog");
command.setReferenceDatabase(originalDatabase)
.setSnapshotTypes(snapshotTypes)
.setOutputStream(System.out)
.setCompareControl(compareControl);
command.setChangeLogFile(changeLogFile)
.setDiffOutputControl(diffOutputControl);
command.setAuthor(author)
.setContext(context);
try {
command.execute();
} catch (CommandExecutionException e) {
throw new LiquibaseException(e);
}
}
/** Forwards diff status messages to the Liquibase logger at INFO level. */
private static class OutDiffStatusListener implements DiffStatusListener {
@Override
public void statusUpdate(String message) {
LogFactory.getLogger().info(message);
}
}
}
| |
package com.beerme.android.ui;
import java.text.DateFormat;
import java.text.ParseException;
import java.util.Calendar;
import java.util.Locale;
import android.content.Intent;
import android.os.Bundle;
import androidx.fragment.app.DialogFragment;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.EditText;
import android.widget.TextView;
import com.beerme.android.R;
import com.beerme.android.database.BeerNote;
import com.beerme.android.database.BeerNote.Source;
import com.beerme.android.utils.DatePickerFragment;
import com.beerme.android.utils.PkgPickerFragment;
import com.beerme.android.utils.RatingPickerFragment;
import com.beerme.android.utils.Utils;
/**
 * Activity for creating or editing a {@link BeerNote}. Collects a sampling
 * date, package type, place, four sub-scores (appearance/aroma/mouthfeel/
 * overall) via picker dialogs, and free-text notes; the total score out of 20
 * is the sum of the four sub-scores.
 */
public class EditableBeerNote extends BeerMeActivity implements
DatePickerFragment.DateSetter, PkgPickerFragment.PkgSetter,
RatingPickerFragment.RatingListener {
// Note being edited; null when creating a new one.
private BeerNote mNote = null;
// Beer this note belongs to; -1 until known.
private long mBeerId = -1;
private TextView mSampledView;
private TextView mScoreView;
private TextView mPkgView;
private EditText mPlaceView;
private TextView mAppRatingView;
private float mAppRating = 0;
private EditText mAppView;
private TextView mAroRatingView;
private float mAroRating = 0;
private EditText mAroView;
private TextView mMouRatingView;
private float mMouRating = 0;
private EditText mMouView;
private TextView mOvrRatingView;
private float mOvrRating = 0;
private EditText mNotesView;
// Long-form date format in the device locale, used for the "sampled" field.
private DateFormat mDateFormat = DateFormat.getDateInstance(
DateFormat.LONG, Locale.getDefault());
// LOW: AND0092: RFE: Allow user to choose rating method
/**
 * Wires up all views and rating-picker listeners, then populates the form
 * from a saved instance state or the launching Intent's "note"/"beerid" extras.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.editable_beer_note);
mSampledView = (TextView) findViewById(R.id.editablebeernote_sampled);
mSampledView.setOnClickListener(onSampledViewClicked);
mScoreView = (TextView) findViewById(R.id.editablebeernote_score);
mPkgView = (TextView) findViewById(R.id.editablebeernote_pkg);
mPkgView.setOnClickListener(onPkgViewClicked);
mPlaceView = (EditText) findViewById(R.id.editablebeernote_place);
mAppRatingView = (TextView) findViewById(R.id.editablebeernote_appRating);
// Appearance is rated out of 3.
mAppRatingView.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
RatingPickerFragment newFragment = RatingPickerFragment
.newInstance(mAppRatingView.getId(), mNote == null ? 0
: mNote.getAppscore(), 3);
newFragment
.show(getSupportFragmentManager(), "appRatingPicker");
}
});
mAppView = (EditText) findViewById(R.id.editablebeernote_appearance);
mAroRatingView = (TextView) findViewById(R.id.editablebeernote_aroRating);
// Aroma is rated out of 4.
mAroRatingView.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
RatingPickerFragment newFragment = RatingPickerFragment
.newInstance(mAroRatingView.getId(), mNote == null ? 0
: mNote.getAroscore(), 4);
newFragment
.show(getSupportFragmentManager(), "aroRatingPicker");
}
});
mAroView = (EditText) findViewById(R.id.editablebeernote_aroma);
mMouRatingView = (TextView) findViewById(R.id.editablebeernote_mouRating);
// Mouthfeel is rated out of 10.
mMouRatingView.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
RatingPickerFragment newFragment = RatingPickerFragment
.newInstance(mMouRatingView.getId(), mNote == null ? 0
: mNote.getMouscore(), 10);
newFragment
.show(getSupportFragmentManager(), "mouRatingPicker");
}
});
mMouView = (EditText) findViewById(R.id.editablebeernote_mouthfeel);
mOvrRatingView = (TextView) findViewById(R.id.editablebeernote_ovrRating);
// Overall impression is rated out of 3.
mOvrRatingView.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
RatingPickerFragment newFragment = RatingPickerFragment
.newInstance(mOvrRatingView.getId(), mNote == null ? 0
: mNote.getOvrscore(), 3);
newFragment
.show(getSupportFragmentManager(), "ovrRatingPicker");
}
});
mNotesView = (EditText) findViewById(R.id.editablebeernote_notes);
// Prefer the note saved across a configuration change; otherwise read the Intent.
if (savedInstanceState != null) {
mNote = (BeerNote) savedInstanceState.getSerializable("note");
} else {
Intent intent = getIntent();
mNote = (BeerNote) intent.getSerializableExtra("note");
mBeerId = intent.getLongExtra("beerid", -1);
}
// Editing an existing note: fill every field from it.
if (mNote != null) {
mBeerId = mNote.getBeerId();
mSampledView.setText(mNote.getSampled());
mScoreView.setText(getString(R.string.N_out_of_20_points,
Utils.toFrac(mNote.getScore())));
mPkgView.setText(mNote.getPkg());
mPlaceView.setText(mNote.getPlace());
mAppRating = mNote.getAppscore();
mAppRatingView.setText(getString(R.string.N_outOfThree,
Utils.toFrac(mAppRating)));
mAppView.setText(mNote.getAppearance());
mAroRating = mNote.getAroscore();
mAroRatingView.setText(getString(R.string.N_outOfFour,
Utils.toFrac(mAroRating)));
mAroView.setText(mNote.getAroma());
mMouRating = mNote.getMouscore();
mMouRatingView.setText(getString(R.string.N_outOfTen,
Utils.toFrac(mMouRating)));
mMouView.setText(mNote.getMouthfeel());
mOvrRating = mNote.getOvrscore();
mOvrRatingView.setText(getString(R.string.N_outOfThree,
Utils.toFrac(mOvrRating)));
mNotesView.setText(mNote.getNotes());
}
}
@Override
public void onStart() {
super.onStart();
Utils.trackActivityStart(this);
}
// Snapshot the current form into mNote so edits survive a configuration change.
@Override
protected void onSaveInstanceState(Bundle outState) {
mNote = makeNewNote(mNote == null ? -1 : mNote.getId());
outState.putSerializable("note", mNote);
super.onSaveInstanceState(outState);
}
@Override
public void onStop() {
super.onStop();
Utils.trackActivityStop(this);
}
// Opens the date picker seeded with the currently displayed sampled date (or today).
private OnClickListener onSampledViewClicked = new OnClickListener() {
@Override
public void onClick(View v) {
Calendar cal = Calendar.getInstance(Locale.getDefault());
String sampled = mSampledView.getText().toString();
if (!"".equals(sampled)) {
try {
cal.setTime(mDateFormat.parse(sampled));
} catch (ParseException e) {
// Unparseable text: fall back to today's date already in cal.
Log.w(Utils.APPTAG, e.getLocalizedMessage());
}
}
DialogFragment dialog = DatePickerFragment.newInstance(cal);
dialog.show(getSupportFragmentManager(), "datePicker");
}
};
/** DatePickerFragment.DateSetter callback: shows the chosen date in the sampled field. */
@Override
public void setDateFromDatePicker(int y, int m, int d) {
Calendar cal = Calendar.getInstance(Locale.getDefault());
cal.set(y, m, d);
mSampledView.setText(mDateFormat.format(cal.getTime()));
}
// Opens the package-type picker dialog.
private OnClickListener onPkgViewClicked = new OnClickListener() {
@Override
public void onClick(View v) {
DialogFragment newFragment = new PkgPickerFragment();
newFragment.show(getSupportFragmentManager(), "pkgPicker");
}
};
/** PkgPickerFragment.PkgSetter callback: shows the chosen package type. */
@Override
public void setPkgFromPicker(String p) {
mPkgView.setText(p);
}
/**
 * RatingPickerFragment.RatingListener callback: stores the sub-score for the
 * view that launched the picker and refreshes the total score (sum of the
 * four sub-scores, out of 20).
 */
@Override
public void onRatingSet(int viewId, float rating) {
switch (viewId) {
case R.id.editablebeernote_appRating:
mAppRatingView.setText(getString(R.string.N_outOfThree,
Utils.toFrac(rating)));
mAppRating = rating;
break;
case R.id.editablebeernote_aroRating:
mAroRatingView.setText(getString(R.string.N_outOfFour,
Utils.toFrac(rating)));
mAroRating = rating;
break;
case R.id.editablebeernote_mouRating:
mMouRatingView.setText(getString(R.string.N_outOfTen,
Utils.toFrac(rating)));
mMouRating = rating;
break;
case R.id.editablebeernote_ovrRating:
mOvrRatingView.setText(getString(R.string.N_outOfThree,
Utils.toFrac(rating)));
mOvrRating = rating;
break;
}
mScoreView
.setText(getString(
R.string.N_out_of_20_points,
Utils.toFrac(mAppRating + mAroRating + mMouRating
+ mOvrRating)));
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
super.onCreateOptionsMenu(menu);
getMenuInflater().inflate(R.menu.editable_note, menu);
return true;
}
// Save persists the form as a note; cancel just closes the activity.
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.action_save:
mNote = makeNewNote(mNote == null ? -1 : mNote.getId());
mNote.save(this);
this.finish();
return true;
case R.id.action_cancel:
this.finish();
return true;
}
return super.onOptionsItemSelected(item);
}
/**
 * Builds a BeerNote from the current form contents.
 *
 * @param noteid id of the note being replaced, or -1 for a new note
 */
private BeerNote makeNewNote(final long noteid) {
BeerNote note = BeerNote.newInstance(this, noteid, Source.MY);
note.setAppearance(mAppView.getText().toString());
note.setAppscore(mAppRating);
note.setAroma(mAroView.getText().toString());
note.setAroscore(mAroRating);
note.setBeerId(mBeerId);
note.setMouscore(mMouRating);
note.setMouthfeel(mMouView.getText().toString());
note.setNotes(mNotesView.getText().toString());
note.setOvrscore(mOvrRating);
note.setPkg(mPkgView.getText().toString());
note.setPlace(mPlaceView.getText().toString());
note.setSampled(mSampledView.getText().toString());
return note;
}
}
| |
/*
* Copyright (c) 2016, WSO2 Inc. (http://wso2.com) All Rights Reserved.
* <p>
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.gateway.core.flow.mediators.builtin.manipulators.log;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.soap.SOAPEnvelope;
import org.apache.axiom.soap.SOAPHeader;
import org.apache.axiom.soap.SOAPHeaderBlock;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wso2.carbon.gateway.core.Constants;
import org.wso2.carbon.gateway.core.config.Parameter;
import org.wso2.carbon.gateway.core.config.ParameterHolder;
import org.wso2.carbon.gateway.core.flow.AbstractMediator;
import org.wso2.carbon.gateway.core.flow.contentaware.MIMEType;
import org.wso2.carbon.gateway.core.flow.contentaware.messagereaders.Reader;
import org.wso2.carbon.gateway.core.flow.contentaware.messagereaders.ReaderRegistryImpl;
import org.wso2.carbon.messaging.CarbonCallback;
import org.wso2.carbon.messaging.CarbonMessage;
import org.wso2.carbon.messaging.MessageDataSource;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
/**
* Implementation of Log Mediator
*/
public class LogMediator extends AbstractMediator {
// Configuration parameter names accepted by setParameters().
private static final String CATEGORY = "category";
private static final String LEVEL = "level";
private static final String SEPARATOR = "separator";
/**
 * Log level: only properties specified to the Log mediator
 */
public static final int CUSTOM = 0;
/**
 * Log level: To, From, WSAction, SOAPAction, ReplyTo, MessageID and any properties
 */
public static final int SIMPLE = 1;
/**
 * Log level: all SOAP header blocks and any properties
 */
public static final int HEADERS = 2;
/**
 * Log level: all attributes of level 'simple' and the SOAP envelope and any properties
 */
public static final int FULL = 3;
// Log categories, mapped from the "category" parameter in setParameters().
public static final int CATEGORY_INFO = 0;
public static final int CATEGORY_DEBUG = 1;
public static final int CATEGORY_TRACE = 2;
public static final int CATEGORY_WARN = 3;
public static final int CATEGORY_ERROR = 4;
public static final int CATEGORY_FATAL = 5;
public static final String DEFAULT_SEP = ", ";
/**
 * The default log level is set to SIMPLE
 */
private int logLevel = SIMPLE;
/**
 * The separator for which used to separate logging information
 */
private String separator = DEFAULT_SEP;
/**
 * Category of the log statement
 */
private int category = CATEGORY_INFO;
/**
 * The holder for the custom properties
 */
private final List<LogMediatorProperty> properties = new ArrayList<>();
private static final Logger log = LoggerFactory.getLogger(LogMediator.class);
/** Creates a log mediator with the defaults: SIMPLE level, INFO category, ", " separator. */
public LogMediator() {
}
/** Returns the mediator's configuration-language name, "log". */
@Override
public String getName() {
return "log";
}
/**
 * Builds the log message for the current message at the configured level and
 * writes it through {@link MediatorLog} at the configured category, then
 * forwards the message to the next mediator in the flow.
 *
 * @param carbonMessage  message flowing through the mediation chain
 * @param carbonCallback callback to invoke on the response path
 * @return result of invoking the next mediator
 * @throws Exception if no reader is registered for the message's content type
 */
@Override
public boolean receive(CarbonMessage carbonMessage, CarbonCallback carbonCallback) throws Exception {
// Trace-mode auditing applies only when the category is TRACE
// (was a magic "category == 2 ? true : false").
boolean trace = category == CATEGORY_TRACE;
MediatorLog mediatorLog = new MediatorLog(log, trace, carbonMessage);
Reader reader = null;
// A reader is only needed when the payload has not been materialized yet.
if (!carbonMessage.isAlreadyRead()) {
reader = ReaderRegistryImpl.getInstance().getReader(carbonMessage);
if (reader == null) {
String errMsg = "Cannot find registered message reader for incoming content Type";
log.error(errMsg);
throw new Exception(errMsg);
}
}
switch (category) {
case CATEGORY_INFO:
mediatorLog.auditLog(getLogMessage(carbonMessage, reader));
break;
case CATEGORY_TRACE:
// Guarded so the (possibly expensive) message build is skipped when disabled.
if (mediatorLog.isTraceEnabled()) {
mediatorLog.auditTrace(getLogMessage(carbonMessage, reader));
}
break;
case CATEGORY_DEBUG:
if (mediatorLog.isDebugEnabled()) {
mediatorLog.auditDebug(getLogMessage(carbonMessage, reader));
}
break;
case CATEGORY_WARN:
mediatorLog.auditWarn(getLogMessage(carbonMessage, reader));
break;
case CATEGORY_ERROR:
mediatorLog.auditError(getLogMessage(carbonMessage, reader));
break;
case CATEGORY_FATAL:
mediatorLog.auditFatal(getLogMessage(carbonMessage, reader));
break;
default:
break;
}
return next(carbonMessage, carbonCallback);
}
/**
 * Configures the mediator from its declared parameters: "level" (SIMPLE/CUSTOM/
 * HEADERS/FULL), "category" (INFO/ERROR/WARN/FATAL/DEBUG/TRACE) and "separator",
 * all case-insensitive. Every remaining parameter becomes a custom
 * {@link LogMediatorProperty}: values prefixed "xpath=" or "jsonPath=" are
 * treated as expressions (with any declared namespaces), "namespace=" keys are
 * consumed by the namespace map, and anything else is a literal value.
 *
 * @param parameterHolder declared parameters; recognized entries are removed
 */
public void setParameters(ParameterHolder parameterHolder) {
Parameter levelParameter = parameterHolder.getParameter(LEVEL);
Parameter categoryParameter = parameterHolder.getParameter(CATEGORY);
Parameter separatorParameter = parameterHolder.getParameter(SEPARATOR);
if (levelParameter != null) {
// Uppercase once instead of re-computing it for every comparison.
switch (levelParameter.getValue().toUpperCase(Locale.getDefault())) {
case "SIMPLE":
logLevel = SIMPLE;
break;
case "CUSTOM":
logLevel = CUSTOM;
break;
case "HEADERS":
logLevel = HEADERS;
break;
case "FULL":
logLevel = FULL;
break;
default:
// Unrecognized level: keep the current (default) level, as before.
break;
}
parameterHolder.removeParameter(levelParameter.getName());
}
if (categoryParameter != null) {
switch (categoryParameter.getValue().toUpperCase(Locale.getDefault())) {
case "INFO":
category = CATEGORY_INFO;
break;
case "ERROR":
category = CATEGORY_ERROR;
break;
case "WARN":
category = CATEGORY_WARN;
break;
case "FATAL":
category = CATEGORY_FATAL;
break;
case "DEBUG":
category = CATEGORY_DEBUG;
break;
case "TRACE":
category = CATEGORY_TRACE;
break;
default:
// Unrecognized category: keep the current (default) category, as before.
break;
}
parameterHolder.removeParameter(categoryParameter.getName());
}
if (separatorParameter != null) {
separator = separatorParameter.getValue();
parameterHolder.removeParameter(separatorParameter.getName());
}
Map<String, Parameter> properties = parameterHolder.getParameters();
// Typed Map.Entry avoids the raw-type casts of the original loop.
for (Map.Entry<String, Parameter> entry : properties.entrySet()) {
String key = entry.getKey();
Parameter parameter = entry.getValue();
String val = parameter.getValue();
String expression = null;
if (val.startsWith("xpath=")) {
expression = val.substring("xpath=".length());
} else if (val.startsWith("jsonPath=")) {
expression = val.substring("jsonPath=".length());
}
if (expression != null) {
// Expression property; attach declared namespaces when present.
Map<String, String> map = getNameSpaceMap(properties);
if (map != null) {
this.properties.add(new LogMediatorProperty(key, null, expression, map));
} else {
this.properties.add(new LogMediatorProperty(key, null, expression));
}
} else if (!key.startsWith("namespace=")) {
// Plain literal value (namespace declarations are not logged themselves).
this.properties.add(new LogMediatorProperty(key, val, null));
}
}
}
/**
 * Builds the CUSTOM-level message: only the configured custom properties,
 * joined by the separator (with any leading separator removed).
 */
private String getCustomLogMessage(CarbonMessage carbonMessage, Reader reader) throws Exception {
StringBuffer buffer = new StringBuffer();
setCustomProperties(buffer, carbonMessage, reader);
return trimLeadingSeparator(buffer);
}
/**
 * Builds the SIMPLE-level message: To (header, property, or host:port
 * fallback), then WSAction, SOAPAction, ReplyTo, MessageID when present,
 * the message Direction, and finally any custom properties.
 *
 * <p>Fixes misplaced braces in the original, which nested the
 * WSAction/SOAPAction/ReplyTo/MessageID checks inside the "no To value"
 * branch and the Direction/custom-property output inside the MessageID
 * check — so most SIMPLE fields were silently dropped whenever a To header
 * existed or MessageID was absent.</p>
 */
private String getSimpleLogMessage(CarbonMessage carbonMessage, Reader reader) throws Exception {
StringBuffer sb = new StringBuffer();
if (carbonMessage.getHeader(org.wso2.carbon.messaging.Constants.TO) != null) {
sb.append("To: ").append(carbonMessage.getHeader(org.wso2.carbon.messaging.Constants.TO));
} else if (carbonMessage.getProperty(org.wso2.carbon.messaging.Constants.TO) != null) {
sb.append("To: ").append(carbonMessage.getProperty(org.wso2.carbon.messaging.Constants.TO));
} else {
sb.append("To: ");
// No To value at all: fall back to the origin host:port when known.
if (carbonMessage.getProperty(org.wso2.carbon.messaging.Constants.HOST) != null
&& carbonMessage.getProperty(org.wso2.carbon.messaging.Constants.PORT) != null) {
String receivedFrom = carbonMessage.getProperty(org.wso2.carbon.messaging.Constants.HOST) + ":" +
carbonMessage.getProperty(org.wso2.carbon.messaging.Constants.PORT);
sb.append(separator).append("FROM: ").append(receivedFrom);
}
}
if (carbonMessage.getProperty("WSAction") != null) {
sb.append(separator).append("WSAction: ").append(carbonMessage.getProperty("WSAction"));
}
if (carbonMessage.getProperty(Constants.SOAPACTION) != null) {
sb.append(separator).append("SOAPAction: ").append(carbonMessage.getProperty(Constants.SOAPACTION));
}
if (carbonMessage.getProperty("ReplyTo") != null) {
sb.append(separator).append("ReplyTo: ").append(carbonMessage.getProperty("ReplyTo"));
}
if (carbonMessage.getProperty("MessageID") != null) {
sb.append(separator).append("MessageID: ").append(carbonMessage.getProperty("MessageID"));
}
// Direction defaults to "request" when the message carries no direction property.
sb.append(separator).append("Direction: ")
.append(carbonMessage.getProperty(org.wso2.carbon.messaging.Constants.DIRECTION) != null ?
carbonMessage.getProperty(org.wso2.carbon.messaging.Constants.DIRECTION) :
"request");
setCustomProperties(sb, carbonMessage, reader);
return trimLeadingSeparator(sb);
}
/**
 * Builds the HEADERS-level message: every SOAP header block's local name and
 * text (when the payload is a SOAP envelope), followed by any custom
 * properties.
 *
 * <p>NOTE(review): assumes {@code reader} is non-null whenever the message
 * has no MessageDataSource yet — TODO confirm with the call site in
 * {@code receive}. Returns {@code null} if reading the SOAP header throws;
 * callers appear to pass the result straight to the logger — verify that a
 * null message is acceptable there.</p>
 */
private String getHeadersLogMessage(CarbonMessage carbonMessage, Reader reader) throws Exception {
StringBuffer sb = new StringBuffer();
MessageDataSource messageDataSource = carbonMessage.getMessageDataSource();
if (messageDataSource == null) {
messageDataSource = reader.makeMessageReadable(carbonMessage);
}
if (messageDataSource.getDataObject() != null && messageDataSource.getDataObject() instanceof OMElement) {
OMElement omElement = (OMElement) messageDataSource.getDataObject();
if (omElement instanceof SOAPEnvelope) {
try {
SOAPHeader header = (SOAPHeader) ((SOAPEnvelope) omElement).getHeader();
if (header != null) {
// Walk every header block; plain OMElements are logged the same way.
for (Iterator iter = header.examineAllHeaderBlocks(); iter.hasNext(); ) {
Object o = iter.next();
if (o instanceof SOAPHeaderBlock) {
SOAPHeaderBlock headerBlk = (SOAPHeaderBlock) o;
sb.append(separator).append(headerBlk.getLocalName()).
append(" : ").append(headerBlk.getText());
} else if (o instanceof OMElement) {
OMElement headerElem = (OMElement) o;
sb.append(separator).append(headerElem.getLocalName()).
append(" : ").append(headerElem.getText());
}
}
}
} catch (Exception e) {
log.error("Exception occurred while processing SOAPHeader", e);
return null;
}
}
}
setCustomProperties(sb, carbonMessage, reader);
return trimLeadingSeparator(sb);
}
private String getFullLogMessage(CarbonMessage carbonMessage, Reader reader) throws Exception {
StringBuffer sb = new StringBuffer();
sb.append(getSimpleLogMessage(carbonMessage, reader));
MessageDataSource messageDataSource = carbonMessage.getMessageDataSource();
if (messageDataSource == null) {
messageDataSource = reader.makeMessageReadable(carbonMessage);
}
if (isJSONMessage(messageDataSource)) {
sb.append(separator).append("Payload: ").append(messageDataSource.getValueAsString("$"));
} else if (isSOAPMessage(messageDataSource)) {
sb.append(separator).append("Envelope: ").append(messageDataSource.getDataObject().toString());
}
return trimLeadingSeparator(sb);
}
    /**
     * Dispatches to the message builder matching the configured log level
     * (CUSTOM, SIMPLE, HEADERS or FULL).
     *
     * @param carbonMessage the message being logged
     * @param reader        used to make the message readable when it has no data source yet
     * @return the formatted log line, or a fixed error string for an unknown level
     * @throws Exception propagated from the underlying message builders
     */
    private String getLogMessage(CarbonMessage carbonMessage, Reader reader) throws Exception {
        switch (logLevel) {
            case CUSTOM:
                return getCustomLogMessage(carbonMessage, reader);
            case SIMPLE:
                return getSimpleLogMessage(carbonMessage, reader);
            case HEADERS:
                return getHeadersLogMessage(carbonMessage, reader);
            case FULL:
                return getFullLogMessage(carbonMessage, reader);
            default:
                // NOTE(review): returns a sentinel string rather than throwing —
                // callers appear to log whatever is returned; confirm before changing.
                return "Invalid log level specified";
        }
    }
private void setCustomProperties(StringBuffer sb, CarbonMessage carbonMessage, Reader reader) throws Exception {
if (properties != null && !properties.isEmpty()) {
for (LogMediatorProperty property : properties) {
if (property != null) {
if (property.getValue() != null) {
sb.append(separator).append(property.getKey()).append(" = ")
.append(getValue(carbonMessage, property.getValue()));
} else {
if (carbonMessage.getMessageDataSource() != null) {
sb.append(separator).append(property.getKey()).append(" = ")
.append(property.getNameSpaceMap() == null ?
carbonMessage.getMessageDataSource()
.getValueAsString(property.getExpression()) :
carbonMessage.getMessageDataSource()
.getValueAsString(property.getExpression(),
property.getNameSpaceMap()));
} else {
MessageDataSource messageDataSource = reader.makeMessageReadable(carbonMessage);
sb.append(separator).append(property.getKey()).append(" = ")
.append(property.getNameSpaceMap() == null ?
messageDataSource.getValueAsString(property.getExpression()) :
messageDataSource.getValueAsString(property.getExpression(),
property.getNameSpaceMap()));
}
}
}
}
}
}
private String trimLeadingSeparator(StringBuffer sb) {
String retStr = sb.toString();
if (retStr.startsWith(separator)) {
return retStr.substring(separator.length());
} else {
return retStr;
}
}
private boolean isSOAPMessage(MessageDataSource messageDataSource) {
if (messageDataSource.getContentType().equals(MIMEType.APPLICATION_XML) ||
messageDataSource.getContentType().equals(MIMEType.APPLICATION_SOAP_XML) ||
messageDataSource.getContentType().equals(MIMEType.TEXT_XML)) {
return true;
}
return false;
}
private boolean isJSONMessage(MessageDataSource messageDataSource) {
if (messageDataSource.getContentType().equals(MIMEType.APPLICATION_JSON)) {
return true;
}
return false;
}
private Map<String, String> getNameSpaceMap(Map<String, Parameter> parameterMap) {
Map<String, String> nameSpaceMap = null;
for (Map.Entry entry : parameterMap.entrySet()) {
String key = (String) entry.getKey();
String modifiedKey = null;
Parameter parameter = (Parameter) entry.getValue();
String val = parameter.getValue();
if (key.startsWith("namespace=")) {
modifiedKey = key.substring(("namespace=").length());
if (nameSpaceMap == null) {
nameSpaceMap = new HashMap<>();
}
nameSpaceMap.put(modifiedKey, val);
}
}
return nameSpaceMap;
}
}
| |
/*
* Copyright (c) 2011-2017 Pivotal Software Inc, All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package reactor.core.publisher;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import org.junit.Assert;
import org.junit.Test;
import reactor.core.CoreSubscriber;
import reactor.core.Fuseable;
import reactor.core.Scannable;
import reactor.test.StepVerifier;
import reactor.test.subscriber.AssertSubscriber;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Unit tests for {@code Flux.generate} / {@link FluxGenerate}: eager null
 * validation of the factory arguments, completion and error signalling,
 * backpressure, state supplier/consumer handling, generator misuse
 * (multiple next/error/complete calls), fusion behaviour, and the
 * {@link Scannable} attributes of the generate subscription.
 */
public class FluxGenerateTest {

    // Null state supplier must be rejected before subscription.
    @Test(expected = NullPointerException.class)
    public void stateSupplierNull() {
        Flux.generate(null, (s, o) -> s, s -> {
        });
    }

    // Null generator function must be rejected before subscription.
    @Test(expected = NullPointerException.class)
    public void generatorNull() {
        Flux.generate(() -> 1, null, s -> {
        });
    }

    // Null state consumer must be rejected before subscription.
    @Test(expected = NullPointerException.class)
    public void stateConsumerNull() {
        Flux.generate(() -> 1, (s, o) -> s, null);
    }

    // A generator that only completes yields an empty sequence.
    @Test
    public void generateEmpty() {
        AssertSubscriber<Integer> ts = AssertSubscriber.create();

        Flux.<Integer>generate(o -> {
            o.complete();
        }).subscribe(ts);

        ts.assertNoValues()
          .assertNoError()
          .assertComplete();
    }

    // One next() followed by complete() emits exactly one value.
    @Test
    public void generateJust() {
        AssertSubscriber<Integer> ts = AssertSubscriber.create();

        Flux.<Integer>generate(o -> {
            o.next(1);
            o.complete();
        }).subscribe(ts);

        ts.assertValues(1)
          .assertNoError()
          .assertComplete();
    }

    // error() terminates the sequence with the given exception.
    @Test
    public void generateError() {
        AssertSubscriber<Integer> ts = AssertSubscriber.create();

        Flux.<Integer>generate(o -> {
            o.error(new RuntimeException("forced failure"));
        }).subscribe(ts);

        ts.assertNoValues()
          .assertNotComplete()
          .assertError(RuntimeException.class)
          .assertErrorMessage("forced failure");
    }

    // With zero initial demand nothing is emitted until request() is called.
    @Test
    public void generateJustBackpressured() {
        AssertSubscriber<Integer> ts = AssertSubscriber.create(0);

        Flux.<Integer>generate(o -> {
            o.next(1);
            o.complete();
        }).subscribe(ts);

        ts.assertNoValues()
          .assertNoError()
          .assertNotComplete();

        ts.request(2);

        ts.assertValues(1)
          .assertNoError()
          .assertComplete();
    }

    // Stateful generator producing 1..10, completing when the state passes 10.
    @Test
    public void generateRange() {
        AssertSubscriber<Integer> ts = AssertSubscriber.create();

        Flux.<Integer, Integer>generate(() -> 1, (s, o) -> {
            if (s < 11) {
                o.next(s);
            }
            else {
                o.complete();
            }
            return s + 1;
        }).subscribe(ts);

        ts.assertValues(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
          .assertNoError()
          .assertComplete();
    }

    // Same range, delivered strictly according to downstream demand.
    @Test
    public void generateRangeBackpressured() {
        AssertSubscriber<Integer> ts = AssertSubscriber.create(0);

        Flux.<Integer, Integer>generate(() -> 1, (s, o) -> {
            if (s < 11) {
                o.next(s);
            }
            else {
                o.complete();
            }
            return s + 1;
        }).subscribe(ts);

        ts.assertNoValues()
          .assertNoError()
          .assertNotComplete();

        ts.request(2);

        ts.assertValues(1, 2)
          .assertNoError()
          .assertNotComplete();

        ts.request(10);

        ts.assertValues(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
          .assertNoError()
          .assertComplete();
    }

    // A throwing state supplier surfaces as onError, not as a thrown exception.
    @Test
    public void stateSupplierThrows() {
        AssertSubscriber<Integer> ts = AssertSubscriber.create();

        Flux.<Integer, Integer>generate(() -> {
            throw new RuntimeException("forced failure");
        }, (s, o) -> {
            o.next(1);
            return s;
        }).subscribe(ts);

        ts.assertNoValues()
          .assertNotComplete()
          .assertError(RuntimeException.class);
    }

    // A throwing generator surfaces as onError with the original message.
    @Test
    public void generatorThrows() {
        AssertSubscriber<Integer> ts = AssertSubscriber.create();

        Flux.<Integer>generate(o -> {
            throw new RuntimeException("forced failure");
        }).subscribe(ts);

        ts.assertNoValues()
          .assertNotComplete()
          .assertError(RuntimeException.class)
          .assertErrorMessage("forced failure");
    }

    // Only the first error() call is propagated; the second is dropped.
    @Test
    public void generatorMultipleOnErrors() {
        AssertSubscriber<Integer> ts = AssertSubscriber.create();

        Flux.<Integer>generate(o -> {
            o.error(new RuntimeException("forced failure"));
            o.error(new RuntimeException("forced failure"));
        }).subscribe(ts);

        ts.assertNoValues()
          .assertNotComplete()
          .assertError(RuntimeException.class)
          .assertErrorMessage("forced failure");
    }

    // A second complete() call is a no-op.
    @Test
    public void generatorMultipleOnCompletes() {
        AssertSubscriber<Integer> ts = AssertSubscriber.create();

        Flux.<Integer>generate(o -> {
            o.complete();
            o.complete();
        }).subscribe(ts);

        ts.assertNoValues()
          .assertComplete()
          .assertNoError();
    }

    // Calling next() twice per round is illegal: first value is delivered,
    // then the sequence fails with IllegalStateException.
    @Test
    public void generatorMultipleOnNexts() {
        AssertSubscriber<Integer> ts = AssertSubscriber.create();

        Flux.<Integer>generate(o -> {
            o.next(1);
            o.next(1);
        }).subscribe(ts);

        ts.assertValues(1)
          .assertNotComplete()
          .assertError(IllegalStateException.class);
    }

    // The state consumer receives the final state exactly once on termination.
    @Test
    public void stateConsumerCalled() {
        AssertSubscriber<Integer> ts = AssertSubscriber.create();

        AtomicInteger stateConsumer = new AtomicInteger();

        Flux.<Integer, Integer>generate(() -> 1, (s, o) -> {
            o.complete();
            return s;
        }, stateConsumer::set).subscribe(ts);

        ts.assertNoValues()
          .assertComplete()
          .assertNoError();

        Assert.assertEquals(1, stateConsumer.get());
    }

    // An Iterator used as generator state replays the full source list.
    @Test
    public void iterableSource() {
        AssertSubscriber<Integer> ts = AssertSubscriber.create();

        List<Integer> list = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);

        Flux.<Integer, Iterator<Integer>>generate(list::iterator, (s, o) -> {
            if (s.hasNext()) {
                o.next(s.next());
            }
            else {
                o.complete();
            }
            return s;
        }).subscribe(ts);

        ts.assertValueSequence(list)
          .assertComplete()
          .assertNoError();
    }

    // Iterator-backed generation honours incremental demand across requests.
    @Test
    public void iterableSourceBackpressured() {
        AssertSubscriber<Integer> ts = AssertSubscriber.create(0);

        List<Integer> list = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);

        Flux.<Integer, Iterator<Integer>>generate(list::iterator, (s, o) -> {
            if (s.hasNext()) {
                o.next(s.next());
            }
            else {
                o.complete();
            }
            return s;
        }).subscribe(ts);

        ts.assertNoValues()
          .assertNoError()
          .assertNotComplete();

        ts.request(2);

        ts.assertValues(1, 2)
          .assertNoError()
          .assertNotComplete();

        ts.request(5);

        ts.assertValues(1, 2, 3, 4, 5, 6, 7)
          .assertNoError()
          .assertNotComplete();

        ts.request(10);

        ts.assertValueSequence(list)
          .assertComplete()
          .assertNoError();
    }

    // When the subscriber accepts any fusion mode, generate fuses as SYNC.
    @Test
    public void fusion() {
        AssertSubscriber<Integer> ts = AssertSubscriber.create();
        ts.requestedFusionMode(Fuseable.ANY);

        List<Integer> list = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);

        Flux.<Integer, Iterator<Integer>>generate(() -> list.iterator(), (s, o) -> {
            if (s.hasNext()) {
                o.next(s.next());
            }
            else {
                o.complete();
            }
            return s;
        }).subscribe(ts);

        ts.assertFuseableSource()
          .assertFusionMode(Fuseable.SYNC)
          .assertValues(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
    }

    // With a thread barrier requested, fusion is declined (NONE) but values flow.
    @Test
    public void fusionBoundary() {
        AssertSubscriber<Integer> ts = AssertSubscriber.create();
        ts.requestedFusionMode(Fuseable.ANY | Fuseable.THREAD_BARRIER);

        List<Integer> list = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);

        Flux.<Integer, Iterator<Integer>>generate(list::iterator, (s, o) -> {
            if (s.hasNext()) {
                o.next(s.next());
            }
            else {
                o.complete();
            }
            return s;
        }).subscribe(ts);

        ts.assertFuseableSource()
          .assertFusionMode(Fuseable.NONE)
          .assertValues(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
    }

    // Scannable attributes of a fresh subscription reflect its live state.
    @Test
    public void scanSubscription() {
        CoreSubscriber<Integer> subscriber = new LambdaSubscriber<>(null, e -> {}, null, null);
        FluxGenerate.GenerateSubscription<Integer, Integer> test =
                new FluxGenerate.GenerateSubscription<>(subscriber, 1, (s, o) -> null, s -> {});

        assertThat(test.scan(Scannable.Attr.TERMINATED)).isFalse();
        assertThat(test.scan(Scannable.Attr.CANCELLED)).isFalse();
        assertThat(test.scan(Scannable.Attr.ACTUAL)).isSameAs(subscriber);

        test.request(5);
        assertThat(test.scan(Scannable.Attr.REQUESTED_FROM_DOWNSTREAM)).isEqualTo(5L);
        assertThat(test.scan(Scannable.Attr.ERROR)).isNull();
    }

    // After error() the subscription is TERMINATED; the error itself is not retained.
    @Test
    public void scanSubscriptionError() {
        CoreSubscriber<Integer> subscriber = new LambdaSubscriber<>(null, e -> {}, null, null);
        FluxGenerate.GenerateSubscription<Integer, Integer> test =
                new FluxGenerate.GenerateSubscription<>(subscriber, 1, (s, o) -> null, s -> {});

        test.error(new IllegalStateException("boom"));

        assertThat(test.scan(Scannable.Attr.TERMINATED)).isTrue();
        assertThat(test.scan(Scannable.Attr.ERROR)).isNull();
    }

    // cancel() flips the CANCELLED attribute.
    @Test
    public void scanSubscriptionCancelled() {
        CoreSubscriber<Integer> subscriber = new LambdaSubscriber<>(null, e -> {}, null, null);
        FluxGenerate.GenerateSubscription<Integer, Integer> test =
                new FluxGenerate.GenerateSubscription<>(subscriber, 1, (s, o) -> null, s -> {});

        assertThat(test.scan(Scannable.Attr.CANCELLED)).isFalse();
        test.cancel();
        assertThat(test.scan(Scannable.Attr.CANCELLED)).isTrue();
    }

    // The generator sees the subscriber context (counter injected downstream).
    @Test
    public void contextTest() {
        StepVerifier.create(Flux.generate(s -> s.next(s.currentContext()
                                                       .get(AtomicInteger.class)
                                                       .incrementAndGet()))
                                .take(10)
                                .subscriberContext(ctx -> ctx.put(AtomicInteger.class,
                                        new AtomicInteger())))
                    .expectNext(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
                    .verifyComplete();
    }
}
| |
/*
* Copyright 2013 Guidewire Software, Inc.
*/
package gw.internal.gosu.ir.compiler.bytecode;
import gw.internal.ext.org.objectweb.asm.Label;
import gw.internal.ext.org.objectweb.asm.MethodVisitor;
import gw.internal.gosu.compiler.NamedLabel;
import gw.lang.ir.IRElement;
import gw.lang.ir.IRSymbol;
import gw.lang.ir.IRType;
import gw.lang.ir.statement.IRTerminalStatement;
import gw.lang.ir.statement.IRTryCatchFinallyStatement;
import gw.util.GosuExceptionUtil;
import gw.util.Stack;
import java.util.ArrayList;
import java.util.List;
/**
 * Mutable compilation context for emitting one method's bytecode from IR.
 * Wraps the ASM {@link MethodVisitor} and tracks lexical scopes, local
 * variable slots, enclosing try/finally partitions, break/continue label
 * stacks, and the last emitted source line number.
 *
 * Fix: the "finally statements out of order" message previously dropped the
 * closing quote around the popped partition.
 */
public class IRBytecodeContext {
  private MethodVisitor _mv;
  // Innermost scope on top; root scope pushed in the constructor.
  private Stack<IRCompilerScope> _scopes;
  // Every local ever created, across all scopes, for debug-info emission.
  private List<IRCompilerLocalVar> _allLocalVars;
  // Enclosing try/finally partitions, innermost on top.
  private Stack<IRFinallyCodePartitioner> _finallyStatements;
  private int _tempVarCount;
  private Stack<Label> _breakLabels;
  private Stack<Label> _continueLabels;
  // NOTE(review): only written in visitLabel(); the read in getLocalVar() is
  // commented out — confirm whether this field is still needed.
  private Label _lastVisitedLabel;
  private int _lastLineNumber;

  public IRBytecodeContext(MethodVisitor mv) {
    _mv = mv;
    _scopes = new Stack<IRCompilerScope>();
    pushScope();
    _allLocalVars = new ArrayList<IRCompilerLocalVar>();
    _finallyStatements = new Stack<IRFinallyCodePartitioner>();
    _breakLabels = new Stack<Label>();
    _continueLabels = new Stack<Label>();
    _lastLineNumber = -1;
  }

  public MethodVisitor getMv() {
    return _mv;
  }

  /**
   * Emits the label and updates debug ranges: locals without a start label
   * begin here, and locals whose scope has ended are closed here.
   */
  public void visitLabel(Label label) {
    _lastVisitedLabel = label;
    _mv.visitLabel( label );
    for( IRCompilerLocalVar lv : _allLocalVars )
    {
      if( lv.getStartLabel() == null )
      {
        lv.setStartLabel( label );
      }
      if( isOutOfScope( lv ) && lv.getEndLabel() == null )
      {
        lv.setEndLabel( label );
      }
    }
  }

  public int getLocalCount()
  {
    return _allLocalVars.size();
  }

  /** Largest number of locals simultaneously live in any single scope. */
  public int getMaxScopeSize()
  {
    int iMax = 0;
    for( IRCompilerLocalVar local : _allLocalVars )
    {
      iMax = Math.max( local.getScope().getLocalVars().size(), iMax );
    }
    return iMax;
  }

  private boolean isOutOfScope( IRCompilerLocalVar lv )
  {
    return !lv.getScope().isActive();
  }

  /**
   * Writes the local-variable debug table. Temps are skipped, as are locals
   * whose live range is empty (start label equals end label).
   */
  public void visitLocalVars()
  {
    for( IRCompilerLocalVar lv : _allLocalVars )
    {
      if( !lv.isTemp() )
      {
        try {
          if (!lv.getStartLabel().equals(lv.getEndLabel())) {
            _mv.visitLocalVariable(
                    lv.getName(), lv.getType().getDescriptor(), null,
                    lv.getStartLabel(), lv.getEndLabel(), lv.getIndex());
          }
        }
        catch( Exception e )
        {
          // Re-thrown with the variable name so the failing local is identifiable.
          throw GosuExceptionUtil.forceThrow( e, lv.getName() );
        }
      }
    }
  }

  public void pushScope() {
    _scopes.push(new IRCompilerScope(_scopes.isEmpty() ? null : _scopes.peek()));
  }

  public void popScope() {
    IRCompilerScope oldScope = _scopes.pop();
    oldScope.scopeRemoved();
  }

  /** Registers the implicit "this" local and starts its debug range immediately. */
  public void indexThis(IRType type) {
    Label label = new Label();
    visitLabel( label );
    IRCompilerLocalVar thisVar = getLocalVar( new IRSymbol( "this", type, false ) );
    thisVar.setStartLabel( label );
  }

  /** Pre-allocates slots for the given symbols (e.g. method parameters). */
  public void indexSymbols(List<IRSymbol> symbols) {
    for (IRSymbol symbol : symbols) {
      getLocalVar( symbol );
    }
  }

  /**
   * Looks up the local for a symbol in the current scope, creating and
   * registering it on first use.
   */
  public IRCompilerLocalVar getLocalVar(IRSymbol symbol) {
    IRCompilerLocalVar localVar = _scopes.peek().findLocalVar( symbol );
    if (localVar == null) {
      localVar = _scopes.peek().createLocalVar(symbol);
      //## note: We don't assign the start label here because local vars are not in scope until after their declaration
      //## The start label is assigned during in visitLabel() above.
      //
      // if (_lastVisitedLabel != null) {
      //   localVar.setStartLabel(_lastVisitedLabel);
      // }
      _allLocalVars.add( localVar );
    }
    return localVar;
  }

  /** Creates a compiler-generated temporary with a unique, unclashable name. */
  public IRCompilerLocalVar makeTempVar(IRType type) {
    return getLocalVar( new IRSymbol( "$$compilertemp$$" + (_tempVarCount++), type, true) );
  }

  public IRFinallyCodePartitioner pushFinallyStatement( IRTryCatchFinallyStatement tryCatchFinallyStmt )
  {
    IRFinallyCodePartitioner partition = new IRFinallyCodePartitioner( this, tryCatchFinallyStmt );
    _finallyStatements.push( partition );
    return partition;
  }

  /**
   * Pops the innermost finally partition and verifies it matches the one the
   * caller expects to close (push/pop must be strictly nested).
   */
  public void popFinallyStatement( IRFinallyCodePartitioner partition )
  {
    IRFinallyCodePartitioner popped = _finallyStatements.pop();
    if( popped != partition )
    {
      throw new IllegalStateException(
              "Finally statements out of order. " +
              "Expected '" + partition + "', but got '" + popped + "'" );
    }
  }

  public boolean hasFinallyStatements()
  {
    return !_finallyStatements.isEmpty();
  }

  public Stack<IRFinallyCodePartitioner> getFinallyParitioners()
  {
    return _finallyStatements;
  }

  public IRFinallyCodePartitioner peekFinallyPartitioner() {
    return _finallyStatements.peek();
  }

  /**
   * Inlines the finally bodies of every enclosing try block that the given
   * terminal statement (return/break/continue/throw) escapes, innermost first,
   * then closes their coverage at a shared EndFinally label.
   */
  public void inlineFinallyStatements( IRTerminalStatement stmt )
  {
    if( !hasFinallyStatements() )
    {
      return;
    }

    Stack<IRFinallyCodePartitioner> partitions = getFinallyParitioners();
    List<IRFinallyCodePartitioner> inlinedFinallys = new ArrayList<IRFinallyCodePartitioner>();
    for( int i = partitions.size() - 1; i >= 0; i-- )
    {
      IRFinallyCodePartitioner partition = partitions.get( i );
      if( !partition.appliesTo( stmt ) )
      {
        // once a try block does not apply to a point, no enclosing try blocks do
        break;
      }
      partition.inlineFinally();
      inlinedFinallys.add( partition );
    }

    // stop the finally coverage for all nested finally's at the very end of the inlined finally statements
    NamedLabel endLabel = new NamedLabel( "EndFinally" );
    visitLabel( endLabel );
    for( IRFinallyCodePartitioner inlinedFinally : inlinedFinallys )
    {
      inlinedFinally.endInlineFinally( endLabel );
    }
  }

  public void compile( IRElement element ) {
    IRBytecodeCompiler.compileIRElement( element, this );
  }

  public void pushBreakLabel( Label label ) {
    _breakLabels.push( label );
  }

  public void popBreakLabel() {
    _breakLabels.pop();
  }

  public void pushContinueLabel( Label label ) {
    _continueLabels.push( label );
  }

  public void popContinueLabel() {
    _continueLabels.pop();
  }

  public Label getCurrentBreakLabel() {
    return _breakLabels.peek();
  }

  public Label getCurrentContinueLabel() {
    return _continueLabels.peek();
  }

  /**
   * Emits a line-number entry if the line is valid and differs from the last
   * one emitted; returns the previous line number so callers can restore it.
   */
  public int setLineNumber( int lineNumber )
  {
    int lastLineNumber = _lastLineNumber;
    if( lineNumber > 0 && lineNumber != lastLineNumber )
    {
      MethodVisitor mv = getMv();
      Label label = new Label();
      visitLabel( label );
      mv.visitLineNumber( lineNumber, label );
      _lastLineNumber = lineNumber;
    }
    return lastLineNumber;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.synapse.transport.nhttp;
import org.apache.axis2.AxisFault;
import org.apache.axis2.addressing.EndpointReference;
import org.apache.axis2.context.ConfigurationContext;
import org.apache.axis2.context.MessageContext;
import org.apache.axis2.description.AxisOperation;
import org.apache.axis2.description.AxisService;
import org.apache.axis2.description.Parameter;
import org.apache.axis2.engine.AxisEngine;
import org.apache.axis2.transport.http.HTTPTransportReceiver;
import org.apache.axis2.util.JavaUtils;
import org.apache.axis2.util.MessageContextBuilder;
import org.apache.axis2.wsdl.WSDLConstants;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.HttpRequest;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.nio.NHttpServerConnection;
import org.apache.http.protocol.HTTP;
import org.apache.synapse.transport.nhttp.util.RESTUtil;
import org.apache.ws.commons.schema.XmlSchema;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.util.*;
/**
* Default http Get processor implementation for Synapse.
*/
public class DefaultHttpGetProcessor implements HttpGetRequestProcessor {
private static final Log log = LogFactory.getLog(DefaultHttpGetProcessor.class);
private static final String LOCATION = "Location";
private static final String CONTENT_TYPE = "Content-Type";
private static final String TEXT_HTML = "text/html";
private static final String TEXT_XML = "text/xml";
protected ConfigurationContext cfgCtx;
protected ServerHandler serverHandler;
public void init(ConfigurationContext cfgCtx, ServerHandler serverHandler) throws AxisFault {
this.cfgCtx = cfgCtx;
this.serverHandler = serverHandler;
}
/**
* Process the HTTP GET request.
*
* @param request The HttpRequest
* @param response The HttpResponse
* @param msgContext The MessageContext
* @param conn The NHttpServerConnection
* @param os The OutputStream
*/
    public void process(HttpRequest request,
                        HttpResponse response,
                        MessageContext msgContext,
                        NHttpServerConnection conn,
                        OutputStream os,
                        boolean isRestDispatching) {
        String uri = request.getRequestLine().getUri();
        String servicePath = cfgCtx.getServiceContextPath();
        if (!servicePath.startsWith("/")) {
            servicePath = "/" + servicePath;
        }

        String serviceName = getServiceName(request);

        // Split any query string into a parameter map (key=value, or key -> null
        // for bare flags like "?wsdl") and set the EPR "To" to the path part.
        Map<String, String> parameters = new HashMap<String, String>();
        int pos = uri.indexOf("?");
        if (pos != -1) {
            msgContext.setTo(new EndpointReference(uri.substring(0, pos)));
            StringTokenizer st = new StringTokenizer(uri.substring(pos + 1), "&");
            while (st.hasMoreTokens()) {
                String param = st.nextToken();
                pos = param.indexOf("=");
                if (pos != -1) {
                    parameters.put(param.substring(0, pos), param.substring(pos + 1));
                } else {
                    parameters.put(param, null);
                }
            }
        } else {
            msgContext.setTo(new EndpointReference(uri));
        }

        // Dispatch in priority order. Branches that `return` hand the stream to
        // the callee (WSDL/XSD generation or the Synapse engine), which is then
        // responsible for closing it; the remaining branches fall through to
        // closeOutputStream(os) below.
        if (isServiceListBlocked(uri)) {
            response.setStatusCode(HttpStatus.SC_FORBIDDEN);
            serverHandler.commitResponseHideExceptions(conn, response);
        } else if (uri.equals("/favicon.ico")) {
            // Browsers request this automatically; redirect to the Apache icon.
            response.setStatusCode(HttpStatus.SC_MOVED_PERMANENTLY);
            response.addHeader(LOCATION, "http://ws.apache.org/favicon.ico");
            serverHandler.commitResponseHideExceptions(conn, response);
        } else if (serviceName != null && parameters.containsKey("wsdl")) {
            generateWsdl(request, response, msgContext,
                    conn, os, serviceName, parameters, isRestDispatching);
            return;
        } else if (serviceName != null && parameters.containsKey("wsdl2")) {
            generateWsdl2(request, response, msgContext,
                    conn, os, serviceName, isRestDispatching);
            return;
        } else if (serviceName != null && parameters.containsKey("xsd")) {
            generateXsd(request, response, msgContext, conn, os, serviceName,
                    parameters, isRestDispatching);
            return;
        } else if (serviceName != null && parameters.containsKey("info")) {
            generateServiceDetailsPage(response, conn, os, serviceName);
        } else if (uri.startsWith(servicePath) &&
                (serviceName == null || serviceName.length() == 0)) {
            // Bare /services (or /services/) URI: render the HTML service list.
            generateServicesList(response, conn, os, servicePath);
        } else {
            // Everything else is dispatched through the Synapse engine as REST GET.
            processGetAndDelete(request, response, msgContext,
                    conn, os, "GET", isRestDispatching);
            return;
        }
        // make sure that the output stream is flushed and closed properly
        closeOutputStream(os);
    }
private void closeOutputStream(OutputStream os) {
try {
os.flush();
os.close();
} catch (IOException ignore) {
}
}
/**
* Is the incoming URI is requesting service list and http.block_service_list=true in
* nhttp.properties
* @param incomingURI incoming URI
* @return whether to proceed with incomingURI
*/
protected boolean isServiceListBlocked(String incomingURI) {
String isBlocked = NHttpConfiguration.getInstance().isServiceListBlocked();
return (("/services").equals(incomingURI) || ("/services" + "/").equals(incomingURI)) &&
Boolean.parseBoolean(isBlocked);
}
/**
* Returns the service name.
*
* @param request HttpRequest
* @return service name as a String
*/
    protected String getServiceName(HttpRequest request) {
        String uri = request.getRequestLine().getUri();

        String servicePath = cfgCtx.getServiceContextPath();
        if (!servicePath.startsWith("/")) {
            servicePath = "/" + servicePath;
        }

        String serviceName = null;
        if (uri.startsWith(servicePath)) {
            // Standard form: /<servicePath>/<serviceName>[/...][?query]
            serviceName = uri.substring(servicePath.length());
            if (serviceName.startsWith("/")) {
                serviceName = serviceName.substring(1);
            }
            // Strip any query string.
            if (serviceName.contains("?")) {
                serviceName = serviceName.substring(0, serviceName.indexOf("?"));
            }
        } else {
            // this may be a custom URI: look it up in the EPR -> service-name map
            // registered in the configuration context (case-insensitive substring
            // match against the incoming URI).
            String incomingURI = request.getRequestLine().getUri();

            Map serviceURIMap = (Map) cfgCtx.getProperty(NhttpConstants.EPR_TO_SERVICE_NAME_MAP);
            if (serviceURIMap != null) {
                Set keySet = serviceURIMap.keySet();
                for (Object key : keySet) {
                    if (incomingURI.toLowerCase().contains(((String) key).toLowerCase())) {
                        return (String) serviceURIMap.get(key);
                    }
                }
            }
        }

        // Drop any trailing operation part (/<serviceName>/<operation>).
        if (serviceName != null) {
            int opnStart = serviceName.indexOf("/");
            if (opnStart != -1) {
                serviceName = serviceName.substring(0, opnStart);
            }
        }
        return serviceName;
    }
/**
* Generates the services list.
*
* @param response HttpResponse
* @param conn NHttpServerConnection
* @param os OutputStream
* @param servicePath service path of the service
*/
protected void generateServicesList(HttpResponse response,
NHttpServerConnection conn,
OutputStream os, String servicePath) {
try {
byte[] bytes = getServicesHTML(
servicePath.endsWith("/") ? "" : servicePath + "/").getBytes();
response.addHeader(CONTENT_TYPE, TEXT_HTML);
serverHandler.commitResponseHideExceptions(conn, response);
os.write(bytes);
} catch (IOException e) {
handleBrowserException(response, conn, os,
"Error generating services list", e);
}
}
/**
* Generates service details page.
*
* @param response HttpResponse
* @param conn NHttpServerConnection
* @param os OutputStream
* @param serviceName service name
*/
protected void generateServiceDetailsPage(HttpResponse response,
NHttpServerConnection conn,
OutputStream os, String serviceName) {
AxisService service = cfgCtx.getAxisConfiguration().
getServices().get(serviceName);
if (service != null) {
String parameterValue = (String) service.getParameterValue("serviceType");
if ("proxy".equals(parameterValue) && !isWSDLProvidedForProxyService(service)) {
handleBrowserException(response, conn, os,
"No WSDL was provided for the Service " + serviceName +
". A WSDL cannot be generated.", null);
}
try {
byte[] bytes =
HTTPTransportReceiver.printServiceHTML(serviceName, cfgCtx).getBytes();
response.addHeader(CONTENT_TYPE, TEXT_HTML);
serverHandler.commitResponseHideExceptions(conn, response);
os.write(bytes);
} catch (IOException e) {
handleBrowserException(response, conn, os,
"Error generating service details page for : " + serviceName, e);
}
} else {
handleBrowserException(response, conn, os,
"Invalid service : " + serviceName, null);
}
}
/**
* Generates Schema.
*
* @param request HttpRequest
* @param response HttpResponse
* @param messageCtx Current MessageContext
* @param conn NHttpServerConnection
* @param os OutputStream
* @param serviceName service name
* @param parameters url parameters
* @param isRestDispatching Whether to handle this as REST
*/
protected void generateXsd(HttpRequest request, HttpResponse response,
MessageContext messageCtx, NHttpServerConnection conn,
OutputStream os, String serviceName,
Map<String, String> parameters, boolean isRestDispatching) {
if (parameters.get("xsd") == null || "".equals(parameters.get("xsd"))) {
AxisService service = cfgCtx.getAxisConfiguration()
.getServices().get(serviceName);
if (service != null) {
try {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
service.printSchema(baos);
response.addHeader(CONTENT_TYPE, TEXT_XML);
serverHandler.commitResponseHideExceptions(conn, response);
os.write(baos.toByteArray());
closeOutputStream(os);
} catch (Exception e) {
handleBrowserException(response, conn, os,
"Error generating ?xsd output for service : " + serviceName, e);
}
} else {
processGetAndDelete(request, response, messageCtx, conn, os,
serviceName, isRestDispatching);
}
} else {
//cater for named xsds - check for the xsd name
String schemaName = parameters.get("xsd");
AxisService service = cfgCtx.getAxisConfiguration()
.getServices().get(serviceName);
if (service != null) {
//run the population logic just to be sure
service.populateSchemaMappings();
//write out the correct schema
Map schemaTable = service.getSchemaMappingTable();
XmlSchema schema = (XmlSchema) schemaTable.get(schemaName);
if (schema == null) {
int dotIndex = schemaName.indexOf('.');
if (dotIndex > 0) {
String schemaKey = schemaName.substring(0, dotIndex);
schema = (XmlSchema) schemaTable.get(schemaKey);
}
}
//schema found - write it to the stream
if (schema != null) {
try {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
schema.write(baos);
response.addHeader(CONTENT_TYPE, TEXT_XML);
serverHandler.commitResponseHideExceptions(conn, response);
os.write(baos.toByteArray());
closeOutputStream(os);
} catch (Exception e) {
handleBrowserException(response, conn, os,
"Error generating named ?xsd output for service : " + serviceName, e);
}
} else {
// no schema available by that name - send 404
response.setStatusCode(HttpStatus.SC_NOT_FOUND);
closeOutputStream(os);
}
} else {
processGetAndDelete(request, response, messageCtx, conn, os,
serviceName, isRestDispatching);
}
}
}
/**
* Generate WSDL2.
*
* @param request HttpRequest
* @param response HttpResponse
* @param msgContext MessageContext
* @param conn NHttpServerConnection
* @param os OutputStream
* @param serviceName service name
* @param isRestDispatching weather nhttp should do rest dispatching
*/
protected void generateWsdl2(HttpRequest request, HttpResponse response,
MessageContext msgContext,
NHttpServerConnection conn,
OutputStream os, String serviceName, boolean isRestDispatching) {
AxisService service = cfgCtx.getAxisConfiguration().
getServices().get(serviceName);
if (service != null) {
String parameterValue = (String) service.getParameterValue("serviceType");
if ("proxy".equals(parameterValue) && !isWSDLProvidedForProxyService(service)) {
handleBrowserException(response, conn, os,
"No WSDL was provided for the Service " + serviceName +
". A WSDL cannot be generated.", null);
}
try {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
service.printWSDL2(baos, getIpAddress());
response.addHeader(CONTENT_TYPE, TEXT_XML);
serverHandler.commitResponseHideExceptions(conn, response);
os.write(baos.toByteArray());
closeOutputStream(os);
} catch (Exception e) {
handleBrowserException(response, conn, os,
"Error generating ?wsdl2 output for service : " + serviceName, e);
}
} else {
processGetAndDelete(request, response, msgContext,
conn, os, "GET", isRestDispatching);
}
}
/**
 * Writes the WSDL 1.1 document of the named service to the client, or hands
 * the request over to REST processing when no such service is deployed.
 *
 * @param request           HttpRequest being served
 * @param response          HttpResponse to populate and commit
 * @param msgContext        MessageContext of the current exchange
 * @param conn              NHttpServerConnection the response is committed on
 * @param os                OutputStream the WSDL bytes are written to
 * @param parameters        query-string parameters; an explicit "wsdl" value names a
 *                          user-supplied WSDL file bundled with the service
 * @param serviceName       name of the requested service
 * @param isRestDispatching whether REST dispatching should be performed on fallback
 */
protected void generateWsdl(HttpRequest request, HttpResponse response,
                            MessageContext msgContext,
                            NHttpServerConnection conn,
                            OutputStream os, String serviceName,
                            Map<String, String> parameters, boolean isRestDispatching) {
    AxisService service = cfgCtx.getAxisConfiguration().
            getServices().get(serviceName);
    if (service == null) {
        // Unknown service: treat this as an ordinary REST GET instead.
        processGetAndDelete(request, response, msgContext,
                conn, os, "GET", isRestDispatching);
        return;
    }
    try {
        // Buffer the document first so headers can be committed before the body.
        ByteArrayOutputStream wsdlOut = new ByteArrayOutputStream();
        String wsdlFileName = parameters.get("wsdl");
        if (wsdlFileName != null) {
            // ?wsdl=<name> selects a WSDL file provided with the service
            service.printUserWSDL(wsdlOut, wsdlFileName);
        } else {
            service.printWSDL(wsdlOut, getIpAddress());
        }
        response.addHeader(CONTENT_TYPE, TEXT_XML);
        serverHandler.commitResponseHideExceptions(conn, response);
        os.write(wsdlOut.toByteArray());
        closeOutputStream(os);
    } catch (Exception e) {
        handleBrowserException(response, conn, os,
                "Error generating ?wsdl output for service : " + serviceName, e);
    }
}
/**
 * Calls the RESTUtil to process a GET or DELETE request through the engine.
 *
 * @param request           HttpRequest
 * @param response          HttpResponse
 * @param msgContext        MessageContext
 * @param conn              NHttpServerConnection
 * @param os                OutputStream
 * @param method            HTTP method, either GET or DELETE
 * @param isRestDispatching whether the transport should do REST dispatching
 */
protected void processGetAndDelete(HttpRequest request, HttpResponse response,
                                   MessageContext msgContext,
                                   NHttpServerConnection conn, OutputStream os,
                                   String method, boolean isRestDispatching) {
    try {
        RESTUtil.processGetAndDeleteRequest(
                msgContext, os, request.getRequestLine().getUri(),
                request.getFirstHeader(HTTP.CONTENT_TYPE), method, isRestDispatching);
        // do not let the output stream close (as by default below) since
        // we are serving this GET/DELETE request through the Synapse engine
    } catch (AxisFault axisFault) {
        // AxisFault from the engine is reported back as a SOAP fault / error page
        handleException(response, msgContext, conn, os,
                "Error processing " + method + " request for: " +
                request.getRequestLine().getUri(), axisFault);
    }
}
/**
 * Handles a processing failure: logs it, then tries to send a SOAP fault
 * through the engine; if even that fails, falls back to writing a minimal
 * plain error body and shutting the connection down.
 *
 * @param response   HttpResponse to populate on the fallback path
 * @param msgContext MessageContext the fault context is derived from
 * @param conn       NHttpServerConnection to shut down on the fallback path
 * @param os         OutputStream for the fallback error body
 * @param msg        human-readable error message
 * @param e          causing exception; may be null, in which case one is synthesized from msg
 */
protected void handleException(HttpResponse response, MessageContext msgContext,
                               NHttpServerConnection conn,
                               OutputStream os, String msg, Exception e) {
    // Merged the two previous `e == null` checks: log once, then ensure a
    // non-null exception for the fault context.
    if (e == null) {
        log.error(msg);
        e = new Exception(msg);
    } else {
        log.error(msg, e);
    }
    try {
        MessageContext faultContext = MessageContextBuilder.createFaultMessageContext(
                msgContext, e);
        AxisEngine.sendFault(faultContext);
    } catch (Exception ex) {
        // Fault dispatch itself failed: best-effort raw error response.
        response.setStatusCode(HttpStatus.SC_INTERNAL_SERVER_ERROR);
        response.addHeader(CONTENT_TYPE, TEXT_XML);
        serverHandler.commitResponseHideExceptions(conn, response);
        try {
            os.write(msg.getBytes());
            // BUGFIX: ex is never null here, but ex.getMessage() can be, and
            // writing null.getBytes() would throw an uncaught NPE.
            if (ex.getMessage() != null) {
                os.write(ex.getMessage().getBytes());
            }
        } catch (IOException ignore) {
            // nothing more we can do for this client
        }
        if (conn != null) {
            try {
                conn.shutdown();
            } catch (IOException ignore) {
            }
        }
    }
}
/**
 * Reports an error to a browser client as a plain HTML response and then
 * shuts the connection down.
 *
 * @param response HttpResponse to rewrite with the error (skipped if already streaming)
 * @param conn     NHttpServerConnection to shut down; may be null
 * @param os       OutputStream the error text is written to
 * @param msg      human-readable error message (also used as the reason phrase)
 * @param e        causing exception; may be null (message-only logging)
 */
protected void handleBrowserException(HttpResponse response,
                                      NHttpServerConnection conn, OutputStream os,
                                      String msg, Exception e) {
    if (e != null) {
        log.error(msg, e);
    } else {
        log.error(msg);
    }
    // Once a chunked (Transfer-Encoding) response is underway the status line
    // and body can no longer be replaced, so only rewrite when it is absent.
    if (!response.containsHeader(HTTP.TRANSFER_ENCODING)) {
        response.setStatusCode(HttpStatus.SC_INTERNAL_SERVER_ERROR);
        response.setReasonPhrase(msg);
        response.addHeader(CONTENT_TYPE, TEXT_HTML);
        serverHandler.commitResponseHideExceptions(conn, response);
        try {
            os.write(msg.getBytes());
            os.close();
        } catch (IOException ignore) {
            // best effort; the connection is shut down below regardless
        }
    }
    if (conn == null) {
        return;
    }
    try {
        conn.shutdown();
    } catch (IOException ignore) {
    }
}
/**
 * Checks whether a WSDL (1.1 or 2.0) has been supplied for a proxy service.
 *
 * @param service AxisService to inspect
 * @return true when a WSDL4J definition or a WSDL 2.0 description parameter is set
 */
protected boolean isWSDLProvidedForProxyService(AxisService service) {
    return service.getParameterValue(WSDLConstants.WSDL_4_J_DEFINITION) != null
            || service.getParameterValue(WSDLConstants.WSDL_20_DESCRIPTION) != null;
}
/**
 * Whatever this method returns as the IP is ignored by the actual http/s listener when
 * its getServiceEPR is invoked. This was originally copied from axis2.
 * <p>
 * Scans all network interfaces and returns the first non-loopback address
 * that looks like a dotted-quad IPv4 address; falls back to 127.0.0.1.
 *
 * @return Returns String.
 * @throws java.net.SocketException if the network interfaces can not be accessed
 */
protected static String getIpAddress() throws SocketException {
    // Use the generic Enumeration types (available since Java 5) instead of
    // raw types + casts as before; behavior is unchanged.
    Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces();
    while (interfaces.hasMoreElements()) {
        Enumeration<InetAddress> addresses = interfaces.nextElement().getInetAddresses();
        while (addresses.hasMoreElements()) {
            InetAddress ip = addresses.nextElement();
            if (!ip.isLoopbackAddress() && isIP(ip.getHostAddress())) {
                return ip.getHostAddress();
            }
        }
    }
    // No suitable interface address found: fall back to loopback.
    return "127.0.0.1";
}
/**
 * Loose IPv4 heuristic: true when the address splits into exactly four
 * dot-separated segments. Segment contents are deliberately not validated;
 * this only distinguishes dotted-quad addresses from e.g. IPv6 literals.
 */
protected static boolean isIP(String hostAddress) {
    String[] segments = hostAddress.split("\\.");
    return segments.length == 4;
}
/**
 * Returns the HTML text for the list of services deployed.
 * This can be delegated to another Class as well
 * where it will handle more options of GET messages.
 *
 * @param prefix to be used for the Service names
 * @return the HTML to be displayed as a String
 */
protected String getServicesHTML(String prefix) {
    Map serviceMap = cfgCtx.getAxisConfiguration().getServices();
    Hashtable faultyServiceTable = cfgCtx.getAxisConfiguration().getFaultyServices();
    StringBuilder html = new StringBuilder("<html><head><title>Axis2: Services</title></head><body>");
    boolean anyListed = false;
    if (serviceMap != null && !serviceMap.isEmpty()) {
        anyListed = true;
        html.append("<h2>Deployed services</h2>");
        for (Object entry : serviceMap.values()) {
            AxisService axisService = (AxisService) entry;
            Parameter hiddenParam = axisService.getParameter(
                    NhttpConstants.HIDDEN_SERVICE_PARAM_NAME);
            // Services with a "__" prefix or an explicit hidden parameter are private.
            if (axisService.getName().startsWith("__") ||
                (hiddenParam != null && JavaUtils.isTrueExplicitly(hiddenParam.getValue()))) {
                continue;
            }
            String name = axisService.getName();
            html.append("<h3><a href=\"").append(prefix).append(name)
                    .append("?wsdl\">").append(name).append("</a></h3>");
            Iterator operations = axisService.getOperations();
            if (!operations.hasNext()) {
                html.append("No operations specified for this service");
            } else {
                html.append("Available operations <ul>");
                while (operations.hasNext()) {
                    AxisOperation operation = (AxisOperation) operations.next();
                    html.append("<li>").append(operation.getName().getLocalPart()).append("</li>");
                }
                html.append("</ul>");
            }
        }
    }
    if (faultyServiceTable != null && !faultyServiceTable.isEmpty()) {
        anyListed = true;
        html.append("<hr><h2><font color=\"blue\">Faulty Services</font></h2>");
        Enumeration faultyNames = faultyServiceTable.keys();
        while (faultyNames.hasMoreElements()) {
            html.append("<h3><font color=\"blue\">")
                    .append((String) faultyNames.nextElement())
                    .append("</font></h3>");
        }
    }
    if (!anyListed) {
        html.append("<h2>There are no services deployed</h2>");
    }
    return html.append("</body></html>").toString();
}
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.testFramework;
import com.intellij.ToolExtensionPoints;
import com.intellij.analysis.AnalysisScope;
import com.intellij.codeInspection.GlobalInspectionTool;
import com.intellij.codeInspection.InspectionEP;
import com.intellij.codeInspection.InspectionManager;
import com.intellij.codeInspection.LocalInspectionTool;
import com.intellij.codeInspection.deadCode.UnusedDeclarationInspection;
import com.intellij.codeInspection.deadCode.UnusedDeclarationInspectionBase;
import com.intellij.codeInspection.deadCode.UnusedDeclarationPresentation;
import com.intellij.codeInspection.ex.*;
import com.intellij.codeInspection.reference.EntryPoint;
import com.intellij.codeInspection.reference.RefElement;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ex.PathManagerEx;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.ExtensionPoint;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.LanguageLevelProjectExtension;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiManager;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.testFramework.fixtures.impl.CodeInsightTestFixtureImpl;
import com.intellij.testFramework.fixtures.impl.GlobalInspectionContextForTests;
import com.intellij.util.ArrayUtil;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import java.io.File;
/**
* @author max
* @since Apr 11, 2002
*/
@SuppressWarnings({"HardCodedStringLiteral"})
public abstract class InspectionTestCase extends PsiTestCase {
  private static final Logger LOG = Logger.getInstance("#com.intellij.testFramework.InspectionTestCase");

  // Registered on the dead-code extension point in setUp() so that sources under
  // "ext_src" count as entry points; unregistered and nulled in tearDown().
  private EntryPoint myUnusedCodeExtension;
  // Optional "<testDir>/ext_src" source root; null when the test data has none.
  private VirtualFile ext_src;

  /**
   * Builds a wrapper for the "unused declaration" global inspection, used when a
   * test asks for dead-code analysis to run before the inspection under test.
   */
  protected static GlobalInspectionToolWrapper getUnusedDeclarationWrapper() {
    InspectionEP ep = new InspectionEP();
    ep.presentation = UnusedDeclarationPresentation.class.getName();
    ep.implementationClass = UnusedDeclarationInspection.class.getName();
    ep.shortName = UnusedDeclarationInspectionBase.SHORT_NAME;
    return new GlobalInspectionToolWrapper(ep);
  }

  public InspectionManagerEx getManager() {
    return (InspectionManagerEx)InspectionManager.getInstance(myProject);
  }

  // --- doTest overloads. All funnel into the most specific variant below,
  // defaulting to the "java 1.4" mock JDK name, no range checking, and no
  // preliminary dead-code pass. ---

  public void doTest(@NonNls String folderName, LocalInspectionTool tool) {
    doTest(folderName, new LocalInspectionToolWrapper(tool));
  }

  public void doTest(@NonNls String folderName, GlobalInspectionTool tool) {
    doTest(folderName, new GlobalInspectionToolWrapper(tool));
  }

  public void doTest(@NonNls String folderName, GlobalInspectionTool tool, boolean checkRange) {
    doTest(folderName, new GlobalInspectionToolWrapper(tool), checkRange);
  }

  public void doTest(@NonNls String folderName, GlobalInspectionTool tool, boolean checkRange, boolean runDeadCodeFirst) {
    doTest(folderName, new GlobalInspectionToolWrapper(tool), "java 1.4", checkRange, runDeadCodeFirst);
  }

  public void doTest(@NonNls String folderName, InspectionToolWrapper tool) {
    doTest(folderName, tool, "java 1.4");
  }

  public void doTest(@NonNls String folderName, InspectionToolWrapper tool, final boolean checkRange) {
    doTest(folderName, tool, "java 1.4", checkRange);
  }

  public void doTest(@NonNls String folderName, LocalInspectionTool tool, @NonNls final String jdkName) {
    doTest(folderName, new LocalInspectionToolWrapper(tool), jdkName);
  }

  public void doTest(@NonNls String folderName, InspectionToolWrapper tool, @NonNls final String jdkName) {
    doTest(folderName, tool, jdkName, false);
  }

  public void doTest(@NonNls String folderName, InspectionToolWrapper tool, @NonNls final String jdkName, boolean checkRange) {
    doTest(folderName, tool, jdkName, checkRange, false);
  }

  /**
   * Runs the given inspection over the test data in {@code <testDataPath>/<folderName>}
   * and compares the produced problems with the expected results stored there.
   *
   * @param folderName       folder under {@link #getTestDataPath()} with sources and expected output
   * @param toolWrapper      inspection to run
   * @param jdkName          name of the mock JDK to set up the root model with
   * @param checkRange       whether highlighting ranges are compared as well
   * @param runDeadCodeFirst whether the unused-declaration inspection runs first
   * @param additional       extra tools registered in the same inspection context
   */
  public void doTest(@NonNls String folderName,
                     InspectionToolWrapper toolWrapper,
                     @NonNls final String jdkName,
                     boolean checkRange,
                     boolean runDeadCodeFirst,
                     InspectionToolWrapper... additional) {
    final String testDir = getTestDataPath() + "/" + folderName;
    GlobalInspectionContextImpl context = runTool(testDir, jdkName, runDeadCodeFirst, toolWrapper, additional);

    InspectionTestUtil.compareToolResults(context, toolWrapper, checkRange, testDir);
  }

  protected void runTool(@NonNls final String testDir, @NonNls final String jdkName, final InspectionToolWrapper tool) {
    runTool(testDir, jdkName, false, tool);
  }

  /**
   * Sets up the project roots for {@code testDir}, builds the analysis scope and
   * inspection context, runs the tool(s), and returns the context for result checks.
   */
  protected GlobalInspectionContextImpl runTool(final String testDir,
                                                final String jdkName,
                                                boolean runDeadCodeFirst,
                                                @NotNull InspectionToolWrapper toolWrapper,
                                                @NotNull InspectionToolWrapper... additional) {
    final VirtualFile[] sourceDir = new VirtualFile[1];
    // Root-model changes must happen inside a write action.
    ApplicationManager.getApplication().runWriteAction(new Runnable() {
      @Override
      public void run() {
        try {
          setupRootModel(testDir, sourceDir, jdkName);
        }
        catch (Exception e) {
          LOG.error(e);
        }
      }
    });
    VirtualFile projectDir = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(new File(testDir));
    // When a "src" subdirectory exists, analyze its parent (the project dir);
    // otherwise the project dir itself is the source root.
    AnalysisScope scope = createAnalysisScope(sourceDir[0].equals(projectDir) ? projectDir : sourceDir[0].getParent());

    InspectionManagerEx inspectionManager = (InspectionManagerEx)InspectionManager.getInstance(getProject());
    InspectionToolWrapper[] toolWrappers = runDeadCodeFirst ? new InspectionToolWrapper []{getUnusedDeclarationWrapper(), toolWrapper} : new InspectionToolWrapper []{toolWrapper};
    toolWrappers = ArrayUtil.mergeArrays(toolWrappers, additional);
    final GlobalInspectionContextForTests globalContext =
      CodeInsightTestFixtureImpl.createGlobalContextForTool(scope, getProject(), inspectionManager, toolWrappers);

    InspectionTestUtil.runTool(toolWrapper, scope, globalContext);
    return globalContext;
  }

  @NotNull
  protected AnalysisScope createAnalysisScope(VirtualFile sourceDir) {
    PsiManager psiManager = PsiManager.getInstance(myProject);
    return new AnalysisScope(psiManager.findDirectory(sourceDir));
  }

  /**
   * Configures the module roots for a test: content root at {@code testDir},
   * source root at "src" (or the test dir itself), plus an optional "ext_src" root.
   *
   * @param testDir   directory with the test project
   * @param sourceDir single-element out-parameter receiving the chosen source root
   * @param sdkName   name of the SDK to set up (see {@link #getTestProjectSdk()})
   */
  protected void setupRootModel(final String testDir, final VirtualFile[] sourceDir, final String sdkName) {
    VirtualFile projectDir = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(new File(testDir));
    assertNotNull("could not find project dir " + testDir, projectDir);
    sourceDir[0] = projectDir.findChild("src");
    if (sourceDir[0] == null) {
      sourceDir[0] = projectDir;
    }
    // IMPORTANT! The jdk must be obtained in a way it is obtained in the normal program!
    //ProjectJdkEx jdk = ProjectJdkTable.getInstance().getInternalJdk();
    PsiTestUtil.removeAllRoots(myModule, getTestProjectSdk());
    PsiTestUtil.addContentRoot(myModule, projectDir);
    PsiTestUtil.addSourceRoot(myModule, sourceDir[0]);
    ext_src = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(new File(testDir + "/ext_src"));
    if (ext_src != null) {
      PsiTestUtil.addSourceRoot(myModule, ext_src);
    }
  }

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    // Register an entry-point extension that keeps everything under ext_src
    // alive for dead-code analysis; see tearDown() for the matching cleanup.
    ExtensionPoint<EntryPoint> point = Extensions.getRootArea().getExtensionPoint(ToolExtensionPoints.DEAD_CODE_TOOL);
    myUnusedCodeExtension = new EntryPoint() {
      @NotNull
      @Override
      public String getDisplayName() {
        return "duh";
      }

      @Override
      public boolean isEntryPoint(@NotNull RefElement refElement, @NotNull PsiElement psiElement) {
        return isEntryPoint(psiElement);
      }

      @Override
      public boolean isEntryPoint(@NotNull PsiElement psiElement) {
        // Anything under the ext_src root counts as an entry point.
        return ext_src != null && VfsUtilCore.isAncestor(ext_src, PsiUtilCore.getVirtualFile(psiElement), false);
      }

      @Override
      public boolean isSelected() {
        return false;
      }

      @Override
      public void setSelected(boolean selected) {
      }

      @Override
      public void readExternal(Element element) {
      }

      @Override
      public void writeExternal(Element element) {
      }
    };

    point.registerExtension(myUnusedCodeExtension);
  }

  @Override
  protected void tearDown() throws Exception {
    // Unregister the extension and drop references before the fixture is torn down.
    ExtensionPoint<EntryPoint> point = Extensions.getRootArea().getExtensionPoint(ToolExtensionPoints.DEAD_CODE_TOOL);
    point.unregisterExtension(myUnusedCodeExtension);
    myUnusedCodeExtension = null;
    ext_src = null;
    super.tearDown();
  }

  @Override
  protected void setUpJdk() {
    // Intentionally empty: the JDK is configured via setupRootModel/getTestProjectSdk.
  }

  protected Sdk getTestProjectSdk() {
    Sdk sdk = IdeaTestUtil.getMockJdk17();
    LanguageLevelProjectExtension.getInstance(getProject()).setLanguageLevel(LanguageLevel.JDK_1_5);
    return sdk;
  }

  @Override
  @NonNls
  protected String getTestDataPath() {
    return PathManagerEx.getTestDataPath() + "/inspection/";
  }
}
| |
/**
*
* Apache License
* Version 2.0, January 2004
* http://www.apache.org/licenses/
*
* TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
*
* 1. Definitions.
*
* "License" shall mean the terms and conditions for use, reproduction,
* and distribution as defined by Sections 1 through 9 of this document.
*
* "Licensor" shall mean the copyright owner or entity authorized by
* the copyright owner that is granting the License.
*
* "Legal Entity" shall mean the union of the acting entity and all
* other entities that control, are controlled by, or are under common
* control with that entity. For the purposes of this definition,
* "control" means (i) the power, direct or indirect, to cause the
* direction or management of such entity, whether by contract or
* otherwise, or (ii) ownership of fifty percent (50%) or more of the
* outstanding shares, or (iii) beneficial ownership of such entity.
*
* "You" (or "Your") shall mean an individual or Legal Entity
* exercising permissions granted by this License.
*
* "Source" form shall mean the preferred form for making modifications,
* including but not limited to software source code, documentation
* source, and configuration files.
*
* "Object" form shall mean any form resulting from mechanical
* transformation or translation of a Source form, including but
* not limited to compiled object code, generated documentation,
* and conversions to other media types.
*
* "Work" shall mean the work of authorship, whether in Source or
* Object form, made available under the License, as indicated by a
* copyright notice that is included in or attached to the work
* (an example is provided in the Appendix below).
*
* "Derivative Works" shall mean any work, whether in Source or Object
* form, that is based on (or derived from) the Work and for which the
* editorial revisions, annotations, elaborations, or other modifications
* represent, as a whole, an original work of authorship. For the purposes
* of this License, Derivative Works shall not include works that remain
* separable from, or merely link (or bind by name) to the interfaces of,
* the Work and Derivative Works thereof.
*
* "Contribution" shall mean any work of authorship, including
* the original version of the Work and any modifications or additions
* to that Work or Derivative Works thereof, that is intentionally
* submitted to Licensor for inclusion in the Work by the copyright owner
* or by an individual or Legal Entity authorized to submit on behalf of
* the copyright owner. For the purposes of this definition, "submitted"
* means any form of electronic, verbal, or written communication sent
* to the Licensor or its representatives, including but not limited to
* communication on electronic mailing lists, source code control systems,
* and issue tracking systems that are managed by, or on behalf of, the
* Licensor for the purpose of discussing and improving the Work, but
* excluding communication that is conspicuously marked or otherwise
* designated in writing by the copyright owner as "Not a Contribution."
*
* "Contributor" shall mean Licensor and any individual or Legal Entity
* on behalf of whom a Contribution has been received by Licensor and
* subsequently incorporated within the Work.
*
* 2. Grant of Copyright License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* copyright license to reproduce, prepare Derivative Works of,
* publicly display, publicly perform, sublicense, and distribute the
* Work and such Derivative Works in Source or Object form.
*
* 3. Grant of Patent License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* (except as stated in this section) patent license to make, have made,
* use, offer to sell, sell, import, and otherwise transfer the Work,
* where such license applies only to those patent claims licensable
* by such Contributor that are necessarily infringed by their
* Contribution(s) alone or by combination of their Contribution(s)
* with the Work to which such Contribution(s) was submitted. If You
* institute patent litigation against any entity (including a
* cross-claim or counterclaim in a lawsuit) alleging that the Work
* or a Contribution incorporated within the Work constitutes direct
* or contributory patent infringement, then any patent licenses
* granted to You under this License for that Work shall terminate
* as of the date such litigation is filed.
*
* 4. Redistribution. You may reproduce and distribute copies of the
* Work or Derivative Works thereof in any medium, with or without
* modifications, and in Source or Object form, provided that You
* meet the following conditions:
*
* (a) You must give any other recipients of the Work or
* Derivative Works a copy of this License; and
*
* (b) You must cause any modified files to carry prominent notices
* stating that You changed the files; and
*
* (c) You must retain, in the Source form of any Derivative Works
* that You distribute, all copyright, patent, trademark, and
* attribution notices from the Source form of the Work,
* excluding those notices that do not pertain to any part of
* the Derivative Works; and
*
* (d) If the Work includes a "NOTICE" text file as part of its
* distribution, then any Derivative Works that You distribute must
* include a readable copy of the attribution notices contained
* within such NOTICE file, excluding those notices that do not
* pertain to any part of the Derivative Works, in at least one
* of the following places: within a NOTICE text file distributed
* as part of the Derivative Works; within the Source form or
* documentation, if provided along with the Derivative Works; or,
* within a display generated by the Derivative Works, if and
* wherever such third-party notices normally appear. The contents
* of the NOTICE file are for informational purposes only and
* do not modify the License. You may add Your own attribution
* notices within Derivative Works that You distribute, alongside
* or as an addendum to the NOTICE text from the Work, provided
* that such additional attribution notices cannot be construed
* as modifying the License.
*
* You may add Your own copyright statement to Your modifications and
* may provide additional or different license terms and conditions
* for use, reproduction, or distribution of Your modifications, or
* for any such Derivative Works as a whole, provided Your use,
* reproduction, and distribution of the Work otherwise complies with
* the conditions stated in this License.
*
* 5. Submission of Contributions. Unless You explicitly state otherwise,
* any Contribution intentionally submitted for inclusion in the Work
* by You to the Licensor shall be under the terms and conditions of
* this License, without any additional terms or conditions.
* Notwithstanding the above, nothing herein shall supersede or modify
* the terms of any separate license agreement you may have executed
* with Licensor regarding such Contributions.
*
* 6. Trademarks. This License does not grant permission to use the trade
* names, trademarks, service marks, or product names of the Licensor,
* except as required for reasonable and customary use in describing the
* origin of the Work and reproducing the content of the NOTICE file.
*
* 7. Disclaimer of Warranty. Unless required by applicable law or
* agreed to in writing, Licensor provides the Work (and each
* Contributor provides its Contributions) on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied, including, without limitation, any warranties or conditions
* of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
* PARTICULAR PURPOSE. You are solely responsible for determining the
* appropriateness of using or redistributing the Work and assume any
* risks associated with Your exercise of permissions under this License.
*
* 8. Limitation of Liability. In no event and under no legal theory,
* whether in tort (including negligence), contract, or otherwise,
* unless required by applicable law (such as deliberate and grossly
* negligent acts) or agreed to in writing, shall any Contributor be
* liable to You for damages, including any direct, indirect, special,
* incidental, or consequential damages of any character arising as a
* result of this License or out of the use or inability to use the
* Work (including but not limited to damages for loss of goodwill,
* work stoppage, computer failure or malfunction, or any and all
* other commercial damages or losses), even if such Contributor
* has been advised of the possibility of such damages.
*
* 9. Accepting Warranty or Additional Liability. While redistributing
* the Work or Derivative Works thereof, You may choose to offer,
* and charge a fee for, acceptance of support, warranty, indemnity,
* or other liability obligations and/or rights consistent with this
* License. However, in accepting such obligations, You may act only
* on Your own behalf and on Your sole responsibility, not on behalf
* of any other Contributor, and only if You agree to indemnify,
* defend, and hold each Contributor harmless for any liability
* incurred by, or claims asserted against, such Contributor by reason
* of your accepting any such warranty or additional liability.
*
* END OF TERMS AND CONDITIONS
*
* APPENDIX: How to apply the Apache License to your work.
*
* To apply the Apache License to your work, attach the following
* boilerplate notice, with the fields enclosed by brackets "[]"
* replaced with your own identifying information. (Don't include
* the brackets!) The text should be enclosed in the appropriate
* comment syntax for the file format. We also recommend that a
* file or class name and description of purpose be included on the
* same "printed page" as the copyright notice for easier
* identification within third-party archives.
*
* Copyright 2016 Alibaba Group
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.taobao.weex.ui.view.refresh.wrapper;
import android.content.Context;
import android.support.v7.widget.OrientationHelper;
import android.util.AttributeSet;
import android.view.View;
import android.widget.FrameLayout;
import com.taobao.weex.ui.view.refresh.core.WXRefreshView;
import com.taobao.weex.ui.view.refresh.core.WXSwipeLayout;
/**
 * BounceView(SwipeLayout) that hosts a scrolling inner view (scroller/list)
 * together with the pull-to-refresh header and pull-to-load footer views.
 *
 * @param <T> type of the inner scrolling view
 */
public abstract class BaseBounceView<T extends View> extends FrameLayout {

  protected WXSwipeLayout swipeLayout;
  private int mOrientation = OrientationHelper.VERTICAL;
  private T mInnerView;

  public BaseBounceView(Context context, int orientation) {
    this(context, null, orientation);
  }

  public BaseBounceView(Context context, AttributeSet attrs, int orientation) {
    super(context, attrs);
    mOrientation = orientation;
    init(context);
  }

  public int getOrientation() {
    return mOrientation;
  }

  private void init(Context context) {
    createBounceView(context);
  }

  boolean isVertical() {
    return mOrientation == OrientationHelper.VERTICAL;
  }

  public void setOnRefreshListener(WXSwipeLayout.WXOnRefreshListener onRefreshListener) {
    if (swipeLayout == null) {
      return;
    }
    swipeLayout.setOnRefreshListener(onRefreshListener);
  }

  public void setOnLoadingListener(WXSwipeLayout.WXOnLoadingListener onLoadingListener) {
    if (swipeLayout == null) {
      return;
    }
    swipeLayout.setOnLoadingListener(onLoadingListener);
  }

  public void finishPullRefresh() {
    if (swipeLayout == null) {
      return;
    }
    swipeLayout.finishPullRefresh();
  }

  public void finishPullLoad() {
    if (swipeLayout == null) {
      return;
    }
    swipeLayout.finishPullLoad();
  }

  /**
   * Builds the swipe layout, asks the subclass for the inner view, and wires
   * both into this FrameLayout. Returns null when no inner view is supplied.
   */
  private WXSwipeLayout createBounceView(Context context) {
    swipeLayout = new WXSwipeLayout(context);
    swipeLayout.setLayoutParams(new FrameLayout.LayoutParams(
        FrameLayout.LayoutParams.MATCH_PARENT, FrameLayout.LayoutParams.MATCH_PARENT));
    mInnerView = setInnerView(context);
    if (mInnerView == null) {
      return null;
    }
    swipeLayout.addView(mInnerView, new FrameLayout.LayoutParams(
        FrameLayout.LayoutParams.MATCH_PARENT, FrameLayout.LayoutParams.MATCH_PARENT));
    addView(swipeLayout, LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
    return swipeLayout;
  }

  /**
   * @return the child of swipelayout : recyclerview or scrollview
   */
  public T getInnerView() {
    return mInnerView;
  }

  public abstract T setInnerView(Context context);

  /**
   * Installs a refresh header and enables pull-to-refresh.
   *
   * @param headerView should be {@link WXRefreshView}
   */
  public void setHeaderView(View headerView) {
    setRefreshEnable(true);
    if (swipeLayout != null && swipeLayout.getHeaderView() != null) {
      swipeLayout.getHeaderView().setRefreshView(headerView);
    }
  }

  /**
   * Installs a loading footer and enables pull-to-load.
   *
   * @param footerView should be {@link WXRefreshView}
   */
  public void setFooterView(View footerView) {
    setLoadmoreEnable(true);
    if (swipeLayout != null && swipeLayout.getFooterView() != null) {
      swipeLayout.getFooterView().setRefreshView(footerView);
    }
  }

  public void setRefreshEnable(boolean enable) {
    if (swipeLayout != null) {
      swipeLayout.setPullRefreshEnable(enable);
    }
  }

  public void setLoadmoreEnable(boolean enable) {
    if (swipeLayout != null) {
      swipeLayout.setPullLoadEnable(enable);
    }
  }

  public WXSwipeLayout getSwipeLayout() {
    return swipeLayout;
  }

  public abstract void onRefreshingComplete();

  public abstract void onLoadmoreComplete();
}
| |
/*
* Copyright 2012-2014 eBay Software Foundation and selendroid committers.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package io.selendroid.server;
import io.selendroid.server.common.BaseRequestHandler;
import io.selendroid.server.common.BaseServlet;
import io.selendroid.server.common.Response;
import io.selendroid.server.common.SelendroidResponse;
import io.selendroid.server.common.StatusCode;
import io.selendroid.server.common.exceptions.AppCrashedException;
import io.selendroid.server.common.exceptions.StaleElementReferenceException;
import io.selendroid.server.common.http.HttpRequest;
import io.selendroid.server.common.http.HttpResponse;
import io.selendroid.server.common.http.TrafficCounter;
import io.selendroid.server.extension.ExtensionLoader;
import io.selendroid.server.handler.*;
import io.selendroid.server.handler.alert.Alert;
import io.selendroid.server.handler.alert.AlertAccept;
import io.selendroid.server.handler.alert.AlertDismiss;
import io.selendroid.server.handler.alert.AlertSendKeys;
import io.selendroid.server.handler.extension.ExtensionCallHandler;
import io.selendroid.server.handler.network.GetNetworkConnectionType;
import io.selendroid.server.handler.script.ExecuteAsyncScript;
import io.selendroid.server.handler.script.ExecuteScript;
import io.selendroid.server.handler.timeouts.AsyncTimeoutHandler;
import io.selendroid.server.handler.timeouts.SetImplicitWaitTimeout;
import io.selendroid.server.handler.timeouts.TimeoutsHandler;
import io.selendroid.server.model.DefaultSelendroidDriver;
import io.selendroid.server.model.SelendroidDriver;
import io.selendroid.server.util.SelendroidLogger;
import java.net.URLDecoder;
/**
 * HTTP servlet that maps WebDriver wire-protocol URIs to Selendroid request
 * handlers and dispatches incoming requests to them.
 *
 * <p>Routing is set up once in {@link #init()}; {@link #handleRequest} then
 * decorates each request with the session/element parameters extracted from
 * the mapped URI, rejects commands while an unexpected alert is open, and
 * translates handler exceptions into WebDriver status codes.
 */
public class AndroidServlet extends BaseServlet {
  /** Driver shared with every handler via the request data map (DRIVER_KEY). */
  private SelendroidDriver driver = null;
  /** Loader for dynamically registered extension handlers. */
  protected ExtensionLoader extensionLoader = null;

  public AndroidServlet(SelendroidDriver driver, ExtensionLoader extensionLoader) {
    this.driver = driver;
    this.extensionLoader = extensionLoader;
    init();
  }

  /** Registers every supported (and explicitly unsupported) wire-protocol endpoint. */
  protected void init() {
    register(postHandler, new NewSession("/wd/hub/session"));
    register(getHandler, new ListSessions("/wd/hub/sessions"));
    register(getHandler, new GetCapabilities("/wd/hub/session/:sessionId"));
    register(deleteHandler, new DeleteSession("/wd/hub/session/:sessionId"));
    register(getHandler, new Alert("/wd/hub/session/:sessionId/alert_text"));
    register(postHandler, new AlertSendKeys("/wd/hub/session/:sessionId/alert_text"));
    register(postHandler, new AlertAccept("/wd/hub/session/:sessionId/accept_alert"));
    register(postHandler, new GoBack("/wd/hub/session/:sessionId/back"));
    register(getHandler, new GetCookies("/wd/hub/session/:sessionId/cookie"));
    register(postHandler, new AddCookie("/wd/hub/session/:sessionId/cookie"));
    register(deleteHandler, new DeleteCookies("/wd/hub/session/:sessionId/cookie"));
    register(deleteHandler, new DeleteNamedCookie("/wd/hub/session/:sessionId/cookie/:name"));
    register(postHandler, new AlertDismiss("/wd/hub/session/:sessionId/dismiss_alert"));
    register(postHandler, new FindElement("/wd/hub/session/:sessionId/element"));
    register(postHandler, new FindElements("/wd/hub/session/:sessionId/elements"));
    register(getHandler, new GetElementAttribute(
        "/wd/hub/session/:sessionId/element/:id/attribute/:name"));
    register(postHandler, new ClearElement("/wd/hub/session/:sessionId/element/:id/clear"));
    // register(postHandler, new ClickElement("/wd/hub/session/:sessionId/element/:id/click"));
    register(getHandler,
        new GetElementDisplayed("/wd/hub/session/:sessionId/element/:id/displayed"));
    register(postHandler, new FindChildElement("/wd/hub/session/:sessionId/element/:id/element"));
    register(postHandler, new FindChildElements("/wd/hub/session/:sessionId/element/:id/elements"));
    register(getHandler, new GetElementEnabled("/wd/hub/session/:sessionId/element/:id/enabled"));
    register(getHandler, new ElementLocation("/wd/hub/session/:sessionId/element/:id/location"));
    register(getHandler, new GetElementLocationInView("/wd/hub/session/:sessionId/element/:id/location_in_view"));
    register(getHandler, new GetElementTagName("/wd/hub/session/:sessionId/element/:id/name"));
    register(getHandler, new GetElementSelected("/wd/hub/session/:sessionId/element/:id/selected"));
    register(getHandler, new LogElement("/wd/hub/session/:sessionId/element/:id/source"));
    register(postHandler, new SubmitForm("/wd/hub/session/:sessionId/element/:id/submit"));
    register(getHandler, new GetText("/wd/hub/session/:sessionId/element/:id/text"));
    register(postHandler, new SendKeysToElement("/wd/hub/session/:sessionId/element/:id/value"));
    register(getHandler, new GetElementSize("/wd/hub/session/:sessionId/element/:id/size"));
    register(postHandler, new ExecuteScript("/wd/hub/session/:sessionId/execute"));
    register(postHandler, new ExecuteAsyncScript("/wd/hub/session/:sessionId/execute_async"));
    register(postHandler, new GoForward("/wd/hub/session/:sessionId/forward"));
    register(postHandler, new FrameSwitchHandler("/wd/hub/session/:sessionId/frame"));
    register(postHandler, new SendKeys("/wd/hub/session/:sessionId/keys"));
    register(postHandler, new Refresh("/wd/hub/session/:sessionId/refresh"));
    register(getHandler, new CaptureScreenshot("/wd/hub/session/:sessionId/screenshot"));
    register(getHandler, new LogElementTree("/wd/hub/session/:sessionId/source"));
    register(postHandler, new TimeoutsHandler("/wd/hub/session/:sessionId/timeouts"));
    register(postHandler, new AsyncTimeoutHandler(
        "/wd/hub/session/:sessionId/timeouts/async_script"));
    register(postHandler, new SetImplicitWaitTimeout(
        "/wd/hub/session/:sessionId/timeouts/implicit_wait"));
    register(getHandler, new GetPageTitle("/wd/hub/session/:sessionId/title"));
    register(getHandler, new GetCurrentUrl("/wd/hub/session/:sessionId/url"));
    register(postHandler, new OpenUrl("/wd/hub/session/:sessionId/url"));
    register(postHandler, new SwitchContext("/wd/hub/session/:sessionId/window"));
    register(getHandler, new GetWindowSize("/wd/hub/session/:sessionId/window/:windowHandle/size"));
    register(getHandler, new GetContext("/wd/hub/session/:sessionId/window_handle"));
    register(getHandler, new GetContexts("/wd/hub/session/:sessionId/window_handles"));
    register(getHandler, new GetScreenOrientation("/wd/hub/session/:sessionId/orientation"));
    register(postHandler, new RotateScreen("/wd/hub/session/:sessionId/orientation"));
    // Advanced Touch API
    register(postHandler, new SingleTapOnElement("/wd/hub/session/:sessionId/touch/click"));
    register(postHandler, new Down("/wd/hub/session/:sessionId/touch/down"));
    register(postHandler, new Up("/wd/hub/session/:sessionId/touch/up"));
    register(postHandler, new Move("/wd/hub/session/:sessionId/touch/move"));
    register(postHandler, new Scroll("/wd/hub/session/:sessionId/touch/scroll"));
    register(postHandler, new DoubleTapOnElement("/wd/hub/session/:sessionId/touch/doubleclick"));
    register(postHandler, new LongPressOnElement("/wd/hub/session/:sessionId/touch/longclick"));
    register(postHandler, new Flick("/wd/hub/session/:sessionId/touch/flick"));
    // Track-ball functionality
    register(postHandler, new Roll("/wd/hub/session/:sessionId/trackball/roll"));
    // The new endpoints for context switching coming with Selenium 3.0 & mobile spec
    register(getHandler, new GetNetworkConnectionType("/wd/hub/session/:sessionId/network_connection"));
    register(getHandler, new GetContext("/wd/hub/session/:sessionId/context"));
    register(getHandler, new GetContexts("/wd/hub/session/:sessionId/contexts"));
    register(postHandler, new SwitchContext("/wd/hub/session/:sessionId/context"));
    // Custom extensions to wire protocol
    register(getHandler, new GetScreenState("/wd/hub/session/:sessionId/selendroid/screen/brightness"));
    register(postHandler, new SetScreenState("/wd/hub/session/:sessionId/selendroid/screen/brightness"));
    register(postHandler, new InspectorTap("/wd/hub/session/:sessionId/tap/2"));
    register(getHandler, new GetCommandConfiguration(
        "/wd/hub/session/:sessionId/selendroid/configure/command/:command"));
    register(postHandler, new SetCommandConfiguration(
        "/wd/hub/session/:sessionId/selendroid/configure/command/:command"));
    register(postHandler, new ForceGcExplicitly("/wd/hub/session/:sessionId/selendroid/gc"));
    register(postHandler, new SetSystemProperty("/wd/hub/session/:sessionId/selendroid/systemProperty"));
    // Endpoints to send app to background and resume it
    register(postHandler, new BackgroundApp("/wd/hub/session/:sessionId/selendroid/background"));
    register(postHandler, new ResumeApp("/wd/hub/session/:sessionId/selendroid/resume"));
    // Endpoints to add to and read call logs
    register(postHandler, new AddCallLog("/wd/hub/session/:sessionId/selendroid/addCallLog"));
    register(postHandler, new ReadCallLog("/wd/hub/session/:sessionId/selendroid/readCallLog"));
    // Handle calls to dynamically loaded handlers
    register(postHandler, new ExtensionCallHandler(
        "/wd/hub/session/:sessionId/selendroid/extension", extensionLoader));
    // Actions sequencing endpoint
    register(postHandler, new Actions("/wd/hub/session/:sessionId/actions"));
    // currently not yet supported
    register(getHandler, new UnknownCommandHandler(
        "/wd/hub/session/:sessionId/ime/available_engines"));
    register(getHandler, new UnknownCommandHandler("/wd/hub/session/:sessionId/ime/active_engine"));
    register(getHandler, new UnknownCommandHandler("/wd/hub/session/:sessionId/ime/activated"));
    register(postHandler, new UnknownCommandHandler("/wd/hub/session/:sessionId/ime/deactivate"));
    register(postHandler, new UnknownCommandHandler("/wd/hub/session/:sessionId/ime/activate"));
    register(deleteHandler, new UnknownCommandHandler("/wd/hub/session/:sessionId/window"));
    register(postHandler, new UnknownCommandHandler(
        "/wd/hub/session/:sessionId/window/:windowHandle/size"));
    register(postHandler, new UnknownCommandHandler(
        "/wd/hub/session/:sessionId/window/:windowHandle/position"));
    register(getHandler, new UnknownCommandHandler(
        "/wd/hub/session/:sessionId/window/:windowHandle/position"));
    register(postHandler, new UnknownCommandHandler(
        "/wd/hub/session/:sessionId/window/:windowHandle/maximize"));
    register(getHandler, new UnknownCommandHandler("/wd/hub/session/:sessionId/element/:id"));
    register(postHandler, new UnknownCommandHandler("/wd/hub/session/:sessionId/element/active"));
    register(getHandler, new UnknownCommandHandler(
        "/wd/hub/session/:sessionId/element/:id/equals/:other"));
    register(getHandler, new UnknownCommandHandler(
        "/wd/hub/session/:sessionId/element/:id/css/:propertyName"));
    register(postHandler, new UnknownCommandHandler("/wd/hub/session/:sessionId/moveto"));
    register(postHandler, new UnknownCommandHandler("/wd/hub/session/:sessionId/buttondown"));
    register(postHandler, new UnknownCommandHandler("/wd/hub/session/:sessionId/buttonup"));
    register(postHandler, new UnknownCommandHandler("/wd/hub/session/:sessionId/doubleclick"));
    // NOTE(review): the location/local_storage block below was previously
    // registered twice verbatim; the redundant second copy has been removed.
    register(getHandler, new UnknownCommandHandler("/wd/hub/session/:sessionId/location"));
    register(postHandler, new UnknownCommandHandler("/wd/hub/session/:sessionId/location"));
    register(getHandler, new UnknownCommandHandler("/wd/hub/session/:sessionId/local_storage"));
    register(postHandler, new UnknownCommandHandler("/wd/hub/session/:sessionId/local_storage"));
    register(deleteHandler, new UnknownCommandHandler("/wd/hub/session/:sessionId/local_storage"));
    register(getHandler, new UnknownCommandHandler(
        "/wd/hub/session/:sessionId/local_storage/key/:key"));
    register(deleteHandler, new UnknownCommandHandler(
        "/wd/hub/session/:sessionId/local_storage/key/:key"));
    register(getHandler, new UnknownCommandHandler("/wd/hub/session/:sessionId/local_storage/size"));
    register(getHandler, new UnknownCommandHandler("/wd/hub/session/:sessionId/session_storage"));
    register(postHandler, new UnknownCommandHandler("/wd/hub/session/:sessionId/session_storage"));
    register(deleteHandler, new UnknownCommandHandler("/wd/hub/session/:sessionId/session_storage"));
    register(getHandler, new UnknownCommandHandler(
        "/wd/hub/session/:sessionId/session_storage/key/:key"));
    register(deleteHandler, new UnknownCommandHandler(
        "/wd/hub/session/:sessionId/session_storage/key/:key"));
    register(getHandler, new UnknownCommandHandler(
        "/wd/hub/session/:sessionId/session_storage/size"));
    // handled in the standalone-server
    register(postHandler, new UnknownCommandHandler("/wd/hub/session/:sessionId/log"));
    register(getHandler, new UnknownCommandHandler("/wd/hub/session/:sessionId/log/types"));
  }

  /**
   * Extracts the URI template parameters (:sessionId, :command, :id, :name)
   * from the request URI and stores them, plus the shared driver, in the
   * request's data map for the handler to consume.
   */
  private void addHandlerAttributesToRequest(HttpRequest request, String mappedUri) {
    String sessionId = getParameter(mappedUri, request.uri(), ":sessionId");
    if (sessionId != null) {
      request.data().put(SESSION_ID_KEY, sessionId);
    }
    String command = getParameter(mappedUri, request.uri(), ":command");
    if (command != null) {
      request.data().put(COMMAND_NAME_KEY, command);
    }
    String id = getParameter(mappedUri, request.uri(), ":id");
    if (id != null) {
      // Element ids arrive URL-encoded; decode explicitly as UTF-8 rather
      // than relying on the deprecated platform-charset overload.
      request.data().put(ELEMENT_ID_KEY, decodeUtf8(id));
    }
    String name = getParameter(mappedUri, request.uri(), ":name");
    if (name != null) {
      request.data().put(NAME_ID_KEY, name);
    }
    request.data().put(DRIVER_KEY, driver);
  }

  /** URL-decodes {@code value} as UTF-8; UTF-8 is guaranteed on every JVM. */
  private static String decodeUtf8(String value) {
    try {
      return URLDecoder.decode(value, "UTF-8");
    } catch (UnsupportedEncodingException e) {
      throw new AssertionError("UTF-8 is a required charset", e);
    }
  }

  /**
   * Dispatches a request to its matched handler, converting the well-known
   * exception types into WebDriver status codes and logging traffic totals.
   * Unmatched URIs (including /favicon.ico probes) get a plain 404.
   */
  @Override
  public void handleRequest(HttpRequest request, HttpResponse response, BaseRequestHandler handler) {
    if (handler == null) {
      // No route matched (this also covers browser /favicon.ico probes).
      response.setStatus(404).end();
      return;
    }
    Response result;
    try {
      addHandlerAttributesToRequest(request, handler.getMappedUri());
      if (!handler.commandAllowedWithAlertPresentInWebViewMode()) {
        SelendroidDriver driver =
            (SelendroidDriver) request.data().get(AndroidServlet.DRIVER_KEY);
        // Most commands must fail fast while an unexpected alert is showing.
        if (driver != null && driver.isAlertPresent()) {
          result =
              new SelendroidResponse(handler.getSessionId(request),
                  StatusCode.UNEXPECTED_ALERT_OPEN,
                  "Unhandled Alert present");
          handleResponse(request, response, (SelendroidResponse) result);
          return;
        }
      }
      result = handler.handle(request);
    } catch (StaleElementReferenceException se) {
      try {
        SelendroidLogger.error("StaleElementReferenceException", se);
        String sessionId = getParameter(handler.getMappedUri(), request.uri(), ":sessionId");
        result = new SelendroidResponse(sessionId, StatusCode.STALE_ELEMENT_REFERENCE, se);
      } catch (Exception e) {
        SelendroidLogger.error("Error responding to StaleElementReferenceException", e);
        replyWithServerError(response);
        return;
      }
    } catch (AppCrashedException ae) {
      try {
        SelendroidLogger.error("App crashed when handling request", ae);
        String sessionId = getParameter(handler.getMappedUri(), request.uri(), ":sessionId");
        result = new SelendroidResponse(sessionId, StatusCode.UNKNOWN_ERROR, ae);
      } catch (Exception e) {
        SelendroidLogger.error("Error responding to app crash", e);
        replyWithServerError(response);
        return;
      }
    } catch (Exception e) {
      // Boundary catch: anything unexpected becomes a generic server error.
      SelendroidLogger.error("Error handling request.", e);
      replyWithServerError(response);
      return;
    }
    handleResponse(request, response, (SelendroidResponse) result);
    String trafficStatistics = String.format(
        "traffic_stats: rx_bytes %d tx_bytes %d",
        TrafficCounter.readBytes(),
        TrafficCounter.writtenBytes());
    SelendroidLogger.info(trafficStatistics);
  }
}
| |
/*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package afirsraftgarrier.demoandroid.androidapi.app;
import android.app.Activity;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.app.Service;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.Bundle;
import android.os.RemoteException;
import android.os.Handler;
import android.os.IBinder;
import android.os.Message;
import android.os.Process;
import android.os.RemoteCallbackList;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
// Need the following import to get access to the app resources, since this
// class is in a sub-package.
import afirsraftgarrier.demoandroid.R;
/**
* This is an example of implementing an application service that runs in a
* different process than the application. Because it can be in another
* process, we must use IPC to interact with it. The
* {@link Controller} and {@link Binding} classes
* show how to interact with the service.
*
* <p>Note that most applications <strong>do not</strong> need to deal with
* the complexity shown here. If your application simply has a service
* running in its own process, the {@link LocalService} sample shows a much
* simpler way to interact with it.
*/
public class RemoteService extends Service {
/**
 * This is a list of callbacks that have been registered with the
 * service.  Note that this is package scoped (instead of private) so
 * that it can be accessed more efficiently from inner classes.
 */
final RemoteCallbackList<IRemoteServiceCallback> mCallbacks
        = new RemoteCallbackList<IRemoteServiceCallback>();

// Counter incremented once a second by mHandler and broadcast to clients.
int mValue = 0;
// Notification manager, cached in onCreate().
NotificationManager mNM;
@Override
public void onCreate() {
    mNM = (NotificationManager)getSystemService(NOTIFICATION_SERVICE);

    // Display a notification about us starting.
    showNotification();

    // While this service is running, it will continually increment a
    // number.  Send the first message that is used to perform the
    // increment; mHandler then reschedules itself once a second.
    mHandler.sendEmptyMessage(REPORT_MSG);
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
    // Explicit starts are only logged; the periodic work is driven by the
    // handler loop kicked off in onCreate().
    final String detail = "Received start id " + startId + ": " + intent;
    Log.i("RemoteService", detail);
    // Do not restart automatically if the process is killed.
    return START_NOT_STICKY;
}
@Override
public void onDestroy() {
    // Cancel the persistent notification.
    mNM.cancel(R.string.remote_service_started);

    // Tell the user we stopped.
    Toast.makeText(this, R.string.remote_service_stopped, Toast.LENGTH_SHORT).show();

    // Unregister all callbacks.
    mCallbacks.kill();

    // Remove the next pending message to increment the counter, stopping
    // the increment loop.
    mHandler.removeMessages(REPORT_MSG);
}
// BEGIN_INCLUDE(exposing_a_service)
@Override
public IBinder onBind(Intent intent) {
    // The intent action names which of our two AIDL interfaces the client
    // wants; hand back the matching binder.  A service exposing a single
    // interface could return it unconditionally.
    final String requestedInterface = intent.getAction();
    if (IRemoteService.class.getName().equals(requestedInterface)) {
        return mBinder;
    }
    if (ISecondary.class.getName().equals(requestedInterface)) {
        return mSecondaryBinder;
    }
    // Unknown interface: refuse the bind.
    return null;
}
/**
 * Primary AIDL interface implementation (IRemoteService), letting clients
 * register/unregister callbacks for value updates.  Runs on binder threads;
 * RemoteCallbackList handles its own synchronization.
 */
private final IRemoteService.Stub mBinder = new IRemoteService.Stub() {
    public void registerCallback(IRemoteServiceCallback cb) {
        if (cb != null) mCallbacks.register(cb);
    }
    public void unregisterCallback(IRemoteServiceCallback cb) {
        if (cb != null) mCallbacks.unregister(cb);
    }
};
/**
 * A secondary interface to the service: exposes the hosting process's PID
 * (used by the sample's "kill" button).  basicTypes is a deliberate no-op
 * demonstrating the primitive types AIDL can marshal.
 */
private final ISecondary.Stub mSecondaryBinder = new ISecondary.Stub() {
    public int getPid() {
        return Process.myPid();
    }
    public void basicTypes(int anInt, long aLong, boolean aBoolean,
            float aFloat, double aDouble, String aString) {
    }
};
// END_INCLUDE(exposing_a_service)
@Override
public void onTaskRemoved(Intent rootIntent) {
    // Purely informational for the sample: show which task removal
    // triggered this callback.
    Toast.makeText(this, "Task removed: " + rootIntent, Toast.LENGTH_LONG).show();
}
// Message code for one tick of the increment loop.
private static final int REPORT_MSG = 1;

/**
 * Main-thread handler driving the once-a-second loop: bump the shared
 * counter, broadcast the new value to every registered client, then
 * reschedule itself.
 */
private final Handler mHandler = new Handler() {
    @Override public void handleMessage(Message msg) {
        if (msg.what != REPORT_MSG) {
            super.handleMessage(msg);
            return;
        }
        // Up it goes.
        final int newValue = ++mValue;

        // Broadcast the new value to all registered clients.
        final int callbackCount = mCallbacks.beginBroadcast();
        for (int index = 0; index < callbackCount; index++) {
            try {
                mCallbacks.getBroadcastItem(index).valueChanged(newValue);
            } catch (RemoteException e) {
                // The RemoteCallbackList will take care of removing
                // the dead object for us.
            }
        }
        mCallbacks.finishBroadcast();

        // Repeat every 1 second.
        sendMessageDelayed(obtainMessage(REPORT_MSG), 1000);
    }
};
/**
 * Posts the persistent "service started" notification.  The string resource
 * id doubles as the notification id so onDestroy() can cancel it.
 */
private void showNotification() {
    // Same text serves as both the ticker and the expanded content.
    CharSequence text = getText(R.string.remote_service_started);

    // Tapping the notification opens the Controller activity.
    Intent launch = new Intent(this, Controller.class);
    PendingIntent contentIntent = PendingIntent.getActivity(this, 0, launch, 0);

    // Assemble the notification panel entry.
    Notification.Builder builder = new Notification.Builder(this);
    builder.setSmallIcon(R.drawable.stat_sample);
    builder.setTicker(text);
    builder.setWhen(System.currentTimeMillis());
    builder.setContentTitle(getText(R.string.remote_service_label));
    builder.setContentText(text);
    builder.setContentIntent(contentIntent);

    mNM.notify(R.string.remote_service_started, builder.build());
}
// ----------------------------------------------------------------------
/**
 * <p>Example of explicitly starting and stopping the remote service.
 * This demonstrates the implementation of a service that runs in a different
 * process than the rest of the application, which is explicitly started and
 * stopped as desired.</p>
 *
 * <p>Note that this is implemented as an inner class only to keep the sample
 * all together; typically this code would appear in some separate class.
 */
public static class Controller extends Activity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.remote_service_controller);

        // Wire the two buttons to their click handlers.
        Button startButton = (Button) findViewById(R.id.start);
        startButton.setOnClickListener(mStartListener);
        Button stopButton = (Button) findViewById(R.id.stop);
        stopButton.setOnClickListener(mStopListener);
    }

    private OnClickListener mStartListener = new OnClickListener() {
        public void onClick(View v) {
            // Make sure the service is started; it keeps running until
            // someone calls stopService().  An explicit component is used
            // here; an action string would let other packages replace the
            // service.
            startService(new Intent(Controller.this, RemoteService.class));
        }
    };

    private OnClickListener mStopListener = new OnClickListener() {
        public void onClick(View v) {
            // Cancel a previous startService().  The service will not
            // actually stop yet if clients are still bound to it.
            stopService(new Intent(Controller.this, RemoteService.class));
        }
    };
}
// ----------------------------------------------------------------------
/**
* Example of binding and unbinding to the remote service.
* This demonstrates the implementation of a service which the client will
* bind to, interacting with it through an aidl interface.</p>
*
* <p>Note that this is implemented as an inner class only keep the sample
* all together; typically this code would appear in some separate class.
*/
// BEGIN_INCLUDE(calling_a_service)
public static class Binding extends Activity {
    /** The primary interface we will be calling on the service. */
    IRemoteService mService = null;
    /** Another interface we use on the service. */
    ISecondary mSecondaryService = null;

    // Button that kills the service process; enabled only while attached.
    Button mKillButton;
    // Status line reflecting the current connection state.
    TextView mCallbackText;

    // True while we hold the two bindService() connections below.
    private boolean mIsBound;

    /**
     * Standard initialization of this activity.  Set up the UI, then wait
     * for the user to poke it before doing anything.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.remote_service_binding);

        // Watch for button clicks.
        Button button = (Button)findViewById(R.id.bind);
        button.setOnClickListener(mBindListener);
        button = (Button)findViewById(R.id.unbind);
        button.setOnClickListener(mUnbindListener);
        mKillButton = (Button)findViewById(R.id.kill);
        mKillButton.setOnClickListener(mKillListener);
        // Killing only makes sense once we are attached to the service.
        mKillButton.setEnabled(false);

        mCallbackText = (TextView)findViewById(R.id.callback);
        mCallbackText.setText("Not attached.");
    }

    /**
     * Class for interacting with the main interface of the service.
     */
    private ServiceConnection mConnection = new ServiceConnection() {
        public void onServiceConnected(ComponentName className,
                IBinder service) {
            // This is called when the connection with the service has been
            // established, giving us the service object we can use to
            // interact with the service.  We are communicating with our
            // service through an IDL interface, so get a client-side
            // representation of that from the raw service object.
            mService = IRemoteService.Stub.asInterface(service);
            mKillButton.setEnabled(true);
            mCallbackText.setText("Attached.");

            // We want to monitor the service for as long as we are
            // connected to it.
            try {
                mService.registerCallback(mCallback);
            } catch (RemoteException e) {
                // In this case the service has crashed before we could even
                // do anything with it; we can count on soon being
                // disconnected (and then reconnected if it can be restarted)
                // so there is no need to do anything here.
            }

            // As part of the sample, tell the user what happened.
            Toast.makeText(Binding.this, R.string.remote_service_connected,
                    Toast.LENGTH_SHORT).show();
        }

        public void onServiceDisconnected(ComponentName className) {
            // This is called when the connection with the service has been
            // unexpectedly disconnected -- that is, its process crashed.
            mService = null;
            mKillButton.setEnabled(false);
            mCallbackText.setText("Disconnected.");

            // As part of the sample, tell the user what happened.
            Toast.makeText(Binding.this, R.string.remote_service_disconnected,
                    Toast.LENGTH_SHORT).show();
        }
    };

    /**
     * Class for interacting with the secondary interface of the service.
     */
    private ServiceConnection mSecondaryConnection = new ServiceConnection() {
        public void onServiceConnected(ComponentName className,
                IBinder service) {
            // Connecting to a secondary interface is the same as any
            // other interface.
            mSecondaryService = ISecondary.Stub.asInterface(service);
            mKillButton.setEnabled(true);
        }

        public void onServiceDisconnected(ComponentName className) {
            mSecondaryService = null;
            mKillButton.setEnabled(false);
        }
    };

    private OnClickListener mBindListener = new OnClickListener() {
        public void onClick(View v) {
            // Establish a couple connections with the service, binding
            // by interface names.  This allows other applications to be
            // installed that replace the remote service by implementing
            // the same interface.
            Intent intent = new Intent(Binding.this, RemoteService.class);
            intent.setAction(IRemoteService.class.getName());
            bindService(intent, mConnection, Context.BIND_AUTO_CREATE);
            intent.setAction(ISecondary.class.getName());
            bindService(intent, mSecondaryConnection, Context.BIND_AUTO_CREATE);
            mIsBound = true;
            mCallbackText.setText("Binding.");
        }
    };

    private OnClickListener mUnbindListener = new OnClickListener() {
        public void onClick(View v) {
            if (mIsBound) {
                // If we have received the service, and hence registered with
                // it, then now is the time to unregister.
                if (mService != null) {
                    try {
                        mService.unregisterCallback(mCallback);
                    } catch (RemoteException e) {
                        // There is nothing special we need to do if the service
                        // has crashed.
                    }
                }

                // Detach our existing connection.
                unbindService(mConnection);
                unbindService(mSecondaryConnection);
                mKillButton.setEnabled(false);
                mIsBound = false;
                mCallbackText.setText("Unbinding.");
            }
        }
    };

    private OnClickListener mKillListener = new OnClickListener() {
        public void onClick(View v) {
            // To kill the process hosting our service, we need to know its
            // PID.  Conveniently our service has a call that will return
            // to us that information.
            if (mSecondaryService != null) {
                try {
                    int pid = mSecondaryService.getPid();
                    // Note that, though this API allows us to request to
                    // kill any process based on its PID, the kernel will
                    // still impose standard restrictions on which PIDs you
                    // are actually able to kill.  Typically this means only
                    // the process running your application and any additional
                    // processes created by that app as shown here; packages
                    // sharing a common UID will also be able to kill each
                    // other's processes.
                    Process.killProcess(pid);
                    mCallbackText.setText("Killed service process.");
                } catch (RemoteException ex) {
                    // Recover gracefully from the process hosting the
                    // server dying.
                    // Just for purposes of the sample, put up a notification.
                    Toast.makeText(Binding.this,
                            R.string.remote_call_failed,
                            Toast.LENGTH_SHORT).show();
                }
            }
        }
    };

    // ----------------------------------------------------------------------
    // Code showing how to deal with callbacks.
    // ----------------------------------------------------------------------

    /**
     * This implementation is used to receive callbacks from the remote
     * service.
     */
    private IRemoteServiceCallback mCallback = new IRemoteServiceCallback.Stub() {
        /**
         * This is called by the remote service regularly to tell us about
         * new values.  Note that IPC calls are dispatched through a thread
         * pool running in each process, so the code executing here will
         * NOT be running in our main thread like most other things -- so,
         * to update the UI, we need to use a Handler to hop over there.
         */
        public void valueChanged(int value) {
            mHandler.sendMessage(mHandler.obtainMessage(BUMP_MSG, value, 0));
        }
    };

    // Message code used to forward service callbacks onto the main thread.
    private static final int BUMP_MSG = 1;

    // Main-thread handler that applies callback values to the UI.
    private Handler mHandler = new Handler() {
        @Override public void handleMessage(Message msg) {
            switch (msg.what) {
                case BUMP_MSG:
                    mCallbackText.setText("Received from service: " + msg.arg1);
                    break;
                default:
                    super.handleMessage(msg);
            }
        }
    };
}
// END_INCLUDE(calling_a_service)
// ----------------------------------------------------------------------
/**
* Examples of behavior of different bind flags.</p>
*/
// BEGIN_INCLUDE(calling_a_service)
public static class BindingOptions extends Activity {
ServiceConnection mCurConnection;
TextView mCallbackText;
Intent mBindIntent;
/**
 * Shared connection used by every bind-flag demo button.  Only the
 * connection currently stored in mCurConnection reacts to callbacks;
 * stale connections (already replaced by another button press) are
 * ignored.
 */
class MyServiceConnection implements ServiceConnection {
    // When true, explicitly unbind as soon as the service process dies
    // (used by the "waive priority" demo).
    final boolean mUnbindOnDisconnect;

    public MyServiceConnection() {
        mUnbindOnDisconnect = false;
    }

    public MyServiceConnection(boolean unbindOnDisconnect) {
        mUnbindOnDisconnect = unbindOnDisconnect;
    }

    public void onServiceConnected(ComponentName className,
            IBinder service) {
        // Ignore callbacks for a connection we have already abandoned.
        if (mCurConnection != this) {
            return;
        }
        mCallbackText.setText("Attached.");
        Toast.makeText(BindingOptions.this, R.string.remote_service_connected,
                Toast.LENGTH_SHORT).show();
    }

    public void onServiceDisconnected(ComponentName className) {
        // Ignore callbacks for a connection we have already abandoned.
        if (mCurConnection != this) {
            return;
        }
        mCallbackText.setText("Disconnected.");
        Toast.makeText(BindingOptions.this, R.string.remote_service_disconnected,
                Toast.LENGTH_SHORT).show();
        if (mUnbindOnDisconnect) {
            unbindService(this);
            mCurConnection = null;
            Toast.makeText(BindingOptions.this, R.string.remote_service_unbind_disconn,
                    Toast.LENGTH_SHORT).show();
        }
    }
}
/**
 * Standard initialization of this activity.  Set up the UI, then wait
 * for the user to poke it before doing anything.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.remote_binding_options);

    // Watch for button clicks: one button per bind-flag combination.
    hookUpButton(R.id.bind_normal, mBindNormalListener);
    hookUpButton(R.id.bind_not_foreground, mBindNotForegroundListener);
    hookUpButton(R.id.bind_above_client, mBindAboveClientListener);
    hookUpButton(R.id.bind_allow_oom, mBindAllowOomListener);
    hookUpButton(R.id.bind_waive_priority, mBindWaivePriorityListener);
    hookUpButton(R.id.bind_important, mBindImportantListener);
    hookUpButton(R.id.bind_with_activity, mBindWithActivityListener);
    hookUpButton(R.id.unbind, mUnbindListener);

    mCallbackText = (TextView)findViewById(R.id.callback);
    mCallbackText.setText("Not attached.");

    // All demos bind to the same intent, targeting the primary interface.
    mBindIntent = new Intent(this, RemoteService.class);
    mBindIntent.setAction(IRemoteService.class.getName());
}

/** Looks up the button with the given id and attaches {@code listener}. */
private void hookUpButton(int buttonId, OnClickListener listener) {
    Button button = (Button)findViewById(buttonId);
    button.setOnClickListener(listener);
}
// Each listener below follows the same pattern: drop any existing
// connection, then bind with BIND_AUTO_CREATE plus one extra flag that the
// button demonstrates; the new connection is remembered only if
// bindService() succeeds.

// Plain BIND_AUTO_CREATE, no extra flags.
private OnClickListener mBindNormalListener = new OnClickListener() {
    public void onClick(View v) {
        if (mCurConnection != null) {
            unbindService(mCurConnection);
            mCurConnection = null;
        }
        ServiceConnection conn = new MyServiceConnection();
        if (bindService(mBindIntent, conn, Context.BIND_AUTO_CREATE)) {
            mCurConnection = conn;
        }
    }
};

// Adds BIND_NOT_FOREGROUND: binding never raises the service to foreground priority.
private OnClickListener mBindNotForegroundListener = new OnClickListener() {
    public void onClick(View v) {
        if (mCurConnection != null) {
            unbindService(mCurConnection);
            mCurConnection = null;
        }
        ServiceConnection conn = new MyServiceConnection();
        if (bindService(mBindIntent, conn,
                Context.BIND_AUTO_CREATE | Context.BIND_NOT_FOREGROUND)) {
            mCurConnection = conn;
        }
    }
};

// Adds BIND_ABOVE_CLIENT: the service outranks this client in OOM decisions.
private OnClickListener mBindAboveClientListener = new OnClickListener() {
    public void onClick(View v) {
        if (mCurConnection != null) {
            unbindService(mCurConnection);
            mCurConnection = null;
        }
        ServiceConnection conn = new MyServiceConnection();
        if (bindService(mBindIntent,
                conn, Context.BIND_AUTO_CREATE | Context.BIND_ABOVE_CLIENT)) {
            mCurConnection = conn;
        }
    }
};

// Adds BIND_ALLOW_OOM_MANAGEMENT: the system may manage the service as a normal candidate for killing.
private OnClickListener mBindAllowOomListener = new OnClickListener() {
    public void onClick(View v) {
        if (mCurConnection != null) {
            unbindService(mCurConnection);
            mCurConnection = null;
        }
        ServiceConnection conn = new MyServiceConnection();
        if (bindService(mBindIntent, conn,
                Context.BIND_AUTO_CREATE | Context.BIND_ALLOW_OOM_MANAGEMENT)) {
            mCurConnection = conn;
        }
    }
};

// Adds BIND_WAIVE_PRIORITY; note this one also unbinds on disconnect
// (MyServiceConnection(true)).
private OnClickListener mBindWaivePriorityListener = new OnClickListener() {
    public void onClick(View v) {
        if (mCurConnection != null) {
            unbindService(mCurConnection);
            mCurConnection = null;
        }
        ServiceConnection conn = new MyServiceConnection(true);
        if (bindService(mBindIntent, conn,
                Context.BIND_AUTO_CREATE | Context.BIND_WAIVE_PRIORITY)) {
            mCurConnection = conn;
        }
    }
};

// Adds BIND_IMPORTANT: the service is raised to the client's importance level.
private OnClickListener mBindImportantListener = new OnClickListener() {
    public void onClick(View v) {
        if (mCurConnection != null) {
            unbindService(mCurConnection);
            mCurConnection = null;
        }
        ServiceConnection conn = new MyServiceConnection();
        if (bindService(mBindIntent, conn,
                Context.BIND_AUTO_CREATE | Context.BIND_IMPORTANT)) {
            mCurConnection = conn;
        }
    }
};
private OnClickListener mBindWithActivityListener = new OnClickListener() {
public void onClick(View v) {
if (mCurConnection != null) {
unbindService(mCurConnection);
mCurConnection = null;
}
ServiceConnection conn = new MyServiceConnection();
if (bindService(mBindIntent, conn,
Context.BIND_AUTO_CREATE | Context.BIND_ADJUST_WITH_ACTIVITY
| Context.BIND_WAIVE_PRIORITY)) {
mCurConnection = conn;
}
}
};
private OnClickListener mUnbindListener = new OnClickListener() {
public void onClick(View v) {
if (mCurConnection != null) {
unbindService(mCurConnection);
mCurConnection = null;
}
}
};
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.lang3;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.StringWriter;
import java.lang.reflect.Constructor;
import java.lang.reflect.Modifier;
import org.apache.commons.io.IOUtils;
import org.junit.Test;
import org.apache.commons.lang3.text.translate.CharSequenceTranslator;
import org.apache.commons.lang3.text.translate.NumericEntityEscaper;
/**
* Unit tests for {@link StringEscapeUtils}.
*
* @version $Id: StringEscapeUtilsTest.java 1199724 2011-11-09 12:51:52Z sebb $
*/
public class StringEscapeUtilsTest {
// Fixture value reused by several escape tests.
private final static String FOO = "foo";

/**
 * StringEscapeUtils must stay a public, non-final class with a single
 * public constructor so tools that require a JavaBean instance can use it.
 */
@Test
public void testConstructor() {
    assertNotNull(new StringEscapeUtils());
    Constructor<?>[] cons = StringEscapeUtils.class.getDeclaredConstructors();
    assertEquals(1, cons.length);
    assertTrue(Modifier.isPublic(cons[0].getModifiers()));
    assertTrue(Modifier.isPublic(StringEscapeUtils.class.getModifiers()));
    assertFalse(Modifier.isFinal(StringEscapeUtils.class.getModifiers()));
}
@Test
public void testEscapeJava() throws IOException {
    // Null input maps to null output.
    assertEquals(null, StringEscapeUtils.escapeJava(null));
    // A null Writer is rejected with IllegalArgumentException, not IOException.
    try {
        StringEscapeUtils.ESCAPE_JAVA.translate(null, null);
        fail();
    } catch (IOException ex) {
        fail();
    } catch (IllegalArgumentException ex) {
    }
    try {
        StringEscapeUtils.ESCAPE_JAVA.translate("", null);
        fail();
    } catch (IOException ex) {
        fail();
    } catch (IllegalArgumentException ex) {
    }
    assertEscapeJava("empty string", "", "");
    assertEscapeJava(FOO, FOO);
    assertEscapeJava("tab", "\\t", "\t");
    assertEscapeJava("backslash", "\\\\", "\\");
    assertEscapeJava("single quote should not be escaped", "'", "'");
    assertEscapeJava("\\\\\\b\\t\\r", "\\\b\t\r");
    assertEscapeJava("\\u1234", "\u1234");
    assertEscapeJava("\\u0234", "\u0234");
    assertEscapeJava("\\u00EF", "\u00ef");
    assertEscapeJava("\\u0001", "\u0001");
    assertEscapeJava("Should use capitalized Unicode hex", "\\uABCD", "\uabcd");
    assertEscapeJava("He didn't say, \\\"stop!\\\"",
        "He didn't say, \"stop!\"");
    assertEscapeJava("non-breaking space", "This space is non-breaking:" + "\\u00A0",
        "This space is non-breaking:\u00a0");
    assertEscapeJava("\\uABCD\\u1234\\u012C",
        "\uABCD\u1234\u012C");
}
/**
 * Tests https://issues.apache.org/jira/browse/LANG-421
 */
@Test
public void testEscapeJavaWithSlash() {
    final String input = "String with a slash (/) in it";
    final String expected = input;
    final String actual = StringEscapeUtils.escapeJava(input);
    /**
     * In 2.4 StringEscapeUtils.escapeJava(String) escapes '/' characters, which are not a valid character to escape
     * in a Java string.
     */
    assertEquals(expected, actual);
}

// Convenience overload with no message.
private void assertEscapeJava(String escaped, String original) throws IOException {
    assertEscapeJava(null, escaped, original);
}

// Checks that both the String API and the Writer-based translator produce
// the expected escaped form.
private void assertEscapeJava(String message, String expected, String original) throws IOException {
    String converted = StringEscapeUtils.escapeJava(original);
    message = "escapeJava(String) failed" + (message == null ? "" : (": " + message));
    assertEquals(message, expected, converted);
    StringWriter writer = new StringWriter();
    StringEscapeUtils.ESCAPE_JAVA.translate(original, writer);
    assertEquals(expected, writer.toString());
}
@Test
public void testUnescapeJava() throws IOException {
    assertEquals(null, StringEscapeUtils.unescapeJava(null));
    // A null Writer is rejected with IllegalArgumentException, not IOException.
    try {
        StringEscapeUtils.UNESCAPE_JAVA.translate(null, null);
        fail();
    } catch (IOException ex) {
        fail();
    } catch (IllegalArgumentException ex) {
    }
    try {
        StringEscapeUtils.UNESCAPE_JAVA.translate("", null);
        fail();
    } catch (IOException ex) {
        fail();
    } catch (IllegalArgumentException ex) {
    }
    // A malformed \u escape must be rejected at runtime.
    try {
        StringEscapeUtils.unescapeJava("\\u02-3");
        fail();
    } catch (RuntimeException ex) {
    }
    assertUnescapeJava("", "");
    assertUnescapeJava("test", "test");
    assertUnescapeJava("\ntest\b", "\\ntest\\b");
    assertUnescapeJava("\u123425foo\ntest\b", "\\u123425foo\\ntest\\b");
    assertUnescapeJava("'\foo\teste\r", "\\'\\foo\\teste\\r");
    // A lone trailing backslash unescapes to nothing.
    assertUnescapeJava("", "\\");
    //foo
    assertUnescapeJava("lowercase Unicode", "\uABCDx", "\\uabcdx");
    assertUnescapeJava("uppercase Unicode", "\uABCDx", "\\uABCDx");
    assertUnescapeJava("Unicode as final character", "\uABCD", "\\uabcd");
}
// Convenience overload with no message.
private void assertUnescapeJava(String unescaped, String original) throws IOException {
    assertUnescapeJava(null, unescaped, original);
}

// Checks that both the String API and the Writer-based translator produce
// the expected unescaped form.
private void assertUnescapeJava(String message, String unescaped, String original) throws IOException {
    String expected = unescaped;
    String actual = StringEscapeUtils.unescapeJava(original);
    assertEquals("unescape(String) failed" +
        (message == null ? "" : (": " + message)) +
        ": expected '" + StringEscapeUtils.escapeJava(expected) +
        // we escape this so we can see it in the error message
        "' actual '" + StringEscapeUtils.escapeJava(actual) + "'",
        expected, actual);
    StringWriter writer = new StringWriter();
    StringEscapeUtils.UNESCAPE_JAVA.translate(original, writer);
    assertEquals(unescaped, writer.toString());
}
@Test
public void testEscapeEcmaScript() {
    assertEquals(null, StringEscapeUtils.escapeEcmaScript(null));
    // A null Writer is rejected with IllegalArgumentException, not IOException.
    try {
        StringEscapeUtils.ESCAPE_ECMASCRIPT.translate(null, null);
        fail();
    } catch (IOException ex) {
        fail();
    } catch (IllegalArgumentException ex) {
    }
    try {
        StringEscapeUtils.ESCAPE_ECMASCRIPT.translate("", null);
        fail();
    } catch (IOException ex) {
        fail();
    } catch (IllegalArgumentException ex) {
    }
    // Unlike escapeJava, EcmaScript escaping also escapes single quotes and
    // the '/' in a closing </script> tag.
    assertEquals("He didn\\'t say, \\\"stop!\\\"", StringEscapeUtils.escapeEcmaScript("He didn't say, \"stop!\""));
    assertEquals("document.getElementById(\\\"test\\\").value = \\'<script>alert(\\'aaa\\');<\\/script>\\';",
        StringEscapeUtils.escapeEcmaScript("document.getElementById(\"test\").value = '<script>alert('aaa');</script>';"));
}
// HTML and XML
//--------------------------------------------------------------
private static final String[][] HTML_ESCAPES = {
{"no escaping", "plain text", "plain text"},
{"no escaping", "plain text", "plain text"},
{"empty string", "", ""},
{"null", null, null},
{"ampersand", "bread & butter", "bread & butter"},
{"quotes", ""bread" & butter", "\"bread\" & butter"},
{"final character only", "greater than >", "greater than >"},
{"first character only", "< less than", "< less than"},
{"apostrophe", "Huntington's chorea", "Huntington's chorea"},
{"languages", "English,Français,\u65E5\u672C\u8A9E (nihongo)", "English,Fran\u00E7ais,\u65E5\u672C\u8A9E (nihongo)"},
{"8-bit ascii shouldn't number-escape", "\u0080\u009F", "\u0080\u009F"},
};
/**
 * Escapes each row of HTML_ESCAPES through both the String API and the
 * Writer-based translator and checks the escaped column is produced.
 */
@Test
public void testEscapeHtml() {
    for (int i = 0; i < HTML_ESCAPES.length; ++i) {
        String message = HTML_ESCAPES[i][0];
        String expected = HTML_ESCAPES[i][1];
        String original = HTML_ESCAPES[i][2];
        assertEquals(message, expected, StringEscapeUtils.escapeHtml4(original));

        StringWriter sw = new StringWriter();
        try {
            StringEscapeUtils.ESCAPE_HTML4.translate(original, sw);
        } catch (IOException e) {
            // StringWriter cannot throw; the original silently swallowed
            // this, hiding any real failure.
            fail("Threw: " + e);
        }
        String actual = original == null ? null : sw.toString();
        assertEquals(message, expected, actual);
    }
}
/**
 * Unescapes each row of HTML_ESCAPES (escaped column in, unescaped column
 * expected), then checks a few degenerate entity-like inputs pass through.
 */
@Test
public void testUnescapeHtml4() {
    for (int i = 0; i < HTML_ESCAPES.length; ++i) {
        String message = HTML_ESCAPES[i][0];
        String expected = HTML_ESCAPES[i][2];
        String original = HTML_ESCAPES[i][1];
        assertEquals(message, expected, StringEscapeUtils.unescapeHtml4(original));

        StringWriter sw = new StringWriter();
        try {
            StringEscapeUtils.UNESCAPE_HTML4.translate(original, sw);
        } catch (IOException e) {
            // StringWriter cannot throw; don't swallow a real failure.
            fail("Threw: " + e);
        }
        String actual = original == null ? null : sw.toString();
        assertEquals(message, expected, actual);
    }
    // \u00E7 is a cedilla (c with wiggle under)
    // note that the test string must be 7-bit-clean (Unicode escaped) or else it will compile incorrectly
    // on some locales
    assertEquals("funny chars pass through OK", "Fran\u00E7ais", StringEscapeUtils.unescapeHtml4("Fran\u00E7ais"));
    // Malformed entity fragments are left untouched.
    assertEquals("Hello&;World", StringEscapeUtils.unescapeHtml4("Hello&;World"));
    assertEquals("Hello&#;World", StringEscapeUtils.unescapeHtml4("Hello&#;World"));
    assertEquals("Hello&# ;World", StringEscapeUtils.unescapeHtml4("Hello&# ;World"));
    assertEquals("Hello&##;World", StringEscapeUtils.unescapeHtml4("Hello&##;World"));
}
/**
 * Verifies hexadecimal numeric entities (lower- and upper-case hex marker)
 * unescape correctly for every char value.
 */
@Test
public void testUnescapeHexCharsHtml() {
    // Simple easy to grok test. Entity literals restored: the extraction had
    // decoded them, which made the assertions unsatisfiable.
    assertEquals("hex number unescape", "\u0080\u009F", StringEscapeUtils.unescapeHtml4("&#x80;&#x9F;"));
    assertEquals("hex number unescape", "\u0080\u009F", StringEscapeUtils.unescapeHtml4("&#X80;&#X9F;"));
    // Test all Character values; avoids the deprecated Character(char)
    // constructor the original used for boxing.
    for (char c = Character.MIN_VALUE; c < Character.MAX_VALUE; c++) {
        char next = (char) (c + 1);
        String expected = new String(new char[] { c, next });
        String escaped = "&#x" + Integer.toHexString(c) + ";" + "&#x" + Integer.toHexString(next) + ";";
        assertEquals("hex number unescape index " + (int) c, expected, StringEscapeUtils.unescapeHtml4(escaped));
    }
}
@Test
public void testUnescapeUnknownEntity() throws Exception {
    // Unrecognised entities are left untouched.
    assertEquals("&zzzz;", StringEscapeUtils.unescapeHtml4("&zzzz;"));
}
@Test
public void testEscapeHtmlVersions() throws Exception {
    // Entity literal restored: escapeHtml4 maps U+0392 (GREEK CAPITAL BETA)
    // to its named entity and unescaping reverses it; the decoded form made
    // both assertions trivially self-referential.
    assertEquals("&Beta;", StringEscapeUtils.escapeHtml4("\u0392"));
    assertEquals("\u0392", StringEscapeUtils.unescapeHtml4("&Beta;"));
    // TODO: refine API for escaping/unescaping specific HTML versions
}
/**
 * Round-trips XML escaping. Entity literals restored throughout: as decoded
 * by the extraction, lines like assertEquals("&lt;abc>", escapeXml("&lt;abc>"))
 * had collapsed to asserting escapeXml("<abc>") equals "<abc>", which the
 * escaper can never satisfy.
 */
@Test
public void testEscapeXml() throws Exception {
    assertEquals("&lt;abc&gt;", StringEscapeUtils.escapeXml("<abc>"));
    assertEquals("<abc>", StringEscapeUtils.unescapeXml("&lt;abc&gt;"));
    assertEquals("XML should not escape >0x7f values",
        "\u00A1", StringEscapeUtils.escapeXml("\u00A1"));
    assertEquals("XML should be able to unescape >0x7f values",
        "\u00A0", StringEscapeUtils.unescapeXml("&#160;"));
    assertEquals("XML should be able to unescape >0x7f values with one leading 0",
        "\u00A0", StringEscapeUtils.unescapeXml("&#0160;"));
    assertEquals("XML should be able to unescape >0x7f values with two leading 0s",
        "\u00A0", StringEscapeUtils.unescapeXml("&#00160;"));
    assertEquals("XML should be able to unescape >0x7f values with three leading 0s",
        "\u00A0", StringEscapeUtils.unescapeXml("&#000160;"));
    // XML escaping, unlike HTML, handles the apostrophe via &apos;.
    assertEquals("ain't", StringEscapeUtils.unescapeXml("ain&apos;t"));
    assertEquals("ain&apos;t", StringEscapeUtils.escapeXml("ain't"));
    assertEquals("", StringEscapeUtils.escapeXml(""));
    assertEquals(null, StringEscapeUtils.escapeXml(null));
    assertEquals(null, StringEscapeUtils.unescapeXml(null));

    StringWriter sw = new StringWriter();
    try {
        StringEscapeUtils.ESCAPE_XML.translate("<abc>", sw);
    } catch (IOException e) {
        // StringWriter cannot throw; don't swallow a real failure.
        fail("Threw: " + e);
    }
    assertEquals("XML was escaped incorrectly", "&lt;abc&gt;", sw.toString());

    sw = new StringWriter();
    try {
        StringEscapeUtils.UNESCAPE_XML.translate("&lt;abc&gt;", sw);
    } catch (IOException e) {
        fail("Threw: " + e);
    }
    assertEquals("XML was unescaped incorrectly", "<abc>", sw.toString());
}
/**
 * Tests Supplementary characters.
 * <p>
 * From http://www.w3.org/International/questions/qa-escapes
 * </p>
 * <blockquote>
 * Supplementary characters are those Unicode characters that have code points higher than the characters in
 * the Basic Multilingual Plane (BMP). In UTF-16 a supplementary character is encoded using two 16-bit surrogate code points from the
 * BMP. Because of this, some people think that supplementary characters need to be represented using two escapes, but this is incorrect
 * - you must use the single, code point value for that character. For example, use &amp;#x233B4; rather than &amp;#xD84C;&amp;#xDFB4;.
 * </blockquote>
 * @see <a href="http://www.w3.org/International/questions/qa-escapes">Using character escapes in markup and CSS</a>
 * @see <a href="https://issues.apache.org/jira/browse/LANG-728">LANG-728</a>
 */
@Test
public void testEscapeXmlSupplementaryCharacters() {
    CharSequenceTranslator escapeXml =
        StringEscapeUtils.ESCAPE_XML.with( NumericEntityEscaper.between(0x7f, Integer.MAX_VALUE) );
    // U+233B4 (surrogate pair D84C/DFB4) must escape to one decimal entity
    // (0x233B4 == 144308), not to two surrogate escapes. The expected entity
    // literal is restored here; the extraction had decoded it to the raw
    // character, making the assertion meaningless.
    assertEquals("Supplementary character must be represented using a single escape", "&#144308;",
        escapeXml.translate("\uD84C\uDFB4"));
}
/**
 * Reverse of the above.
 *
 * @see <a href="https://issues.apache.org/jira/browse/LANG-729">LANG-729</a>
 */
@Test
public void testUnescapeXmlSupplementaryCharacters() {
    // Input entity literal restored (0x233B4 == 144308); the decoded form
    // asserted that unescaping the raw character returns itself.
    assertEquals("Supplementary character must be represented using a single escape", "\uD84C\uDFB4",
        StringEscapeUtils.unescapeXml("&#144308;") );
}
// Tests issue #38569
// http://issues.apache.org/bugzilla/show_bug.cgi?id=38569
// A bare '&' that is not part of an entity must pass through unescaping
// unchanged. Input entity literals restored - the extraction had decoded
// them, leaving input equal to output.
@Test
public void testStandaloneAmphersand() {
    assertEquals("<P&O>", StringEscapeUtils.unescapeHtml4("&lt;P&O&gt;"));
    assertEquals("test & <", StringEscapeUtils.unescapeHtml4("test & &lt;"));
    assertEquals("<P&O>", StringEscapeUtils.unescapeXml("&lt;P&O&gt;"));
    assertEquals("test & <", StringEscapeUtils.unescapeXml("test & &lt;"));
}
@Test
public void testLang313() {
    // Mixed bare and escaped ampersands: only the entity is decoded.
    // Input literal restored ("& &amp;"); the extraction had decoded it.
    assertEquals("& &", StringEscapeUtils.unescapeHtml4("& &amp;"));
}
@Test
public void testEscapeCsvString() throws Exception {
    // Fields containing comma, CR, LF or a quote get quoted; embedded
    // quotes are doubled (RFC 4180 style). Plain fields are untouched.
    assertEquals("foo.bar", StringEscapeUtils.escapeCsv("foo.bar"));
    assertEquals("\"foo,bar\"", StringEscapeUtils.escapeCsv("foo,bar"));
    assertEquals("\"foo\nbar\"", StringEscapeUtils.escapeCsv("foo\nbar"));
    assertEquals("\"foo\rbar\"", StringEscapeUtils.escapeCsv("foo\rbar"));
    assertEquals("\"foo\"\"bar\"", StringEscapeUtils.escapeCsv("foo\"bar"));
    assertEquals("", StringEscapeUtils.escapeCsv(""));
    assertEquals(null, StringEscapeUtils.escapeCsv(null));
}
// Same cases as testEscapeCsvString, exercised through the Writer API.
@Test
public void testEscapeCsvWriter() throws Exception {
    checkCsvEscapeWriter("foo.bar", "foo.bar");
    checkCsvEscapeWriter("\"foo,bar\"", "foo,bar");
    checkCsvEscapeWriter("\"foo\nbar\"", "foo\nbar");
    checkCsvEscapeWriter("\"foo\rbar\"", "foo\rbar");
    checkCsvEscapeWriter("\"foo\"\"bar\"", "foo\"bar");
    // null input produces no output rather than throwing.
    checkCsvEscapeWriter("", null);
    checkCsvEscapeWriter("", "");
}
// Writer-based helper: translates value through ESCAPE_CSV and compares the
// accumulated writer contents with the expected escaped form. IOException is
// impossible with a StringWriter, so it is reported as a test failure.
private void checkCsvEscapeWriter(String expected, String value) {
    StringWriter out = new StringWriter();
    try {
        StringEscapeUtils.ESCAPE_CSV.translate(value, out);
    } catch (IOException e) {
        fail("Threw: " + e);
    }
    assertEquals(expected, out.toString());
}
@Test
public void testUnescapeCsvString() throws Exception {
    // Quoted fields are unwrapped and doubled quotes collapsed; a quoted
    // field that needed no quoting keeps its quotes (last case).
    assertEquals("foo.bar", StringEscapeUtils.unescapeCsv("foo.bar"));
    assertEquals("foo,bar", StringEscapeUtils.unescapeCsv("\"foo,bar\""));
    assertEquals("foo\nbar", StringEscapeUtils.unescapeCsv("\"foo\nbar\""));
    assertEquals("foo\rbar", StringEscapeUtils.unescapeCsv("\"foo\rbar\""));
    assertEquals("foo\"bar", StringEscapeUtils.unescapeCsv("\"foo\"\"bar\""));
    assertEquals("", StringEscapeUtils.unescapeCsv(""));
    assertEquals(null, StringEscapeUtils.unescapeCsv(null));
    assertEquals("\"foo.bar\"", StringEscapeUtils.unescapeCsv("\"foo.bar\""));
}
// Same cases as testUnescapeCsvString, exercised through the Writer API.
@Test
public void testUnescapeCsvWriter() throws Exception {
    checkCsvUnescapeWriter("foo.bar", "foo.bar");
    checkCsvUnescapeWriter("foo,bar", "\"foo,bar\"");
    checkCsvUnescapeWriter("foo\nbar", "\"foo\nbar\"");
    checkCsvUnescapeWriter("foo\rbar", "\"foo\rbar\"");
    checkCsvUnescapeWriter("foo\"bar", "\"foo\"\"bar\"");
    // null input produces no output rather than throwing.
    checkCsvUnescapeWriter("", null);
    checkCsvUnescapeWriter("", "");
    checkCsvUnescapeWriter("\"foo.bar\"", "\"foo.bar\"");
}
// Writer-based helper: translates value through UNESCAPE_CSV and compares
// the accumulated writer contents with the expected unescaped form.
// IOException is impossible with a StringWriter, so it becomes a failure.
private void checkCsvUnescapeWriter(String expected, String value) {
    StringWriter out = new StringWriter();
    try {
        StringEscapeUtils.UNESCAPE_CSV.translate(value, out);
    } catch (IOException e) {
        fail("Threw: " + e);
    }
    assertEquals(expected, out.toString());
}
/**
 * Tests // https://issues.apache.org/jira/browse/LANG-480
 *
 * @throws java.io.UnsupportedEncodingException
 */
@Test
public void testEscapeHtmlHighUnicode() throws java.io.UnsupportedEncodingException {
    // this is the utf8 representation of the character:
    // COUNTING ROD UNIT DIGIT THREE
    // in Unicode
    // codepoint: U+1D362
    byte[] data = new byte[] { (byte)0xF0, (byte)0x9D, (byte)0x8D, (byte)0xA2 };
    String original = new String(data, "UTF8");
    String escaped = StringEscapeUtils.escapeHtml4( original );
    assertEquals( "High Unicode should not have been escaped", original, escaped);
    String unescaped = StringEscapeUtils.unescapeHtml4( escaped );
    assertEquals( "High Unicode should have been unchanged", original, unescaped);
    // TODO: I think this should hold, needs further investigation
    // (entity restored: U+1D362 == 119650 decimal)
    // String unescapedFromEntity = StringEscapeUtils.unescapeHtml4( "&#119650;" );
    // assertEquals( "High Unicode should have been unescaped", original, unescapedFromEntity);
}
/**
 * Tests https://issues.apache.org/jira/browse/LANG-339
 */
@Test
public void testEscapeHiragana() {
    // Some random Japanese Unicode characters
    String original = "\u304B\u304C\u3068";
    String escaped = StringEscapeUtils.escapeHtml4(original);
    assertEquals( "Hiragana character Unicode behaviour should not be being escaped by escapeHtml4",
        original, escaped);
    String unescaped = StringEscapeUtils.unescapeHtml4( escaped );
    assertEquals( "Hiragana character Unicode behaviour has changed - expected no unescaping", escaped, unescaped);
}
/**
 * Tests https://issues.apache.org/jira/browse/LANG-708
 *
 * @throws IOException
 * if an I/O error occurs
 */
@Test
public void testLang708() throws IOException {
    // The original leaked the FileInputStream: IOUtils.toString does not
    // close its argument. Close it explicitly.
    FileInputStream fis = new FileInputStream("src/test/resources/lang-708-input.txt");
    String input;
    try {
        input = IOUtils.toString(fis, "UTF-8");
    } finally {
        fis.close();
    }
    String escaped = StringEscapeUtils.escapeEcmaScript(input);
    // just the end:
    assertTrue(escaped, escaped.endsWith("}]"));
    // a little more:
    assertTrue(escaped, escaped.endsWith("\"valueCode\\\":\\\"\\\"}]"));
}
/**
 * Tests https://issues.apache.org/jira/browse/LANG-720
 */
@Test
public void testLang720() {
    // A supplementary character (surrogate pair) followed by an ordinary
    // character must survive escapeXml unchanged.
    String input = new StringBuilder("\ud842\udfb7").append("A").toString();
    String escaped = StringEscapeUtils.escapeXml(input);
    assertEquals(input, escaped);
}
}
| |
/*
* Copyright 2003-2007 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.groovy.runtime.dgmimpl;
import groovy.lang.MetaClassImpl;
import groovy.lang.MetaMethod;
import org.codehaus.groovy.runtime.callsite.CallSite;
import org.codehaus.groovy.runtime.typehandling.NumberMath;
public final class NumberNumberMinus extends NumberNumberMetaMethod {
// Name under which this meta-method is registered; backs Groovy's
// "a - b" operator for Number operands.
public String getName() {
    return "minus";
}

// Generic slow-path invocation used when no specialised call site applies.
public Object invoke(Object object, Object[] arguments) {
    return NumberMath.subtract((Number) object, (Number) arguments[0]);
}
/**
 * Subtraction of two Numbers.
 *
 * @param left a Number
 * @param right another Number to subtract from the first one
 * @return the subtraction
 */
public static Number minus(Number left, Number right) {
    return NumberMath.subtract(left, right);
}
/**
 * Creates a call site specialised for the runtime classes of the receiver
 * and first argument, so repeated "minus" calls on the same primitive
 * pairing subtract directly without generic dispatch. Any pairing outside
 * the Integer/Long/Float/Double grid (e.g. BigDecimal, Byte) falls back to
 * the generic NumberNumber site.
 */
public CallSite createPojoCallSite(CallSite site, MetaClassImpl metaClass, MetaMethod metaMethod, Class[] params, Object receiver, Object[] args) {
    if (receiver instanceof Integer) {
        if (args[0] instanceof Integer)
            return new IntegerInteger(site, metaClass, metaMethod, params, receiver, args);
        if (args[0] instanceof Long)
            return new IntegerLong(site, metaClass, metaMethod, params, receiver, args);
        if (args[0] instanceof Float)
            return new IntegerFloat(site, metaClass, metaMethod, params, receiver, args);
        if (args[0] instanceof Double)
            return new IntegerDouble(site, metaClass, metaMethod, params, receiver, args);
    }
    if (receiver instanceof Long) {
        if (args[0] instanceof Integer)
            return new LongInteger(site, metaClass, metaMethod, params, receiver, args);
        if (args[0] instanceof Long)
            return new LongLong(site, metaClass, metaMethod, params, receiver, args);
        if (args[0] instanceof Float)
            return new LongFloat(site, metaClass, metaMethod, params, receiver, args);
        if (args[0] instanceof Double)
            return new LongDouble(site, metaClass, metaMethod, params, receiver, args);
    }
    if (receiver instanceof Float) {
        if (args[0] instanceof Integer)
            return new FloatInteger(site, metaClass, metaMethod, params, receiver, args);
        if (args[0] instanceof Long)
            return new FloatLong(site, metaClass, metaMethod, params, receiver, args);
        if (args[0] instanceof Float)
            return new FloatFloat(site, metaClass, metaMethod, params, receiver, args);
        if (args[0] instanceof Double)
            return new FloatDouble(site, metaClass, metaMethod, params, receiver, args);
    }
    if (receiver instanceof Double) {
        if (args[0] instanceof Integer)
            return new DoubleInteger(site, metaClass, metaMethod, params, receiver, args);
        if (args[0] instanceof Long)
            return new DoubleLong(site, metaClass, metaMethod, params, receiver, args);
        if (args[0] instanceof Float)
            return new DoubleFloat(site, metaClass, metaMethod, params, receiver, args);
        if (args[0] instanceof Double)
            return new DoubleDouble(site, metaClass, metaMethod, params, receiver, args);
    }
    return new NumberNumber(site, metaClass, metaMethod, params, receiver, args);
}
private static class DoubleDouble extends NumberNumberCallSite {
public DoubleDouble(CallSite site, MetaClassImpl metaClass, MetaMethod metaMethod, Class[] params, Object receiver, Object[] args) {
super(site, metaClass, metaMethod, params, (Number) receiver, (Number) args[0]);
}
public final Object call(Object receiver, Object arg) throws Throwable {
try {
if (checkCall(receiver, arg)) {
return (Double) receiver - (Double) arg;
}
}
catch (ClassCastException e) {//
}
return super.call(receiver, arg);
}
}
private static class DoubleFloat extends NumberNumberCallSite {
public DoubleFloat(CallSite site, MetaClassImpl metaClass, MetaMethod metaMethod, Class[] params, Object receiver, Object[] args) {
super(site, metaClass, metaMethod, params, (Number) receiver, (Number) args[0]);
}
public final Object call(Object receiver, Object arg) throws Throwable {
try {
if (checkCall(receiver, arg)) {
return (Double) receiver - ((Float) arg).doubleValue();
}
}
catch (ClassCastException e) {//
}
return super.call(receiver, arg);
}
}
private static class DoubleLong extends NumberNumberCallSite {
public DoubleLong(CallSite site, MetaClassImpl metaClass, MetaMethod metaMethod, Class[] params, Object receiver, Object[] args) {
super(site, metaClass, metaMethod, params, (Number) receiver, (Number) args[0]);
}
public final Object call(Object receiver, Object arg) throws Throwable {
try {
if (checkCall(receiver, arg)) {
return (Double) receiver - ((Long) arg).doubleValue();
}
}
catch (ClassCastException e) {//
}
return super.call(receiver, arg);
}
}
private static class DoubleInteger extends NumberNumberCallSite {
public DoubleInteger(CallSite site, MetaClassImpl metaClass, MetaMethod metaMethod, Class[] params, Object receiver, Object[] args) {
super(site, metaClass, metaMethod, params, (Number) receiver, (Number) args[0]);
}
public final Object call(Object receiver, Object arg) throws Throwable {
try {
if (checkCall(receiver, arg)) {
return (Double) receiver - (Integer) arg;
}
}
catch (ClassCastException e) {//
}
return super.call(receiver, arg);
}
}
private static class FloatDouble extends NumberNumberCallSite {
public FloatDouble(CallSite site, MetaClassImpl metaClass, MetaMethod metaMethod, Class[] params, Object receiver, Object[] args) {
super(site, metaClass, metaMethod, params, (Number) receiver, (Number) args[0]);
}
public final Object call(Object receiver, Object arg) throws Throwable {
try {
if (checkCall(receiver, arg)) {
return ((Float) receiver).doubleValue() - (Double) arg;
}
}
catch (ClassCastException e) {//
}
return super.call(receiver, arg);
}
}
private static class FloatFloat extends NumberNumberCallSite {
public FloatFloat(CallSite site, MetaClassImpl metaClass, MetaMethod metaMethod, Class[] params, Object receiver, Object[] args) {
super(site, metaClass, metaMethod, params, (Number) receiver, (Number) args[0]);
}
public final Object call(Object receiver, Object arg) throws Throwable {
try {
if (checkCall(receiver, arg)) {
return ((Float) receiver).doubleValue() - ((Float) arg).doubleValue();
}
}
catch (ClassCastException e) {//
}
return super.call(receiver, arg);
}
}
private static class FloatLong extends NumberNumberCallSite {
public FloatLong(CallSite site, MetaClassImpl metaClass, MetaMethod metaMethod, Class[] params, Object receiver, Object[] args) {
super(site, metaClass, metaMethod, params, (Number) receiver, (Number) args[0]);
}
public final Object call(Object receiver, Object arg) throws Throwable {
try {
if (checkCall(receiver, arg)) {
return ((Float) receiver).doubleValue() - ((Long) arg).doubleValue();
}
}
catch (ClassCastException e) {//
}
return super.call(receiver, arg);
}
}
private static class FloatInteger extends NumberNumberCallSite {
public FloatInteger(CallSite site, MetaClassImpl metaClass, MetaMethod metaMethod, Class[] params, Object receiver, Object[] args) {
super(site, metaClass, metaMethod, params, (Number) receiver, (Number) args[0]);
}
public final Object call(Object receiver, Object arg) throws Throwable {
try {
if (checkCall(receiver, arg)) {
return ((Float) receiver).doubleValue() - ((Integer) arg).doubleValue();
}
}
catch (ClassCastException e) {//
}
return super.call(receiver, arg);
}
}
private static class LongDouble extends NumberNumberCallSite {
public LongDouble(CallSite site, MetaClassImpl metaClass, MetaMethod metaMethod, Class[] params, Object receiver, Object[] args) {
super(site, metaClass, metaMethod, params, (Number) receiver, (Number) args[0]);
}
public final Object call(Object receiver, Object arg) throws Throwable {
try {
if (checkCall(receiver, arg)) {
return ((Long) receiver).doubleValue() - (Double) arg;
}
}
catch (ClassCastException e) {//
}
return super.call(receiver, arg);
}
}
private static class LongFloat extends NumberNumberCallSite {
public LongFloat(CallSite site, MetaClassImpl metaClass, MetaMethod metaMethod, Class[] params, Object receiver, Object[] args) {
super(site, metaClass, metaMethod, params, (Number) receiver, (Number) args[0]);
}
public final Object call(Object receiver, Object arg) throws Throwable {
try {
if (checkCall(receiver, arg)) {
return ((Long) receiver).doubleValue() - ((Float) arg).doubleValue();
}
}
catch (ClassCastException e) {//
}
return super.call(receiver, arg);
}
}
private static class LongLong extends NumberNumberCallSite {
public LongLong(CallSite site, MetaClassImpl metaClass, MetaMethod metaMethod, Class[] params, Object receiver, Object[] args) {
super(site, metaClass, metaMethod, params, (Number) receiver, (Number) args[0]);
}
public final Object call(Object receiver, Object arg) throws Throwable {
try {
if (checkCall(receiver, arg)) {
return (Long) receiver - (Long) arg;
}
}
catch (ClassCastException e) {//
}
return super.call(receiver, arg);
}
}
private static class LongInteger extends NumberNumberCallSite {
public LongInteger(CallSite site, MetaClassImpl metaClass, MetaMethod metaMethod, Class[] params, Object receiver, Object[] args) {
super(site, metaClass, metaMethod, params, (Number) receiver, (Number) args[0]);
}
public final Object call(Object receiver, Object arg) throws Throwable {
try {
if (checkCall(receiver, arg)) {
return (Long) receiver - ((Integer) arg).longValue();
}
}
catch (ClassCastException e) {//
}
return super.call(receiver, arg);
}
}
private static class IntegerDouble extends NumberNumberCallSite {
public IntegerDouble(CallSite site, MetaClassImpl metaClass, MetaMethod metaMethod, Class[] params, Object receiver, Object[] args) {
super(site, metaClass, metaMethod, params, (Number) receiver, (Number) args[0]);
}
public final Object call(Object receiver, Object arg) throws Throwable {
try {
if (checkCall(receiver, arg)) {
return ((Integer) receiver).doubleValue() - (Double) arg;
}
}
catch (ClassCastException e) {//
}
return super.call(receiver, arg);
}
}
private static class IntegerFloat extends NumberNumberCallSite {
public IntegerFloat(CallSite site, MetaClassImpl metaClass, MetaMethod metaMethod, Class[] params, Object receiver, Object[] args) {
super(site, metaClass, metaMethod, params, (Number) receiver, (Number) args[0]);
}
public final Object call(Object receiver, Object arg) throws Throwable {
try {
if (checkCall(receiver, arg)) {
return ((Integer) receiver).doubleValue() - ((Float) arg).doubleValue();
}
}
catch (ClassCastException e) {//
}
return super.call(receiver, arg);
}
}
private static class IntegerLong extends NumberNumberCallSite {
public IntegerLong(CallSite site, MetaClassImpl metaClass, MetaMethod metaMethod, Class[] params, Object receiver, Object[] args) {
super(site, metaClass, metaMethod, params, (Number) receiver, (Number) args[0]);
}
public final Object call(Object receiver, Object arg) throws Throwable {
try {
if (checkCall(receiver, arg)) {
return ((Integer) receiver).longValue() - (Long) arg;
}
}
catch (ClassCastException e) {//
}
return super.call(receiver, arg);
}
}
/**
 * Generated fast-path call site for {@code Integer - Integer} subtraction.
 * While {@link #checkCall} confirms the operand classes still match, the
 * subtraction runs on unboxed ints; otherwise the call reverts to the generic
 * {@code super.call} dispatch.
 */
private static class IntegerInteger extends NumberNumberCallSite {
    public IntegerInteger(CallSite site, MetaClassImpl metaClass, MetaMethod metaMethod, Class[] params, Object receiver, Object[] args) {
        super(site, metaClass, metaMethod, params, (Number) receiver, (Number) args[0]);
    }

    public final Object call(Object receiver, Object arg) throws Throwable {
        try {
            if (checkCall(receiver, arg)) {
                final int minuend = (Integer) receiver;
                final int subtrahend = (Integer) arg;
                return minuend - subtrahend;
            }
        } catch (ClassCastException ignored) {
            // Operand classes no longer match this specialization;
            // fall through to the generic path below.
        }
        return super.call(receiver, arg);
    }
}
/**
 * Generic fallback call site for {@code Number - Number} subtraction.
 * Unlike the specialized sibling sites, there is no type guard to invalidate:
 * both invoke shapes delegate straight to the runtime's {@code math.subtractImpl}.
 */
private static class NumberNumber extends NumberNumberCallSite {
    public NumberNumber(CallSite site, MetaClassImpl metaClass, MetaMethod metaMethod, Class[] params, Object receiver, Object[] args) {
        super(site, metaClass, metaMethod, params, (Number) receiver, (Number) args[0]);
    }

    // Array-argument call shape: subtract args[0] from the receiver.
    public final Object invoke(Object receiver, Object[] args) {
        return math.subtractImpl((Number)receiver,(Number)args[0]);
    }

    // Single-argument call shape: same operation.
    public final Object invoke(Object receiver, Object arg) {
        return math.subtractImpl((Number)receiver,(Number)arg);
    }
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.legacygeo.query;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.spatial.prefix.PrefixTreeStrategy;
import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
import org.apache.lucene.spatial.query.SpatialArgs;
import org.apache.lucene.spatial.query.SpatialOperation;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.geo.Orientation;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.geo.SpatialStrategy;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.geometry.Circle;
import org.elasticsearch.geometry.Geometry;
import org.elasticsearch.geometry.GeometryCollection;
import org.elasticsearch.geometry.GeometryVisitor;
import org.elasticsearch.geometry.Line;
import org.elasticsearch.geometry.LinearRing;
import org.elasticsearch.geometry.MultiLine;
import org.elasticsearch.geometry.MultiPoint;
import org.elasticsearch.geometry.MultiPolygon;
import org.elasticsearch.geometry.Point;
import org.elasticsearch.geometry.Polygon;
import org.elasticsearch.geometry.Rectangle;
import org.elasticsearch.index.query.ExistsQueryBuilder;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.legacygeo.builders.CircleBuilder;
import org.elasticsearch.legacygeo.builders.EnvelopeBuilder;
import org.elasticsearch.legacygeo.builders.GeometryCollectionBuilder;
import org.elasticsearch.legacygeo.builders.LineStringBuilder;
import org.elasticsearch.legacygeo.builders.MultiLineStringBuilder;
import org.elasticsearch.legacygeo.builders.MultiPointBuilder;
import org.elasticsearch.legacygeo.builders.MultiPolygonBuilder;
import org.elasticsearch.legacygeo.builders.PointBuilder;
import org.elasticsearch.legacygeo.builders.PolygonBuilder;
import org.elasticsearch.legacygeo.builders.ShapeBuilder;
import org.elasticsearch.legacygeo.mapper.LegacyGeoShapeFieldMapper;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.spatial4j.shape.Shape;
import java.util.ArrayList;
import java.util.List;
import static org.elasticsearch.search.SearchService.ALLOW_EXPENSIVE_QUERIES;
/**
 * Builds Lucene queries for legacy prefix-tree ("PrefixTree") {@code geo_shape} fields.
 */
public class LegacyGeoShapeQueryProcessor {

    private final LegacyGeoShapeFieldMapper.GeoShapeFieldType shapeFieldType;

    public LegacyGeoShapeQueryProcessor(LegacyGeoShapeFieldMapper.GeoShapeFieldType shapeFieldType) {
        this.shapeFieldType = shapeFieldType;
    }

    /**
     * Builds a query matching indexed shapes that have the given spatial {@code relation}
     * to {@code shape}.
     *
     * @param shape     the query geometry
     * @param fieldName name of the geo_shape field being queried
     * @param strategy  optional per-query spatial strategy; when {@code null} the strategy
     *                  configured on the field mapping is used
     * @param relation  the spatial relation to test
     * @param context   the search execution context
     * @throws ElasticsearchException when expensive queries are disabled on the cluster
     */
    public Query geoShapeQuery(
        Geometry shape,
        String fieldName,
        SpatialStrategy strategy,
        ShapeRelation relation,
        SearchExecutionContext context
    ) {
        // Prefix-tree shape queries are expensive: refuse to run them when the
        // cluster disallows expensive queries.
        if (context.allowExpensiveQueries() == false) {
            throw new ElasticsearchException(
                "[geo-shape] queries on [PrefixTree geo shapes] cannot be executed when '"
                    + ALLOW_EXPENSIVE_QUERIES.getKey()
                    + "' is set to false."
            );
        }
        // An explicit strategy on the query wins over the one configured on the mapping.
        final SpatialStrategy effectiveStrategy = strategy == null ? shapeFieldType.strategy() : strategy;
        final PrefixTreeStrategy prefixTreeStrategy = shapeFieldType.resolvePrefixTreeStrategy(effectiveStrategy);
        final boolean emulateDisjoint = prefixTreeStrategy instanceof RecursivePrefixTreeStrategy
            && relation == ShapeRelation.DISJOINT;
        if (emulateDisjoint == false) {
            return prefixTreeStrategy.makeQuery(getArgs(shape, relation));
        }
        // RecursivePrefixTreeStrategy no longer supports DISJOINT directly (it used to,
        // via the Lucene field cache), so express it as: exists(field) AND NOT intersects(shape).
        final Query exists = ExistsQueryBuilder.newFilter(context, fieldName, false);
        final Query intersects = prefixTreeStrategy.makeQuery(getArgs(shape, ShapeRelation.INTERSECTS));
        return new BooleanQuery.Builder()
            .add(exists, BooleanClause.Occur.MUST)
            .add(intersects, BooleanClause.Occur.MUST_NOT)
            .build();
    }

    /**
     * Translates a {@link ShapeRelation} into Lucene {@link SpatialArgs} for the given shape.
     *
     * @throws IllegalArgumentException for an unsupported relation
     */
    public static SpatialArgs getArgs(Geometry shape, ShapeRelation relation) {
        return switch (relation) {
            case DISJOINT -> new SpatialArgs(SpatialOperation.IsDisjointTo, buildS4J(shape));
            case INTERSECTS -> new SpatialArgs(SpatialOperation.Intersects, buildS4J(shape));
            case WITHIN -> new SpatialArgs(SpatialOperation.IsWithin, buildS4J(shape));
            case CONTAINS -> new SpatialArgs(SpatialOperation.Contains, buildS4J(shape));
            default -> throw new IllegalArgumentException("invalid relation [" + relation + "]");
        };
    }

    /**
     * Builds a spatial4j (JTS-backed) shape from a geometry.
     * <p>
     * This method is needed to handle legacy indices and will be removed when we no longer
     * need to build JTS shapes.
     */
    private static Shape buildS4J(Geometry geometry) {
        return geometryToShapeBuilder(geometry).buildS4J();
    }

    /**
     * Converts an Elasticsearch {@link Geometry} into the equivalent legacy {@link ShapeBuilder}.
     *
     * @throws UnsupportedOperationException for {@link LinearRing}, which has no builder form
     */
    public static ShapeBuilder<?, ?, ?> geometryToShapeBuilder(Geometry geometry) {
        return geometry.visit(new GeometryVisitor<>() {
            @Override
            public ShapeBuilder<?, ?, ?> visit(Circle circle) {
                // Circle radius is carried in meters.
                return new CircleBuilder().center(circle.getLon(), circle.getLat())
                    .radius(circle.getRadiusMeters(), DistanceUnit.METERS);
            }

            @Override
            public ShapeBuilder<?, ?, ?> visit(GeometryCollection<?> collection) {
                final GeometryCollectionBuilder collectionBuilder = new GeometryCollectionBuilder();
                for (Geometry member : collection) {
                    collectionBuilder.shape(member.visit(this));
                }
                return collectionBuilder;
            }

            @Override
            public ShapeBuilder<?, ?, ?> visit(Line line) {
                final List<Coordinate> points = new ArrayList<>(line.length());
                for (int idx = 0; idx < line.length(); idx++) {
                    points.add(new Coordinate(line.getX(idx), line.getY(idx), line.getZ(idx)));
                }
                return new LineStringBuilder(points);
            }

            @Override
            public ShapeBuilder<?, ?, ?> visit(LinearRing ring) {
                throw new UnsupportedOperationException("LinearRing is not supported");
            }

            @Override
            public ShapeBuilder<?, ?, ?> visit(MultiLine multiLine) {
                final MultiLineStringBuilder lineBuilder = new MultiLineStringBuilder();
                for (int idx = 0; idx < multiLine.size(); idx++) {
                    lineBuilder.linestring((LineStringBuilder) visit(multiLine.get(idx)));
                }
                return lineBuilder;
            }

            @Override
            public ShapeBuilder<?, ?, ?> visit(MultiPoint multiPoint) {
                final List<Coordinate> points = new ArrayList<>(multiPoint.size());
                for (int idx = 0; idx < multiPoint.size(); idx++) {
                    final Point member = multiPoint.get(idx);
                    points.add(new Coordinate(member.getX(), member.getY(), member.getZ()));
                }
                return new MultiPointBuilder(points);
            }

            @Override
            public ShapeBuilder<?, ?, ?> visit(MultiPolygon multiPolygon) {
                final MultiPolygonBuilder polygonBuilder = new MultiPolygonBuilder();
                for (int idx = 0; idx < multiPolygon.size(); idx++) {
                    polygonBuilder.polygon((PolygonBuilder) visit(multiPolygon.get(idx)));
                }
                return polygonBuilder;
            }

            @Override
            public ShapeBuilder<?, ?, ?> visit(Point point) {
                // NOTE(review): any z coordinate is dropped here — matches the original behavior.
                return new PointBuilder(point.getX(), point.getY());
            }

            @Override
            public ShapeBuilder<?, ?, ?> visit(Polygon polygon) {
                final PolygonBuilder builder = new PolygonBuilder(
                    (LineStringBuilder) visit((Line) polygon.getPolygon()),
                    Orientation.RIGHT,
                    false
                );
                for (int idx = 0; idx < polygon.getNumberOfHoles(); idx++) {
                    builder.hole((LineStringBuilder) visit((Line) polygon.getHole(idx)));
                }
                return builder;
            }

            @Override
            public ShapeBuilder<?, ?, ?> visit(Rectangle rectangle) {
                // Envelope is defined by its upper-left and lower-right corners.
                return new EnvelopeBuilder(
                    new Coordinate(rectangle.getMinX(), rectangle.getMaxY()),
                    new Coordinate(rectangle.getMaxX(), rectangle.getMinY())
                );
            }
        });
    }
}
| |
/*
* Copyright (c) WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.humantask.coordination.module.handlers;
import java.util.Iterator;
import org.apache.axiom.om.OMAbstractFactory;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.OMException;
import org.apache.axiom.soap.SOAPFactory;
import org.apache.axiom.soap.SOAPHeader;
import org.apache.axiom.soap.SOAPHeaderBlock;
import org.apache.axis2.AxisFault;
import org.apache.axis2.addressing.EndpointReference;
import org.apache.axis2.client.Options;
import org.apache.axis2.client.ServiceClient;
import org.apache.axis2.context.MessageContext;
import org.apache.axis2.engine.Handler;
import org.apache.axis2.handlers.AbstractHandler;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.humantask.coordination.module.utils.Constants;
import org.wso2.carbon.humantask.coordination.module.HumanTaskCoordinationException;
import org.wso2.carbon.humantask.coordination.module.utils.SOAPUtils;
import org.wso2.carbon.humantask.coordination.module.utils.ServiceUtils;
import org.wso2.carbon.humantask.coordination.module.internal.HTCoordinationModuleContentHolder;
import org.wso2.carbon.humantask.core.configuration.HumanTaskServerConfiguration;
import org.wso2.carbon.utils.CarbonUtils;
import org.wso2.carbon.utils.multitenancy.MultitenantUtils;
/**
 * Axis2 handler that processes the WS-Coordination CoordinationContext of incoming
 * HumanTask service requests. When an HT coordination context is present, the task is
 * registered with the coordinator's registration service so the task parent can drive
 * the HumanTask coordination protocol; otherwise the message continues as an
 * uncoordinated HumanTask.
 */
public class HTCoordinationContextHandler extends AbstractHandler implements Handler {

    private static final Log log = LogFactory.getLog(HTCoordinationContextHandler.class);

    // HumanTask server configuration. Stays null when no HT server has been registered
    // with the coordination module, in which case this handler is a pass-through no-op.
    private static HumanTaskServerConfiguration serverConfig = null;

    static {
        if (HTCoordinationModuleContentHolder.getInstance().getHtServer() != null) {
            serverConfig = HTCoordinationModuleContentHolder.getInstance().getHtServer().getServerConfig();
        }
    }

    public HTCoordinationContextHandler() throws HumanTaskCoordinationException {
        super();
    }

    /**
     * Inspects the inbound SOAP header for a WS-Coordination CoordinationContext and, when a
     * HumanTask coordination context is found, registers the task with the task parent's
     * registration service.
     *
     * @param messageContext the inbound Axis2 message context
     * @return {@code CONTINUE} when the message should proceed (uncoordinated, or successfully
     *         registered); {@code ABORT} when the coordination context is invalid or the
     *         registration response fails validation
     * @throws AxisFault on SOAP header extraction errors or registration invocation failures
     */
    @Override
    public InvocationResponse invoke(MessageContext messageContext) throws AxisFault {
        // Coordination disabled (no HT server, or task registration switched off): pass through.
        if (serverConfig == null || !serverConfig.isTaskRegistrationEnabled()) {
            return InvocationResponse.CONTINUE;
        }
        SOAPHeader soapHeader;
        try {
            soapHeader = messageContext.getEnvelope().getHeader();
        } catch (OMException ex) {
            throw new AxisFault("Error while extracting SOAP header", ex);
        }
        if (soapHeader == null) {
            if (log.isDebugEnabled()) {
                log.debug("No SOAP Header received. Continuing as an uncoordinated HumanTask.");
            }
            return InvocationResponse.CONTINUE;
        }
        // Search the header blocks for a WS-Coordination CoordinationContext element.
        Iterator headers = soapHeader.getChildElements();
        SOAPHeaderBlock coordinationHeaderBlock = null;
        while (headers.hasNext()) {
            SOAPHeaderBlock hb = (SOAPHeaderBlock) headers.next();
            if (hb.getLocalName().equals(Constants.WS_COOR_COORDINATION_CONTEXT)
                    && hb.getNamespace().getNamespaceURI().equals(Constants.WS_COOR_NAMESPACE)) {
                coordinationHeaderBlock = hb;
                break;
            }
        }
        if (coordinationHeaderBlock == null) {
            if (log.isDebugEnabled()) {
                log.debug("No coordination context received. Processing as an uncoordinated HumanTask.");
            }
            return InvocationResponse.CONTINUE;
        }
        // A ws-coordination context was received: validate it for the HT coordination type.
        String coordinationType = SOAPUtils.getCoordinationType(coordinationHeaderBlock);
        if (!Constants.WS_HT_COORDINATION_TYPE.equals(coordinationType)) {
            // Only http://docs.oasis-open.org/ns/bpel4people/ws-humantask/protocol/200803 is
            // supported, so the message must not be allowed to go forward.
            // (Message spacing fixed: value was previously concatenated without a separator.)
            String errorMsg = "Message aborted ! Invalid coordination type " + coordinationType
                    + ". Support only " + Constants.WS_HT_COORDINATION_TYPE;
            log.error(errorMsg);
            return InvocationResponse.ABORT;
        }
        if (log.isDebugEnabled()) {
            log.debug("HT coordination context received.");
        }
        String identifier = SOAPUtils.getCoordinationIdentifier(coordinationHeaderBlock);
        String registrationService = SOAPUtils.getRegistrationService(coordinationHeaderBlock);
        // Validate the mandatory coordination context parameters; neither may be empty.
        if (identifier == null || identifier.isEmpty() || registrationService == null || registrationService.isEmpty()) {
            log.error("Message aborted ! Invalid coordination context parameters.");
            return InvocationResponse.ABORT;
        }
        // Service URL of the HumanTask Coordination Protocol Handler AdminService.
        String humanTaskProtocolHandlerServiceURL;
        try {
            humanTaskProtocolHandlerServiceURL = ServiceUtils.getTaskProtocolHandlerURL(messageContext.getConfigurationContext());
        } catch (HumanTaskCoordinationException e) {
            // Log with the cause so the stack trace is not lost before rethrowing.
            log.error("Error while generating HumanTask engine's protocol Handler Service URL.", e);
            throw new AxisFault(e.getLocalizedMessage(), e);
        }
        // Invoke the coordinator's Registration service.
        try {
            OMElement response = invokeRegistrationServiceUsingServiceClient(identifier, humanTaskProtocolHandlerServiceURL, registrationService);
            // The registration response is discarded (the callback service acts as the task
            // parent's protocol handler) but is still validated for successful completion.
            if (!SOAPUtils.validateResponse(response, identifier)) {
                log.error("Message aborted ! registration response validation failed.");
                return InvocationResponse.ABORT;
            }
            // Successful coordination.
            if (log.isDebugEnabled()) {
                log.debug("RegistrationResponse received. Task is successfully coordinated with Task parent.");
            }
        } catch (AxisFault e) {
            // Log with the cause so the stack trace is not lost before rethrowing.
            log.error("Error while invoking registration service", e);
            throw new AxisFault(e.getLocalizedMessage(), e);
        }
        return InvocationResponse.CONTINUE;
    }

    @Override
    public String getName() {
        return "HumanTask Coordination Protocol Handler";
    }

    /**
     * Sends a ws-coor Register request to the given registration service using a
     * {@link ServiceClient}, applying basic-auth headers from the HT server configuration
     * (tenant-qualified username when the registration service URL belongs to a tenant).
     *
     * @param identifier the coordination context identifier
     * @param taskProtocolHandlerServiceURL callback URL registered as the task parent's
     *        protocol handler service
     * @param registrationService endpoint URL of the coordinator's registration service
     * @return the registration response element
     * @throws AxisFault when the registration call fails
     */
    private OMElement invokeRegistrationServiceUsingServiceClient(String identifier, String taskProtocolHandlerServiceURL, String registrationService) throws AxisFault {
        OMElement payload = SOAPUtils.getRegistrationPayload(identifier, taskProtocolHandlerServiceURL);
        Options options = new Options();
        options.setTo(new EndpointReference(registrationService)); // location of the registration service
        options.setAction(Constants.WS_COOR_REGISTERATION_ACTION);
        options.setTransportInProtocol(org.apache.axis2.Constants.TRANSPORT_HTTPS);
        ServiceClient serviceClient = new ServiceClient();
        serviceClient.setOptions(options);
        // Set basic auth headers read from the HT server config. A registration service URL
        // with no tenant domain component targets the super tenant.
        String tenantDomain = MultitenantUtils.getTenantDomainFromUrl(registrationService);
        if (registrationService.equals(tenantDomain)) {
            // Super tenant registration service.
            CarbonUtils.setBasicAccessSecurityHeaders(serverConfig.getRegistrationServiceAuthUsername(), serverConfig.getRegistrationServiceAuthPassword(), serviceClient);
        } else {
            if (log.isDebugEnabled()) {
                log.debug("Sending ws-coor Registration request to tenant domain: " + tenantDomain);
            }
            // Tenant's registration service: username is tenant-qualified.
            CarbonUtils.setBasicAccessSecurityHeaders(
                    serverConfig.getRegistrationServiceAuthUsername() + "@" + tenantDomain,
                    serverConfig.getRegistrationServiceAuthPassword(),
                    serviceClient);
        }
        // NOTE(review): the ServiceClient is never cleaned up after the call; consider
        // serviceClient.cleanupTransport() in a finally block — confirm against Axis2 usage.
        return serviceClient.sendReceive(payload);
    }

    /**
     * Alternative registration invocation that builds a raw {@link MessageContext} and
     * delegates to {@link ServiceUtils#invokeRegistrationService}. Currently unused;
     * {@link #invokeRegistrationServiceUsingServiceClient} is the active path (it also
     * applies the basic-auth headers that this variant does not set).
     *
     * @return the response SOAP body, or {@code null} when no response envelope/body is present
     * @throws AxisFault when the registration call fails
     */
    private OMElement invokeRegistrationService(String identifier, String taskProtocolHandlerServiceURL, String registrationService) throws AxisFault {
        OMElement payload = SOAPUtils.getRegistrationPayload(identifier, taskProtocolHandlerServiceURL);
        SOAPFactory soap11Factory = OMAbstractFactory.getSOAP11Factory();
        MessageContext mctx = new MessageContext();
        // Ensure the message context carries a complete SOAP 1.1 envelope (body + header).
        if (mctx.getEnvelope() == null) {
            mctx.setEnvelope(soap11Factory.createSOAPEnvelope());
        }
        if (mctx.getEnvelope().getBody() == null) {
            soap11Factory.createSOAPBody(mctx.getEnvelope());
        }
        if (mctx.getEnvelope().getHeader() == null) {
            soap11Factory.createSOAPHeader(mctx.getEnvelope());
        }
        mctx.getEnvelope().getBody().addChild(payload);
        MessageContext responseMsgContext = ServiceUtils.invokeRegistrationService(mctx, registrationService);
        if (responseMsgContext.getEnvelope() != null && responseMsgContext.getEnvelope().getBody() != null) {
            return responseMsgContext.getEnvelope().getBody();
        }
        return null;
    }
}
| |
/* Generated by camel build tools - do NOT edit this file! */
package org.apache.camel.component.kubernetes.pods;
import java.util.Map;
import org.apache.camel.CamelContext;
import org.apache.camel.spi.ExtendedPropertyConfigurerGetter;
import org.apache.camel.spi.PropertyConfigurerGetter;
import org.apache.camel.spi.ConfigurerStrategy;
import org.apache.camel.spi.GeneratedPropertyConfigurer;
import org.apache.camel.util.CaseInsensitiveMap;
import org.apache.camel.support.component.PropertyConfigurerSupport;
/**
* Generated by camel build tools - do NOT edit this file!
*/
@SuppressWarnings("unchecked")
public class KubernetesPodsEndpointConfigurer extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {

    /**
     * Applies the named endpoint option to the target {@code KubernetesPodsEndpoint}.
     * Options are matched either by their all-lowercase form (when {@code ignoreCase})
     * or by their exact camelCase name.
     *
     * @return {@code true} when the option was recognized and set, {@code false} otherwise
     */
    @Override
    public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
        KubernetesPodsEndpoint target = (KubernetesPodsEndpoint) obj;
        switch (ignoreCase ? name.toLowerCase() : name) {
        case "apiversion":
        case "apiVersion": target.getConfiguration().setApiVersion(property(camelContext, java.lang.String.class, value)); return true;
        case "bridgeerrorhandler":
        case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
        case "cacertdata":
        case "caCertData": target.getConfiguration().setCaCertData(property(camelContext, java.lang.String.class, value)); return true;
        case "cacertfile":
        case "caCertFile": target.getConfiguration().setCaCertFile(property(camelContext, java.lang.String.class, value)); return true;
        case "clientcertdata":
        case "clientCertData": target.getConfiguration().setClientCertData(property(camelContext, java.lang.String.class, value)); return true;
        case "clientcertfile":
        case "clientCertFile": target.getConfiguration().setClientCertFile(property(camelContext, java.lang.String.class, value)); return true;
        case "clientkeyalgo":
        case "clientKeyAlgo": target.getConfiguration().setClientKeyAlgo(property(camelContext, java.lang.String.class, value)); return true;
        case "clientkeydata":
        case "clientKeyData": target.getConfiguration().setClientKeyData(property(camelContext, java.lang.String.class, value)); return true;
        case "clientkeyfile":
        case "clientKeyFile": target.getConfiguration().setClientKeyFile(property(camelContext, java.lang.String.class, value)); return true;
        case "clientkeypassphrase":
        case "clientKeyPassphrase": target.getConfiguration().setClientKeyPassphrase(property(camelContext, java.lang.String.class, value)); return true;
        case "connectiontimeout":
        case "connectionTimeout": target.getConfiguration().setConnectionTimeout(property(camelContext, java.lang.Integer.class, value)); return true;
        case "crdgroup":
        case "crdGroup": target.getConfiguration().setCrdGroup(property(camelContext, java.lang.String.class, value)); return true;
        case "crdname":
        case "crdName": target.getConfiguration().setCrdName(property(camelContext, java.lang.String.class, value)); return true;
        case "crdplural":
        case "crdPlural": target.getConfiguration().setCrdPlural(property(camelContext, java.lang.String.class, value)); return true;
        case "crdscope":
        case "crdScope": target.getConfiguration().setCrdScope(property(camelContext, java.lang.String.class, value)); return true;
        case "crdversion":
        case "crdVersion": target.getConfiguration().setCrdVersion(property(camelContext, java.lang.String.class, value)); return true;
        case "dnsdomain":
        case "dnsDomain": target.getConfiguration().setDnsDomain(property(camelContext, java.lang.String.class, value)); return true;
        case "exceptionhandler":
        case "exceptionHandler": target.setExceptionHandler(property(camelContext, org.apache.camel.spi.ExceptionHandler.class, value)); return true;
        case "exchangepattern":
        case "exchangePattern": target.setExchangePattern(property(camelContext, org.apache.camel.ExchangePattern.class, value)); return true;
        case "kubernetesclient":
        case "kubernetesClient": target.getConfiguration().setKubernetesClient(property(camelContext, io.fabric8.kubernetes.client.KubernetesClient.class, value)); return true;
        case "labelkey":
        case "labelKey": target.getConfiguration().setLabelKey(property(camelContext, java.lang.String.class, value)); return true;
        case "labelvalue":
        case "labelValue": target.getConfiguration().setLabelValue(property(camelContext, java.lang.String.class, value)); return true;
        case "lazystartproducer":
        case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
        case "namespace": target.getConfiguration().setNamespace(property(camelContext, java.lang.String.class, value)); return true;
        case "oauthtoken":
        case "oauthToken": target.getConfiguration().setOauthToken(property(camelContext, java.lang.String.class, value)); return true;
        case "operation": target.getConfiguration().setOperation(property(camelContext, java.lang.String.class, value)); return true;
        case "password": target.getConfiguration().setPassword(property(camelContext, java.lang.String.class, value)); return true;
        case "poolsize":
        case "poolSize": target.getConfiguration().setPoolSize(property(camelContext, int.class, value)); return true;
        case "portname":
        case "portName": target.getConfiguration().setPortName(property(camelContext, java.lang.String.class, value)); return true;
        case "portprotocol":
        case "portProtocol": target.getConfiguration().setPortProtocol(property(camelContext, java.lang.String.class, value)); return true;
        case "resourcename":
        case "resourceName": target.getConfiguration().setResourceName(property(camelContext, java.lang.String.class, value)); return true;
        case "trustcerts":
        case "trustCerts": target.getConfiguration().setTrustCerts(property(camelContext, java.lang.Boolean.class, value)); return true;
        case "username": target.getConfiguration().setUsername(property(camelContext, java.lang.String.class, value)); return true;
        default: return false;
        }
    }

    /**
     * Returns the declared Java type of the named option, or {@code null} for an
     * unknown option name.
     */
    @Override
    public Class<?> getOptionType(String name, boolean ignoreCase) {
        switch (ignoreCase ? name.toLowerCase() : name) {
        case "apiversion":
        case "apiVersion": return java.lang.String.class;
        case "bridgeerrorhandler":
        case "bridgeErrorHandler": return boolean.class;
        case "cacertdata":
        case "caCertData": return java.lang.String.class;
        case "cacertfile":
        case "caCertFile": return java.lang.String.class;
        case "clientcertdata":
        case "clientCertData": return java.lang.String.class;
        case "clientcertfile":
        case "clientCertFile": return java.lang.String.class;
        case "clientkeyalgo":
        case "clientKeyAlgo": return java.lang.String.class;
        case "clientkeydata":
        case "clientKeyData": return java.lang.String.class;
        case "clientkeyfile":
        case "clientKeyFile": return java.lang.String.class;
        case "clientkeypassphrase":
        case "clientKeyPassphrase": return java.lang.String.class;
        case "connectiontimeout":
        case "connectionTimeout": return java.lang.Integer.class;
        case "crdgroup":
        case "crdGroup": return java.lang.String.class;
        case "crdname":
        case "crdName": return java.lang.String.class;
        case "crdplural":
        case "crdPlural": return java.lang.String.class;
        case "crdscope":
        case "crdScope": return java.lang.String.class;
        case "crdversion":
        case "crdVersion": return java.lang.String.class;
        case "dnsdomain":
        case "dnsDomain": return java.lang.String.class;
        case "exceptionhandler":
        case "exceptionHandler": return org.apache.camel.spi.ExceptionHandler.class;
        case "exchangepattern":
        case "exchangePattern": return org.apache.camel.ExchangePattern.class;
        case "kubernetesclient":
        case "kubernetesClient": return io.fabric8.kubernetes.client.KubernetesClient.class;
        case "labelkey":
        case "labelKey": return java.lang.String.class;
        case "labelvalue":
        case "labelValue": return java.lang.String.class;
        case "lazystartproducer":
        case "lazyStartProducer": return boolean.class;
        case "namespace": return java.lang.String.class;
        case "oauthtoken":
        case "oauthToken": return java.lang.String.class;
        case "operation": return java.lang.String.class;
        case "password": return java.lang.String.class;
        case "poolsize":
        case "poolSize": return int.class;
        case "portname":
        case "portName": return java.lang.String.class;
        case "portprotocol":
        case "portProtocol": return java.lang.String.class;
        case "resourcename":
        case "resourceName": return java.lang.String.class;
        case "trustcerts":
        case "trustCerts": return java.lang.Boolean.class;
        case "username": return java.lang.String.class;
        default: return null;
        }
    }

    /**
     * Reads the current value of the named option from the target endpoint, or
     * {@code null} for an unknown option name.
     */
    @Override
    public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
        KubernetesPodsEndpoint target = (KubernetesPodsEndpoint) obj;
        switch (ignoreCase ? name.toLowerCase() : name) {
        case "apiversion":
        case "apiVersion": return target.getConfiguration().getApiVersion();
        case "bridgeerrorhandler":
        case "bridgeErrorHandler": return target.isBridgeErrorHandler();
        case "cacertdata":
        case "caCertData": return target.getConfiguration().getCaCertData();
        case "cacertfile":
        case "caCertFile": return target.getConfiguration().getCaCertFile();
        case "clientcertdata":
        case "clientCertData": return target.getConfiguration().getClientCertData();
        case "clientcertfile":
        case "clientCertFile": return target.getConfiguration().getClientCertFile();
        case "clientkeyalgo":
        case "clientKeyAlgo": return target.getConfiguration().getClientKeyAlgo();
        case "clientkeydata":
        case "clientKeyData": return target.getConfiguration().getClientKeyData();
        case "clientkeyfile":
        case "clientKeyFile": return target.getConfiguration().getClientKeyFile();
        case "clientkeypassphrase":
        case "clientKeyPassphrase": return target.getConfiguration().getClientKeyPassphrase();
        case "connectiontimeout":
        case "connectionTimeout": return target.getConfiguration().getConnectionTimeout();
        case "crdgroup":
        case "crdGroup": return target.getConfiguration().getCrdGroup();
        case "crdname":
        case "crdName": return target.getConfiguration().getCrdName();
        case "crdplural":
        case "crdPlural": return target.getConfiguration().getCrdPlural();
        case "crdscope":
        case "crdScope": return target.getConfiguration().getCrdScope();
        case "crdversion":
        case "crdVersion": return target.getConfiguration().getCrdVersion();
        case "dnsdomain":
        case "dnsDomain": return target.getConfiguration().getDnsDomain();
        case "exceptionhandler":
        case "exceptionHandler": return target.getExceptionHandler();
        case "exchangepattern":
        case "exchangePattern": return target.getExchangePattern();
        case "kubernetesclient":
        case "kubernetesClient": return target.getConfiguration().getKubernetesClient();
        case "labelkey":
        case "labelKey": return target.getConfiguration().getLabelKey();
        case "labelvalue":
        case "labelValue": return target.getConfiguration().getLabelValue();
        case "lazystartproducer":
        case "lazyStartProducer": return target.isLazyStartProducer();
        case "namespace": return target.getConfiguration().getNamespace();
        case "oauthtoken":
        case "oauthToken": return target.getConfiguration().getOauthToken();
        case "operation": return target.getConfiguration().getOperation();
        case "password": return target.getConfiguration().getPassword();
        case "poolsize":
        case "poolSize": return target.getConfiguration().getPoolSize();
        case "portname":
        case "portName": return target.getConfiguration().getPortName();
        case "portprotocol":
        case "portProtocol": return target.getConfiguration().getPortProtocol();
        case "resourcename":
        case "resourceName": return target.getConfiguration().getResourceName();
        case "trustcerts":
        case "trustCerts": return target.getConfiguration().getTrustCerts();
        case "username": return target.getConfiguration().getUsername();
        default: return null;
        }
    }
}
| |
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.inputmethod.keyboard.internal;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.Rect;
import android.os.SystemClock;
import com.udmurtlyk.extrainputmethod.latin.Constants;
import com.android.inputmethod.latin.utils.ResizableIntArray;
/**
* This class holds drawing points to represent a gesture trail. The gesture trail may contain
* multiple non-contiguous gesture strokes and will be animated asynchronously from gesture input.
*
* On the other hand, {@link GestureStrokeDrawingPoints} class holds drawing points of each gesture
* stroke. This class holds drawing points of those gesture strokes to draw as a gesture trail.
* Drawing points in this class will be asynchronously removed when fading out animation goes.
*/
final class GestureTrailDrawingPoints {
    public static final boolean DEBUG_SHOW_POINTS = false;
    public static final int POINT_TYPE_SAMPLED = 1;
    public static final int POINT_TYPE_INTERPOLATED = 2;
    private static final int DEFAULT_CAPACITY = GestureStrokeDrawingPoints.PREVIEW_CAPACITY;

    // Parallel arrays: element i of each array describes the same drawing point. All of them
    // must be accessed only while synchronized on {@link #mEventTimes}.
    private final ResizableIntArray mXCoordinates = new ResizableIntArray(DEFAULT_CAPACITY);
    private final ResizableIntArray mYCoordinates = new ResizableIntArray(DEFAULT_CAPACITY);
    private final ResizableIntArray mEventTimes = new ResizableIntArray(DEFAULT_CAPACITY);
    // Point types are only recorded when debugging; otherwise this array stays empty.
    private final ResizableIntArray mPointTypes = new ResizableIntArray(
            DEBUG_SHOW_POINTS ? DEFAULT_CAPACITY : 0);
    // Id of the stroke most recently appended; -1 until the first stroke arrives.
    private int mCurrentStrokeId = -1;
    // The wall time of the zero value in {@link #mEventTimes}
    private long mCurrentTimeBase;
    // Index of the first trail point that is still young enough to be drawn.
    private int mTrailStartIndex;
    // Start index of the last interpolated segment of the current stroke; see
    // {@link #addStrokeLocked(GestureStrokeDrawingPoints,long)}.
    private int mLastInterpolatedDrawIndex;

    // Use this value as imaginary zero because x-coordinates may be zero.
    // An x-coordinate c is stored as (DOWN_EVENT_MARKER - c), which is always <= -128,
    // to tag the point as the down (first) event of a stroke.
    private static final int DOWN_EVENT_MARKER = -128;

    /** Encodes an x-coordinate so it is recognizable as a down event. */
    private static int markAsDownEvent(final int xCoord) {
        return DOWN_EVENT_MARKER - xCoord;
    }

    /** Returns whether the stored x-coordinate carries the down-event marker. */
    private static boolean isDownEventXCoord(final int xCoordOrMark) {
        return xCoordOrMark <= DOWN_EVENT_MARKER;
    }

    /** Decodes a stored x-coordinate, stripping the down-event marker if present. */
    private static int getXCoordValue(final int xCoordOrMark) {
        return isDownEventXCoord(xCoordOrMark)
                ? DOWN_EVENT_MARKER - xCoordOrMark : xCoordOrMark;
    }

    /**
     * Appends the preview points of a gesture stroke to this trail.
     *
     * @param stroke the gesture stroke whose drawing points should be appended
     * @param downTime the down time of the stroke; expected on the same time base as
     *        {@link SystemClock#uptimeMillis()}, since
     *        {@link #drawGestureTrail(Canvas,Paint,Rect,GestureTrailDrawingParams)} compares
     *        against that clock
     */
    public void addStroke(final GestureStrokeDrawingPoints stroke, final long downTime) {
        synchronized (mEventTimes) {
            addStrokeLocked(stroke, downTime);
        }
    }

    // Must be called while synchronized on {@link #mEventTimes}.
    private void addStrokeLocked(final GestureStrokeDrawingPoints stroke, final long downTime) {
        final int trailSize = mEventTimes.getLength();
        stroke.appendPreviewStroke(mEventTimes, mXCoordinates, mYCoordinates, mPointTypes);
        // Nothing was appended; no bookkeeping needed.
        if (mEventTimes.getLength() == trailSize) {
            return;
        }
        final int[] eventTimes = mEventTimes.getPrimitiveArray();
        final int strokeId = stroke.getGestureStrokeId();
        // Because interpolation algorithm in {@link GestureStrokeDrawingPoints} can't determine
        // the interpolated points in the last segment of gesture stroke, it may need recalculation
        // of interpolation when new segments are added to the stroke.
        // {@link #mLastInterpolatedDrawIndex} holds the start index of the last segment. It may
        // be updated by the interpolation
        // {@link GestureStrokeDrawingPoints#interpolatePreviewStroke}
        // or by animation {@link #drawGestureTrail(Canvas,Paint,Rect,GestureTrailDrawingParams)}
        // below.
        final int lastInterpolatedIndex = (strokeId == mCurrentStrokeId)
                ? mLastInterpolatedDrawIndex : trailSize;
        mLastInterpolatedDrawIndex = stroke.interpolateStrokeAndReturnStartIndexOfLastSegment(
                lastInterpolatedIndex, mEventTimes, mXCoordinates, mYCoordinates, mPointTypes);
        if (strokeId != mCurrentStrokeId) {
            // A new stroke has started. Shift the previous strokes' event times backwards so
            // they keep aging correctly once the time base is moved to this stroke's down time.
            final int elapsedTime = (int)(downTime - mCurrentTimeBase);
            for (int i = mTrailStartIndex; i < trailSize; i++) {
                // Decay the previous strokes' event times.
                eventTimes[i] -= elapsedTime;
            }
            final int[] xCoords = mXCoordinates.getPrimitiveArray();
            // The first appended point of the new stroke is at the old trail size; mark it as
            // a down event so drawing won't connect it to the previous stroke.
            final int downIndex = trailSize;
            xCoords[downIndex] = markAsDownEvent(xCoords[downIndex]);
            // Rebase so that eventTimes[downIndex] corresponds to downTime on the wall clock.
            mCurrentTimeBase = downTime - eventTimes[downIndex];
            mCurrentStrokeId = strokeId;
        }
    }

    /**
     * Calculate the alpha of a gesture trail.
     * A gesture trail starts from fully opaque. After mFadeStartDelay has been passed, the alpha
     * of a trail reduces in proportion to the elapsed time. Then after mFadeDuration has been
     * passed, a trail becomes fully transparent.
     *
     * @param elapsedTime the elapsed time since a trail has been made.
     * @param params gesture trail display parameters
     * @return the alpha of a gesture trail
     */
    private static int getAlpha(final int elapsedTime, final GestureTrailDrawingParams params) {
        if (elapsedTime < params.mFadeoutStartDelay) {
            return Constants.Color.ALPHA_OPAQUE;
        }
        final int decreasingAlpha = Constants.Color.ALPHA_OPAQUE
                * (elapsedTime - params.mFadeoutStartDelay)
                / params.mFadeoutDuration;
        return Constants.Color.ALPHA_OPAQUE - decreasingAlpha;
    }

    /**
     * Calculate the width of a gesture trail.
     * A gesture trail starts from the width of mTrailStartWidth and reduces its width in proportion
     * to the elapsed time. After mTrailLingerDuration has been passed, the width becomes
     * mTrailEndWidth.
     *
     * @param elapsedTime the elapsed time since a trail has been made.
     * @param params gesture trail display parameters
     * @return the width of a gesture trail
     */
    private static float getWidth(final int elapsedTime, final GestureTrailDrawingParams params) {
        final float deltaWidth = params.mTrailStartWidth - params.mTrailEndWidth;
        return params.mTrailStartWidth - (deltaWidth * elapsedTime) / params.mTrailLingerDuration;
    }

    // Scratch objects reused across draw calls to avoid per-frame allocations.
    private final RoundedLine mRoundedLine = new RoundedLine();
    private final Rect mRoundedLineBounds = new Rect();

    /**
     * Draw gesture trail
     * @param canvas The canvas to draw the gesture trail
     * @param paint The paint object to be used to draw the gesture trail
     * @param outBoundsRect the bounding box of this gesture trail drawing
     * @param params The drawing parameters of gesture trail
     * @return true if some gesture trails remain to be drawn
     */
    public boolean drawGestureTrail(final Canvas canvas, final Paint paint,
            final Rect outBoundsRect, final GestureTrailDrawingParams params) {
        synchronized (mEventTimes) {
            return drawGestureTrailLocked(canvas, paint, outBoundsRect, params);
        }
    }

    // Must be called while synchronized on {@link #mEventTimes}.
    private boolean drawGestureTrailLocked(final Canvas canvas, final Paint paint,
            final Rect outBoundsRect, final GestureTrailDrawingParams params) {
        // Initialize bounds rectangle.
        outBoundsRect.setEmpty();
        final int trailSize = mEventTimes.getLength();
        if (trailSize == 0) {
            return false;
        }
        final int[] eventTimes = mEventTimes.getPrimitiveArray();
        final int[] xCoords = mXCoordinates.getPrimitiveArray();
        final int[] yCoords = mYCoordinates.getPrimitiveArray();
        final int[] pointTypes = mPointTypes.getPrimitiveArray();
        // Time elapsed since the current stroke's down event, on the uptimeMillis() clock.
        final int sinceDown = (int)(SystemClock.uptimeMillis() - mCurrentTimeBase);
        int startIndex;
        for (startIndex = mTrailStartIndex; startIndex < trailSize; startIndex++) {
            final int elapsedTime = sinceDown - eventTimes[startIndex];
            // Skip too old trail points.
            if (elapsedTime < params.mTrailLingerDuration) {
                break;
            }
        }
        mTrailStartIndex = startIndex;
        if (startIndex < trailSize) {
            paint.setColor(params.mTrailColor);
            paint.setStyle(Paint.Style.FILL);
            final RoundedLine roundedLine = mRoundedLine;
            // Previous point of each segment; start from the oldest still-visible point.
            int p1x = getXCoordValue(xCoords[startIndex]);
            int p1y = yCoords[startIndex];
            final int lastTime = sinceDown - eventTimes[startIndex];
            // Radii are half the age-dependent trail width.
            float r1 = getWidth(lastTime, params) / 2.0f;
            for (int i = startIndex + 1; i < trailSize; i++) {
                final int elapsedTime = sinceDown - eventTimes[i];
                final int p2x = getXCoordValue(xCoords[i]);
                final int p2y = yCoords[i];
                final float r2 = getWidth(elapsedTime, params) / 2.0f;
                // Draw trail line only when the current point isn't a down point.
                if (!isDownEventXCoord(xCoords[i])) {
                    final float body1 = r1 * params.mTrailBodyRatio;
                    final float body2 = r2 * params.mTrailBodyRatio;
                    final Path path = roundedLine.makePath(p1x, p1y, body1, p2x, p2y, body2);
                    if (!path.isEmpty()) {
                        roundedLine.getBounds(mRoundedLineBounds);
                        if (params.mTrailShadowEnabled) {
                            final float shadow2 = r2 * params.mTrailShadowRatio;
                            paint.setShadowLayer(shadow2, 0.0f, 0.0f, params.mTrailColor);
                            // Grow the bounds to account for the shadow's extent.
                            final int shadowInset = -(int)Math.ceil(shadow2);
                            mRoundedLineBounds.inset(shadowInset, shadowInset);
                        }
                        // Take union for the bounds.
                        outBoundsRect.union(mRoundedLineBounds);
                        final int alpha = getAlpha(elapsedTime, params);
                        paint.setAlpha(alpha);
                        canvas.drawPath(path, paint);
                    }
                }
                p1x = p2x;
                p1y = p2y;
                r1 = r2;
            }
            if (DEBUG_SHOW_POINTS) {
                debugDrawPoints(canvas, startIndex, trailSize, paint);
            }
        }
        final int newSize = trailSize - startIndex;
        // Compact the arrays only when the dead prefix outweighs the live tail, so the copying
        // cost is amortized instead of paid on every draw.
        if (newSize < startIndex) {
            mTrailStartIndex = 0;
            if (newSize > 0) {
                System.arraycopy(eventTimes, startIndex, eventTimes, 0, newSize);
                System.arraycopy(xCoords, startIndex, xCoords, 0, newSize);
                System.arraycopy(yCoords, startIndex, yCoords, 0, newSize);
                if (DEBUG_SHOW_POINTS) {
                    System.arraycopy(pointTypes, startIndex, pointTypes, 0, newSize);
                }
            }
            mEventTimes.setLength(newSize);
            mXCoordinates.setLength(newSize);
            mYCoordinates.setLength(newSize);
            if (DEBUG_SHOW_POINTS) {
                mPointTypes.setLength(newSize);
            }
            // The start index of the last segment of the stroke
            // {@link mLastInterpolatedDrawIndex} should also be updated because all array
            // elements have just been shifted for compaction or been zeroed.
            mLastInterpolatedDrawIndex = Math.max(mLastInterpolatedDrawIndex - startIndex, 0);
        }
        return newSize > 0;
    }

    /**
     * Draws one pixel per trail point, color-coded by point type, for debugging.
     * Only invoked when {@link #DEBUG_SHOW_POINTS} is true.
     */
    private void debugDrawPoints(final Canvas canvas, final int startIndex, final int endIndex,
            final Paint paint) {
        final int[] xCoords = mXCoordinates.getPrimitiveArray();
        final int[] yCoords = mYCoordinates.getPrimitiveArray();
        final int[] pointTypes = mPointTypes.getPrimitiveArray();
        // {@link Paint} that is zero width stroke and anti alias off draws exactly 1 pixel.
        paint.setAntiAlias(false);
        paint.setStrokeWidth(0);
        for (int i = startIndex; i < endIndex; i++) {
            final int pointType = pointTypes[i];
            if (pointType == POINT_TYPE_INTERPOLATED) {
                paint.setColor(Color.RED);
            } else if (pointType == POINT_TYPE_SAMPLED) {
                paint.setColor(0xFFA000FF);
            } else {
                paint.setColor(Color.GREEN);
            }
            canvas.drawPoint(getXCoordValue(xCoords[i]), yCoords[i], paint);
        }
        paint.setAntiAlias(true);
    }
}
| |
/**
* Copyright 2017 FIX Protocol Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*
*/
package io.fixprotocol.orchestra.dsl.antlr;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.math.BigDecimal;
import java.time.Duration;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalTime;
import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Recognizer;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import io.fixprotocol.orchestra.dsl.antlr.ScoreParser.AnyExpressionContext;
import io.fixprotocol.orchestra.model.FixType;
import io.fixprotocol.orchestra.model.FixValue;
import io.fixprotocol.orchestra.model.ModelException;
import io.fixprotocol.orchestra.model.PathStep;
import io.fixprotocol.orchestra.model.SymbolResolver;
/**
* @author Don Mendelson
*
*/
public class ScoreVisitorImplTest {

  /** A DSL expression under test, paired with the value its evaluation should yield. */
  private class TestData {
    private final String expression;
    private final Object expected;

    public TestData(String expression, Object expected) {
      this.expression = expression;
      this.expected = expected;
    }

    public Object getExpected() {
      return expected;
    }

    public String getExpression() {
      return expression;
    }
  }

  /** Captures the first semantic error reported by the visitor so tests can assert on it. */
  private class ErrorListener implements SemanticErrorListener {
    private String msg = null;

    @Override
    public void onError(String msg) {
      this.msg = msg;
    }

    boolean hasError() {
      return msg != null;
    }
  }

  private ScoreVisitorImpl visitor;
  private SymbolResolver symbolResolver;
  private ErrorListener errorListener;

  /**
   * @throws java.lang.Exception
   */
  @BeforeEach
  public void setUp() throws Exception {
    // Only resolve variables for now
    this.symbolResolver = new SymbolResolver();
    //this.symbolResolver.setTrace(true);
    this.errorListener = new ErrorListener();
    this.visitor = new ScoreVisitorImpl(symbolResolver, errorListener);
  }

  /** Builds a parser over {@code expression} that fails fast on any syntax error. */
  private ScoreParser parse(String expression) throws IOException {
    final ScoreLexer lexer = new ScoreLexer(CharStreams.fromString(expression));
    final ScoreParser parser = new ScoreParser(new CommonTokenStream(lexer));
    parser.addErrorListener(new BaseErrorListener() {
      @Override
      public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line,
          int charPositionInLine, String msg, RecognitionException e) {
        throw new IllegalStateException(String.format(
            "Failed to parse at line %d position %d due to %s", line, charPositionInLine, msg), e);
      }
    });
    return parser;
  }

  /** Parses and evaluates {@code expression}, returning the raw visitor result. */
  private Object evaluate(String expression) throws IOException {
    final AnyExpressionContext ctx = parse(expression).anyExpression();
    return visitor.visitAnyExpression(ctx);
  }

  /** Parses and evaluates {@code expression}, casting the result to a {@link FixValue}. */
  private FixValue<?> evaluateFixValue(String expression) throws IOException {
    return (FixValue<?>) evaluate(expression);
  }

  /** Evaluates every entry of {@code data} and asserts the expected result value. */
  private void assertAllEvaluateTo(TestData[] data) throws IOException {
    for (final TestData datum : data) {
      final FixValue<?> fixValue = evaluateFixValue(datum.getExpression());
      assertEquals(datum.getExpected(), fixValue.getValue());
    }
  }

  /**
   * Test method for
   * {@link io.fixprotocol.orchestra.dsl.antlr.ScoreVisitorImpl#visitAddSub(io.fixprotocol.orchestra.dsl.antlr.ScoreParser.AddSubContext)}.
   *
   * @throws IOException
   */
  @Test
  public void testVisitAddSub() throws IOException {
    assertAllEvaluateTo(new TestData[] {
        new TestData("33 + 44", Integer.valueOf(77)),
        new TestData("44 - 33", Integer.valueOf(11)),
        new TestData("7.12 + 2.34", new BigDecimal("9.46")),
        new TestData("65.55 - 2.34", new BigDecimal("63.21")),
        new TestData("7.12 + 2", new BigDecimal("9.12")),
    });
  }

  /**
   * Test method for
   * {@link io.fixprotocol.orchestra.dsl.antlr.ScoreVisitorImpl#visitAnyExpression(io.fixprotocol.orchestra.dsl.antlr.ScoreParser.AnyExpressionContext)}.
   *
   * @throws IOException
   */
  @Test
  public void testVisitAnyExpression() throws IOException {
    assertAllEvaluateTo(new TestData[] {
        new TestData("!(33 == 4 and 5 < 6) and 12 >= 11", Boolean.TRUE),
    });
  }

  /**
   * Test method for
   * {@link io.fixprotocol.orchestra.dsl.antlr.ScoreVisitorImpl#visitAssignment(io.fixprotocol.orchestra.dsl.antlr.ScoreParser.AssignmentContext)}.
   *
   * @throws IOException
   */
  @Test
  public void testVisitAssignment() throws IOException {
    assertAllEvaluateTo(new TestData[] {
        new TestData("$x = 33", 33),
        new TestData("$y = 33.5", new BigDecimal("33.5")),
        new TestData("$a = 'a'", 'a'),
        new TestData("$b = \"abc\"", "abc"),
    });
  }

  @Test
  public void testVisitMissingAssignment() throws IOException {
    final TestData[] data = new TestData[] {
        new TestData("$x = $foo", 33),
    };
    for (final TestData datum : data) {
      // Assigning from an unknown variable yields no value and reports a semantic error.
      assertNull(evaluate(datum.getExpression()));
      assertTrue(errorListener.hasError());
    }
  }

  @Test
  public void testReAssignment() throws IOException {
    final TestData[] data = new TestData[] {
        new TestData("$x = 33", 33),
        new TestData("$x = 44", 44),
    };
    for (final TestData datum : data) {
      final FixValue<?> fixValue = evaluateFixValue(datum.getExpression());
      assertEquals(datum.getExpected(), fixValue.getValue());
      assertEquals("x", fixValue.getName());
    }
  }

  /**
   * Test method for
   * {@link io.fixprotocol.orchestra.dsl.antlr.ScoreVisitorImpl#visitBooleanNot(io.fixprotocol.orchestra.dsl.antlr.ScoreParser.BooleanNotContext)}.
   *
   * @throws IOException
   */
  @Test
  public void testVisitBooleanNot() throws IOException {
    assertAllEvaluateTo(new TestData[] {
        new TestData("!(33 > 4)", Boolean.FALSE),
        new TestData("!(33 < 4)", Boolean.TRUE),
    });
  }

  /**
   * Test method for
   * {@link io.fixprotocol.orchestra.dsl.antlr.ScoreVisitorImpl#visitCharacter(io.fixprotocol.orchestra.dsl.antlr.ScoreParser.CharacterContext)}.
   *
   * @throws IOException
   */
  @Test
  public void testVisitCharacter() throws IOException {
    final String value = "\'g\'";
    final Object expression = evaluate(value);
    assertTrue(expression instanceof FixValue<?>);
    final FixValue<?> fixValue = (FixValue<?>) expression;
    assertEquals(FixType.charType, fixValue.getType());
    // The character inside the quotes is at index 1 of the source text.
    assertEquals(value.charAt(1), fixValue.getValue());
  }

  /**
   * Test method for
   * {@link io.fixprotocol.orchestra.dsl.antlr.ScoreVisitorImpl#visitContains(io.fixprotocol.orchestra.dsl.antlr.ScoreParser.ContainsContext)}.
   *
   * @throws IOException
   */
  @Test
  public void testVisitContains() throws IOException {
    assertAllEvaluateTo(new TestData[] {
        new TestData("33 in {4, 7, 9}", Boolean.FALSE),
        new TestData("33 in {4, 7, 9, 33}", Boolean.TRUE),
        new TestData("30 + 3 in {4, 7, 9, 33}", Boolean.TRUE),
    });
  }

  /**
   * Test method for
   * {@link io.fixprotocol.orchestra.dsl.antlr.ScoreVisitorImpl#visitDecimal(io.fixprotocol.orchestra.dsl.antlr.ScoreParser.DecimalContext)}.
   *
   * @throws IOException
   */
  @Test
  public void testVisitDecimal() throws IOException {
    final String value = "456.789";
    final Object expression = evaluate(value);
    assertTrue(expression instanceof FixValue<?>);
    final FixValue<?> fixValue = (FixValue<?>) expression;
    assertEquals(FixType.floatType, fixValue.getType());
    assertEquals(new BigDecimal(value), fixValue.getValue());
  }

  /**
   * Test method for
   * {@link io.fixprotocol.orchestra.dsl.antlr.ScoreVisitorImpl#visitEquality(io.fixprotocol.orchestra.dsl.antlr.ScoreParser.EqualityContext)}.
   *
   * @throws IOException
   */
  @Test
  public void testVisitEquality() throws IOException {
    assertAllEvaluateTo(new TestData[] {
        new TestData("33 == 4", Boolean.FALSE),
        new TestData("33 != 4", Boolean.TRUE),
        new TestData("33.5 == 4.0", Boolean.FALSE),
        new TestData("33.5 != 4.0", Boolean.TRUE),
        new TestData("4 == 4.0", Boolean.TRUE),
    });
  }

  /**
   * Test method for
   * {@link io.fixprotocol.orchestra.dsl.antlr.ScoreVisitorImpl#visitInteger(io.fixprotocol.orchestra.dsl.antlr.ScoreParser.IntegerContext)}.
   *
   * @throws IOException
   */
  @Test
  public void testVisitInteger() throws IOException {
    final String value = "456";
    final Object expression = evaluate(value);
    assertTrue(expression instanceof FixValue<?>);
    final FixValue<?> fixValue = (FixValue<?>) expression;
    assertEquals(FixType.intType, fixValue.getType());
    assertEquals(Integer.valueOf(value), fixValue.getValue());
  }

  /**
   * Test method for
   * {@link io.fixprotocol.orchestra.dsl.antlr.ScoreVisitorImpl#visitLogicalAnd(io.fixprotocol.orchestra.dsl.antlr.ScoreParser.LogicalAndContext)}.
   *
   * @throws IOException
   */
  @Test
  public void testVisitLogicalAnd() throws IOException {
    assertAllEvaluateTo(new TestData[] {
        new TestData("33 == 4 and 5 < 6", Boolean.FALSE),
        new TestData("33 == 33 and 5 < 6", Boolean.TRUE),
    });
  }

  /**
   * Test method for
   * {@link io.fixprotocol.orchestra.dsl.antlr.ScoreVisitorImpl#visitLogicalOr(io.fixprotocol.orchestra.dsl.antlr.ScoreParser.LogicalOrContext)}.
   *
   * @throws IOException
   */
  @Test
  public void testVisitLogicalOr() throws IOException {
    assertAllEvaluateTo(new TestData[] {
        new TestData("33 == 4 or 5 < 4", Boolean.FALSE),
        new TestData("33 == 4 or 5 > 4", Boolean.TRUE),
    });
  }

  /**
   * Test method for
   * {@link io.fixprotocol.orchestra.dsl.antlr.ScoreVisitorImpl#visitMulDiv(io.fixprotocol.orchestra.dsl.antlr.ScoreParser.MulDivContext)}.
   *
   * @throws IOException
   */
  @Test
  public void testVisitMulDiv() throws IOException {
    assertAllEvaluateTo(new TestData[] {
        new TestData("33 * 4", Integer.valueOf(132)),
        new TestData("44 / 3", Integer.valueOf(14)),
        new TestData("44 % 3", Integer.valueOf(2)),
        new TestData("7.12 * 2.3", new BigDecimal("16.376")),
        new TestData("65.55 / 2.3", new BigDecimal("28.5")),
        new TestData("44.0 / 4", new BigDecimal("11.0")),
    });
  }

  @Test
  public void testVisitMulDivError() throws IOException {
    final TestData[] data = new TestData[] {
        new TestData("44 / 0", Integer.valueOf(2)),
    };
    for (final TestData datum : data) {
      // Division by zero yields no value and reports a semantic error.
      assertNull(evaluate(datum.getExpression()));
      assertTrue(errorListener.hasError());
    }
  }

  /**
   * Test method for
   * {@link io.fixprotocol.orchestra.dsl.antlr.ScoreVisitorImpl#visitParens(io.fixprotocol.orchestra.dsl.antlr.ScoreParser.ParensContext)}.
   *
   * @throws IOException
   */
  @Test
  public void testVisitParens() throws IOException {
    assertAllEvaluateTo(new TestData[] {
        new TestData("2 + 3 * 5", Integer.valueOf(17)),
        new TestData("(2 + 3) * 5", Integer.valueOf(25)),
    });
  }

  /**
   * Test method for
   * {@link io.fixprotocol.orchestra.dsl.antlr.ScoreVisitorImpl#visitRange(io.fixprotocol.orchestra.dsl.antlr.ScoreParser.RangeContext)}.
   *
   * @throws IOException
   */
  @Test
  public void testVisitRange() throws IOException {
    assertAllEvaluateTo(new TestData[] {
        new TestData("33 between 4 and 7", Boolean.FALSE),
        new TestData("33 between 4 and 37", Boolean.TRUE),
    });
  }

  /**
   * Test method for
   * {@link io.fixprotocol.orchestra.dsl.antlr.ScoreVisitorImpl#visitRelational(io.fixprotocol.orchestra.dsl.antlr.ScoreParser.RelationalContext)}.
   *
   * @throws IOException
   */
  @Test
  public void testVisitRelational() throws IOException {
    assertAllEvaluateTo(new TestData[] {
        new TestData("33 < 4", Boolean.FALSE),
        new TestData("33 <= 4", Boolean.FALSE),
        new TestData("33 <= 33", Boolean.TRUE),
        new TestData("33 > 4", Boolean.TRUE),
        new TestData("33 >= 4", Boolean.TRUE),
        new TestData("33 >= 33", Boolean.TRUE),
    });
  }

  /**
   * Test method for
   * {@link io.fixprotocol.orchestra.dsl.antlr.ScoreVisitorImpl#visitString(io.fixprotocol.orchestra.dsl.antlr.ScoreParser.StringContext)}.
   *
   * @throws IOException
   */
  @Test
  public void testVisitString() throws IOException {
    final String value = "\"abcde\"";
    final Object expression = evaluate(value);
    assertTrue(expression instanceof FixValue<?>);
    final FixValue<?> fixValue = (FixValue<?>) expression;
    assertEquals(FixType.StringType, fixValue.getType());
    // The evaluated value is the source text with the surrounding quotes removed.
    assertEquals(value.replace("\"", ""), fixValue.getValue());
  }

  @Test
  public void testVisitVariable() throws IOException, ScoreException, ModelException {
    final String varName = "$orderCount";
    symbolResolver.assign(new PathStep(varName), new FixValue<Integer>(FixType.intType, 7));
    final Object expression = evaluate(varName);
    assertTrue(expression instanceof FixValue<?>);
    final FixValue<?> fixValue = (FixValue<?>) expression;
    assertEquals(FixType.intType, fixValue.getType());
    assertEquals(7, fixValue.getValue());
  }

  @Test
  public void testVisitExist() throws IOException, ScoreException, ModelException {
    final String varName = "$orderCount";
    final String dslExpression = "exists " + varName;
    symbolResolver.assign(new PathStep(varName), new FixValue<Integer>(FixType.intType, 7));
    final Object expression = evaluate(dslExpression);
    assertTrue(expression instanceof FixValue<?>);
    @SuppressWarnings("unchecked")
    final FixValue<Boolean> fixValue = (FixValue<Boolean>) expression;
    assertEquals(FixType.BooleanType, fixValue.getType());
    assertTrue(fixValue.getValue());
  }

  @Test
  public void testVisitExistNotFound() throws IOException, ScoreException, ModelException {
    final String varName = "$orderCount";
    final String dslExpression = "exists " + varName;
    final Object expression = evaluate(dslExpression);
    assertTrue(expression instanceof FixValue<?>);
    @SuppressWarnings("unchecked")
    final FixValue<Boolean> fixValue = (FixValue<Boolean>) expression;
    assertEquals(FixType.BooleanType, fixValue.getType());
    assertFalse(fixValue.getValue());
  }

  @Test
  public void testVisitDate() throws IOException {
    final String value = "#2017-02-03#";
    final Object expression = evaluate(value);
    assertTrue(expression instanceof FixValue<?>);
    final FixValue<?> fixValue = (FixValue<?>) expression;
    assertEquals(FixType.UTCDateOnly, fixValue.getType());
    // Strip the surrounding '#' delimiters before parsing the ISO date.
    assertEquals(LocalDate.parse(value.substring(1, value.lastIndexOf('#'))), fixValue.getValue());
  }

  @Test
  public void testVisitTimestamp() throws IOException {
    final String value = "#2017-02-03T11:12:13.123456789Z#";
    final Object expression = evaluate(value);
    assertTrue(expression instanceof FixValue<?>);
    final FixValue<?> fixValue = (FixValue<?>) expression;
    assertEquals(FixType.UTCTimestamp, fixValue.getType());
    assertTrue(fixValue.getValue() instanceof Instant);
  }

  @Test
  public void testVisitTimeOnly() throws IOException {
    final String value = "#11:12:13.123456789Z#";
    final Object expression = evaluate(value);
    assertTrue(expression instanceof FixValue<?>);
    final FixValue<?> fixValue = (FixValue<?>) expression;
    assertEquals(FixType.UTCTimeOnly, fixValue.getType());
    assertTrue(fixValue.getValue() instanceof LocalTime);
  }

  @Test
  public void testVisitDuration() throws IOException {
    final String value = "#PT30S#";
    final Object expression = evaluate(value);
    assertTrue(expression instanceof FixValue<?>);
    final FixValue<?> fixValue = (FixValue<?>) expression;
    assertEquals(FixType.Duration, fixValue.getType());
    assertTrue(fixValue.getValue() instanceof Duration);
  }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.index.analysis;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockTokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.en.EnglishAnalyzer;
import org.apache.lucene.analysis.reverse.ReverseStringFilter;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.analysis.standard.StandardTokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.MapperException;
import org.elasticsearch.indices.analysis.AnalysisModule;
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
import org.elasticsearch.indices.analysis.PreBuiltAnalyzers;
import org.elasticsearch.plugins.AnalysisPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.test.VersionUtils;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
public class AnalysisRegistryTests extends ESTestCase {
    // Registry built with no analysis components registered; only hard-wired defaults apply.
    private AnalysisRegistry emptyRegistry;
    // Registry built via AnalysisModule with built-in components plus MockAnalysisPlugin.
    private AnalysisRegistry nonEmptyRegistry;
    /**
     * Creates an index-scoped, pre-built analyzer provider wrapping an {@link EnglishAnalyzer}.
     *
     * @param name the name the analyzer is registered under
     * @return the analyzer provider
     */
    private static AnalyzerProvider<?> analyzerProvider(final String name) {
        return new PreBuiltAnalyzerProvider(name, AnalyzerScope.INDEX, new EnglishAnalyzer());
    }
    /**
     * Builds an {@link AnalysisRegistry} with every component map empty, so only the registry's
     * built-in defaults are available.
     *
     * @param settings node settings used to create the test environment
     * @return a registry with no registered analysis components
     */
    private static AnalysisRegistry emptyAnalysisRegistry(Settings settings) {
        return new AnalysisRegistry(TestEnvironment.newEnvironment(settings), emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap(),
                emptyMap(), emptyMap(), emptyMap(), emptyMap());
    }
    /**
     * Test plugin that makes a "reverse" token filter available, used by the normalizer
     * name-clash test.
     */
    public static class MockAnalysisPlugin extends Plugin implements AnalysisPlugin {
        // Registers a pre-configured "reverse" filter backed by ReverseStringFilter.
        @Override
        public List<PreConfiguredTokenFilter> getPreConfiguredTokenFilters() {
            return singletonList(PreConfiguredTokenFilter.singleton("reverse", true, ReverseStringFilter::new));
        }
    }
    /**
     * Creates {@link IndexSettings} for an index named "index", stamping the supplied settings
     * with the current version as the index-created version.
     *
     * @param settings builder holding any additional index settings
     * @return the resulting index settings
     */
    private static IndexSettings indexSettingsOfCurrentVersion(Settings.Builder settings) {
        return IndexSettingsModule.newIndexSettings("index", settings
            .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
            .build());
    }
    /**
     * Prepares the two registries under test: {@code emptyRegistry} with no components, and
     * {@code nonEmptyRegistry} populated through {@link AnalysisModule} with the built-in
     * components plus {@link MockAnalysisPlugin}.
     */
    @Override
    public void setUp() throws Exception {
        super.setUp();
        Settings settings = Settings.builder()
            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
            .build();
        emptyRegistry = emptyAnalysisRegistry(settings);
        // Module loaded to register in-built normalizers for testing
        AnalysisModule module = new AnalysisModule(TestEnvironment.newEnvironment(settings), singletonList(new MockAnalysisPlugin()));
        nonEmptyRegistry = module.getAnalysisRegistry();
    }
public void testDefaultAnalyzers() throws IOException {
Version version = VersionUtils.randomVersion(random());
Settings settings = Settings
.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, version)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
IndexAnalyzers indexAnalyzers = emptyRegistry.build(idxSettings);
assertThat(indexAnalyzers.getDefaultIndexAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
assertThat(indexAnalyzers.getDefaultSearchAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
assertThat(indexAnalyzers.getDefaultSearchQuoteAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
}
public void testOverrideDefaultAnalyzer() throws IOException {
Version version = VersionUtils.randomVersion(random());
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
IndexAnalyzers indexAnalyzers = emptyRegistry.build(IndexSettingsModule.newIndexSettings("index", settings),
singletonMap("default", analyzerProvider("default"))
, emptyMap(), emptyMap(), emptyMap(), emptyMap());
assertThat(indexAnalyzers.getDefaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
assertThat(indexAnalyzers.getDefaultSearchAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
assertThat(indexAnalyzers.getDefaultSearchQuoteAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
}
public void testOverrideDefaultAnalyzerWithoutAnalysisModeAll() throws IOException {
Version version = VersionUtils.randomVersion(random());
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("index", settings);
TokenFilterFactory tokenFilter = new AbstractTokenFilterFactory(indexSettings, "my_filter", Settings.EMPTY) {
@Override
public AnalysisMode getAnalysisMode() {
return randomFrom(AnalysisMode.SEARCH_TIME, AnalysisMode.INDEX_TIME);
}
@Override
public TokenStream create(TokenStream tokenStream) {
return tokenStream;
}
};
TokenizerFactory tokenizer = new AbstractTokenizerFactory(indexSettings, Settings.EMPTY, "my_tokenizer") {
@Override
public Tokenizer create() {
return new StandardTokenizer();
}
};
Analyzer analyzer = new CustomAnalyzer(tokenizer, new CharFilterFactory[0], new TokenFilterFactory[] { tokenFilter });
MapperException ex = expectThrows(MapperException.class,
() -> emptyRegistry.build(IndexSettingsModule.newIndexSettings("index", settings),
singletonMap("default", new PreBuiltAnalyzerProvider("default", AnalyzerScope.INDEX, analyzer)), emptyMap(),
emptyMap(), emptyMap(), emptyMap()));
assertEquals("analyzer [default] contains filters [my_filter] that are not allowed to run in all mode.", ex.getMessage());
}
public void testNameClashNormalizer() throws IOException {
// Test out-of-the-box normalizer works OK.
IndexAnalyzers indexAnalyzers = nonEmptyRegistry.build(IndexSettingsModule.newIndexSettings("index", Settings.EMPTY));
assertNotNull(indexAnalyzers.getNormalizer("lowercase"));
assertThat(indexAnalyzers.getNormalizer("lowercase").normalize("field", "AbC").utf8ToString(), equalTo("abc"));
// Test that a name clash with a custom normalizer will favour the index's normalizer rather than the out-of-the-box
// one of the same name. (However this "feature" will be removed with https://github.com/elastic/elasticsearch/issues/22263 )
Settings settings = Settings.builder()
// Deliberately bad choice of normalizer name for the job it does.
.put("index.analysis.normalizer.lowercase.type", "custom")
.putList("index.analysis.normalizer.lowercase.filter", "reverse")
.build();
indexAnalyzers = nonEmptyRegistry.build(IndexSettingsModule.newIndexSettings("index", settings));
assertNotNull(indexAnalyzers.getNormalizer("lowercase"));
assertThat(indexAnalyzers.getNormalizer("lowercase").normalize("field","AbC").utf8ToString(), equalTo("CbA"));
}
public void testOverrideDefaultIndexAnalyzerIsUnsupported() {
Version version = VersionUtils.randomIndexCompatibleVersion(random());
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
AnalyzerProvider<?> defaultIndex = new PreBuiltAnalyzerProvider("default_index", AnalyzerScope.INDEX, new EnglishAnalyzer());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> emptyRegistry.build(IndexSettingsModule.newIndexSettings("index", settings),
singletonMap("default_index", defaultIndex), emptyMap(), emptyMap(), emptyMap(), emptyMap()));
assertTrue(e.getMessage().contains("[index.analysis.analyzer.default_index] is not supported"));
}
public void testOverrideDefaultSearchAnalyzer() {
Version version = VersionUtils.randomVersion(random());
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
IndexAnalyzers indexAnalyzers = emptyRegistry.build(IndexSettingsModule.newIndexSettings("index", settings),
singletonMap("default_search", analyzerProvider("default_search")), emptyMap(), emptyMap(), emptyMap(), emptyMap());
assertThat(indexAnalyzers.getDefaultIndexAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
assertThat(indexAnalyzers.getDefaultSearchAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
assertThat(indexAnalyzers.getDefaultSearchQuoteAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
}
/**
* Tests that {@code camelCase} filter names and {@code snake_case} filter names don't collide.
*/
public void testConfigureCamelCaseTokenFilter() throws IOException {
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
Settings indexSettings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put("index.analysis.filter.testFilter.type", "mock")
.put("index.analysis.filter.test_filter.type", "mock")
.put("index.analysis.analyzer.custom_analyzer_with_camel_case.tokenizer", "standard")
.putList("index.analysis.analyzer.custom_analyzer_with_camel_case.filter", "lowercase", "testFilter")
.put("index.analysis.analyzer.custom_analyzer_with_snake_case.tokenizer", "standard")
.putList("index.analysis.analyzer.custom_analyzer_with_snake_case.filter", "lowercase", "test_filter").build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
/* The snake_case version of the name should not filter out any stopwords while the
* camelCase version will filter out English stopwords. */
AnalysisPlugin plugin = new AnalysisPlugin() {
class MockFactory extends AbstractTokenFilterFactory {
MockFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(indexSettings, name, settings);
}
@Override
public TokenStream create(TokenStream tokenStream) {
if (name().equals("test_filter")) {
return new MockTokenFilter(tokenStream, MockTokenFilter.EMPTY_STOPSET);
}
return new MockTokenFilter(tokenStream, MockTokenFilter.ENGLISH_STOPSET);
}
}
@Override
public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
return singletonMap("mock", MockFactory::new);
}
};
IndexAnalyzers indexAnalyzers = new AnalysisModule(TestEnvironment.newEnvironment(settings),
singletonList(plugin)).getAnalysisRegistry().build(idxSettings);
// This shouldn't contain English stopwords
try (NamedAnalyzer custom_analyser = indexAnalyzers.get("custom_analyzer_with_camel_case")) {
assertNotNull(custom_analyser);
TokenStream tokenStream = custom_analyser.tokenStream("foo", "has a foo");
tokenStream.reset();
CharTermAttribute charTermAttribute = tokenStream.addAttribute(CharTermAttribute.class);
assertTrue(tokenStream.incrementToken());
assertEquals("has", charTermAttribute.toString());
assertTrue(tokenStream.incrementToken());
assertEquals("foo", charTermAttribute.toString());
assertFalse(tokenStream.incrementToken());
}
// This *should* contain English stopwords
try (NamedAnalyzer custom_analyser = indexAnalyzers.get("custom_analyzer_with_snake_case")) {
assertNotNull(custom_analyser);
TokenStream tokenStream = custom_analyser.tokenStream("foo", "has a foo");
tokenStream.reset();
CharTermAttribute charTermAttribute = tokenStream.addAttribute(CharTermAttribute.class);
assertTrue(tokenStream.incrementToken());
assertEquals("has", charTermAttribute.toString());
assertTrue(tokenStream.incrementToken());
assertEquals("a", charTermAttribute.toString());
assertTrue(tokenStream.incrementToken());
assertEquals("foo", charTermAttribute.toString());
assertFalse(tokenStream.incrementToken());
}
}
public void testBuiltInAnalyzersAreCached() throws IOException {
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
Settings indexSettings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
IndexAnalyzers indexAnalyzers = emptyAnalysisRegistry(settings).build(idxSettings);
IndexAnalyzers otherIndexAnalyzers = emptyAnalysisRegistry(settings).build(idxSettings);
final int numIters = randomIntBetween(5, 20);
for (int i = 0; i < numIters; i++) {
PreBuiltAnalyzers preBuiltAnalyzers = RandomPicks.randomFrom(random(), PreBuiltAnalyzers.values());
assertSame(indexAnalyzers.get(preBuiltAnalyzers.name()), otherIndexAnalyzers.get(preBuiltAnalyzers.name()));
}
}
public void testNoTypeOrTokenizerErrorMessage() throws IOException {
Version version = VersionUtils.randomVersion(random());
Settings settings = Settings
.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, version)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.putList("index.analysis.analyzer.test_analyzer.filter", new String[] {"lowercase", "stop", "shingle"})
.putList("index.analysis.analyzer.test_analyzer.char_filter", new String[] {"html_strip"})
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> emptyAnalysisRegistry(settings).build(idxSettings));
assertThat(e.getMessage(), equalTo("analyzer [test_analyzer] must specify either an analyzer type, or a tokenizer"));
}
public void testCloseIndexAnalyzersMultipleTimes() throws IOException {
IndexAnalyzers indexAnalyzers = emptyRegistry.build(indexSettingsOfCurrentVersion(Settings.builder()));
indexAnalyzers.close();
indexAnalyzers.close();
}
public void testEnsureCloseInvocationProperlyDelegated() throws IOException {
Settings settings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
PreBuiltAnalyzerProviderFactory mock = mock(PreBuiltAnalyzerProviderFactory.class);
AnalysisRegistry registry = new AnalysisRegistry(TestEnvironment.newEnvironment(settings), emptyMap(), emptyMap(),
emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap(), Collections.singletonMap("key", mock));
registry.close();
verify(mock).close();
}
public void testDeprecationsAndExceptions() throws IOException {
AnalysisPlugin plugin = new AnalysisPlugin() {
class MockFactory extends AbstractTokenFilterFactory {
MockFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(indexSettings, name, settings);
}
@Override
public TokenStream create(TokenStream tokenStream) {
if (indexSettings.getIndexVersionCreated().equals(Version.CURRENT)) {
deprecationLogger.deprecate(DeprecationCategory.ANALYSIS, "deprecated_token_filter",
"Using deprecated token filter [deprecated]");
}
return tokenStream;
}
}
class ExceptionFactory extends AbstractTokenFilterFactory {
ExceptionFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(indexSettings, name, settings);
}
@Override
public TokenStream create(TokenStream tokenStream) {
if (indexSettings.getIndexVersionCreated().equals(Version.CURRENT)) {
throw new IllegalArgumentException("Cannot use token filter [exception]");
}
return tokenStream;
}
}
class UnusedMockFactory extends AbstractTokenFilterFactory {
UnusedMockFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(indexSettings, name, settings);
}
@Override
public TokenStream create(TokenStream tokenStream) {
deprecationLogger.deprecate(DeprecationCategory.ANALYSIS, "unused_token_filter",
"Using deprecated token filter [unused]");
return tokenStream;
}
}
class NormalizerFactory extends AbstractTokenFilterFactory implements NormalizingTokenFilterFactory {
NormalizerFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(indexSettings, name, settings);
}
@Override
public TokenStream create(TokenStream tokenStream) {
deprecationLogger.deprecate(DeprecationCategory.ANALYSIS, "deprecated_normalizer",
"Using deprecated token filter [deprecated_normalizer]");
return tokenStream;
}
}
@Override
public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
return Map.of("deprecated", MockFactory::new, "unused", UnusedMockFactory::new,
"deprecated_normalizer", NormalizerFactory::new, "exception", ExceptionFactory::new);
}
};
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
Settings indexSettings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put("index.analysis.filter.deprecated.type", "deprecated")
.put("index.analysis.analyzer.custom.tokenizer", "standard")
.putList("index.analysis.analyzer.custom.filter", "lowercase", "deprecated")
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
new AnalysisModule(TestEnvironment.newEnvironment(settings),
singletonList(plugin)).getAnalysisRegistry().build(idxSettings);
// We should only get a warning from the token filter that is referenced in settings
assertWarnings("Using deprecated token filter [deprecated]");
indexSettings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.getPreviousVersion())
.put("index.analysis.filter.deprecated.type", "deprecated_normalizer")
.putList("index.analysis.normalizer.custom.filter", "lowercase", "deprecated_normalizer")
.put("index.analysis.filter.deprecated.type", "deprecated")
.put("index.analysis.filter.exception.type", "exception")
.put("index.analysis.analyzer.custom.tokenizer", "standard")
// exception will not throw because we're not on Version.CURRENT
.putList("index.analysis.analyzer.custom.filter", "lowercase", "deprecated", "exception")
.build();
idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
new AnalysisModule(TestEnvironment.newEnvironment(settings),
singletonList(plugin)).getAnalysisRegistry().build(idxSettings);
// We should only get a warning from the normalizer, because we're on a version where 'deprecated'
// works fine
assertWarnings("Using deprecated token filter [deprecated_normalizer]");
indexSettings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put("index.analysis.filter.exception.type", "exception")
.put("index.analysis.analyzer.custom.tokenizer", "standard")
// exception will not throw because we're not on Version.LATEST
.putList("index.analysis.analyzer.custom.filter", "lowercase", "exception")
.build();
IndexSettings exceptionSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
new AnalysisModule(TestEnvironment.newEnvironment(settings),
singletonList(plugin)).getAnalysisRegistry().build(exceptionSettings);
});
assertEquals("Cannot use token filter [exception]", e.getMessage());
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.spatial.geopoint.search.GeoPointInBBoxQuery;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.search.geo.LegacyInMemoryGeoBoundingBoxQuery;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.elasticsearch.test.geo.RandomShapeGenerator;
import org.locationtech.spatial4j.io.GeohashUtils;
import org.locationtech.spatial4j.shape.Rectangle;
import java.io.IOException;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.equalTo;
public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase<GeoBoundingBoxQueryBuilder> {
    /** Non-finite coordinate values (NaN and the two infinities); one is picked at random per tester. */
    private static Double[] brokenDoubles = {Double.NaN, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY};
    /**
     * Builds a random bounding-box query on the mapped geo-point field. Randomly picks one of the
     * corner-setter styles (GeoPoint objects, geohash strings, or raw doubles; top-left/bottom-right
     * vs. the OGC bottom-left/top-right order) so all setters get parse/serialize coverage.
     */
    @Override
    protected GeoBoundingBoxQueryBuilder doCreateTestQueryBuilder() {
        GeoBoundingBoxQueryBuilder builder = new GeoBoundingBoxQueryBuilder(GEO_POINT_FIELD_NAME);
        // Random rectangle supplies the corner coordinates (Y = latitude, X = longitude).
        Rectangle box = RandomShapeGenerator.xRandomRectangle(random(), RandomShapeGenerator.xRandomPoint(random()));

        if (randomBoolean()) {
            // check the top-left/bottom-right combination of setters
            int path = randomIntBetween(0, 2);
            switch (path) {
            case 0:
                builder.setCorners(
                        new GeoPoint(box.getMaxY(), box.getMinX()),
                        new GeoPoint(box.getMinY(), box.getMaxX()));
                break;
            case 1:
                builder.setCorners(
                        GeohashUtils.encodeLatLon(box.getMaxY(), box.getMinX()),
                        GeohashUtils.encodeLatLon(box.getMinY(), box.getMaxX()));
                break;
            default:
                builder.setCorners(box.getMaxY(), box.getMinX(), box.getMinY(), box.getMaxX());
            }
        } else {
            // check the bottom-left/ top-right combination of setters
            if (randomBoolean()) {
                builder.setCornersOGC(
                        new GeoPoint(box.getMinY(), box.getMinX()),
                        new GeoPoint(box.getMaxY(), box.getMaxX()));
            } else {
                builder.setCornersOGC(
                        GeohashUtils.encodeLatLon(box.getMinY(), box.getMinX()),
                        GeohashUtils.encodeLatLon(box.getMaxY(), box.getMaxX()));
            }
        }

        if (randomBoolean()) {
            builder.setValidationMethod(randomFrom(GeoValidationMethod.values()));
        }

        if (randomBoolean()) {
            builder.ignoreUnmapped(randomBoolean());
        }

        builder.type(randomFrom(GeoExecType.values()));
        return builder;
    }
public void testValidationNullFieldname() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new GeoBoundingBoxQueryBuilder((String) null));
assertEquals("Field name must not be empty.", e.getMessage());
}
public void testValidationNullType() {
GeoBoundingBoxQueryBuilder qb = new GeoBoundingBoxQueryBuilder("teststring");
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> qb.type((GeoExecType) null));
assertEquals("Type is not allowed to be null.", e.getMessage());
}
public void testValidationNullTypeString() {
GeoBoundingBoxQueryBuilder qb = new GeoBoundingBoxQueryBuilder("teststring");
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> qb.type((String) null));
assertEquals("cannot parse type from null string", e.getMessage());
}
    /** Runs the inherited to-query round-trip, but only when a mapping type is registered. */
    @Override
    public void testToQuery() throws IOException {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        super.testToQuery();
    }
    /** With no types registered, converting to a Lucene query must fail with a missing-field error. */
    public void testExceptionOnMissingTypes() throws IOException {
        // Complements testToQuery: this path only runs when NO type is registered.
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length == 0);
        QueryShardException e = expectThrows(QueryShardException.class, () -> super.testToQuery());
        assertEquals("failed to find geo_point field [mapped_geo_point]", e.getMessage());
    }
public void testBrokenCoordinateCannotBeSet() {
PointTester[] testers = { new TopTester(), new LeftTester(), new BottomTester(), new RightTester() };
GeoBoundingBoxQueryBuilder builder = createTestQueryBuilder();
builder.setValidationMethod(GeoValidationMethod.STRICT);
for (PointTester tester : testers) {
expectThrows(IllegalArgumentException.class, () -> tester.invalidateCoordinate(builder, true));
}
}
public void testBrokenCoordinateCanBeSetWithIgnoreMalformed() {
PointTester[] testers = { new TopTester(), new LeftTester(), new BottomTester(), new RightTester() };
GeoBoundingBoxQueryBuilder builder = createTestQueryBuilder();
builder.setValidationMethod(GeoValidationMethod.IGNORE_MALFORMED);
for (PointTester tester : testers) {
tester.invalidateCoordinate(builder, true);
}
}
public void testValidation() {
PointTester[] testers = { new TopTester(), new LeftTester(), new BottomTester(), new RightTester() };
for (PointTester tester : testers) {
QueryValidationException except = null;
GeoBoundingBoxQueryBuilder builder = createTestQueryBuilder();
tester.invalidateCoordinate(builder.setValidationMethod(GeoValidationMethod.COERCE), false);
except = builder.checkLatLon(true);
assertNull("Inner post 2.0 validation w/ coerce should ignore invalid "
+ tester.getClass().getName()
+ " coordinate: "
+ tester.invalidCoordinate + " ",
except);
tester.invalidateCoordinate(builder.setValidationMethod(GeoValidationMethod.COERCE), false);
except = builder.checkLatLon(false);
assertNull("Inner pre 2.0 validation w/ coerce should ignore invalid coordinate: "
+ tester.getClass().getName()
+ " coordinate: "
+ tester.invalidCoordinate + " ",
except);
tester.invalidateCoordinate(builder.setValidationMethod(GeoValidationMethod.STRICT), false);
except = builder.checkLatLon(true);
assertNull("Inner pre 2.0 validation w/o coerce should ignore invalid coordinate for old indexes: "
+ tester.getClass().getName()
+ " coordinate: "
+ tester.invalidCoordinate,
except);
tester.invalidateCoordinate(builder.setValidationMethod(GeoValidationMethod.STRICT), false);
except = builder.checkLatLon(false);
assertNotNull("Inner post 2.0 validation w/o coerce should detect invalid coordinate: "
+ tester.getClass().getName()
+ " coordinate: "
+ tester.invalidCoordinate,
except);
}
}
public void testTopBottomCannotBeFlipped() {
GeoBoundingBoxQueryBuilder builder = createTestQueryBuilder();
double top = builder.topLeft().getLat();
double left = builder.topLeft().getLon();
double bottom = builder.bottomRight().getLat();
double right = builder.bottomRight().getLon();
assumeTrue("top should not be equal to bottom for flip check", top != bottom);
logger.info("top: {} bottom: {}", top, bottom);
builder.setValidationMethod(GeoValidationMethod.STRICT);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.setCorners(bottom, left, top, right));
assertThat(e.getMessage(), containsString("top is below bottom corner:"));
}
public void testTopBottomCanBeFlippedOnIgnoreMalformed() {
GeoBoundingBoxQueryBuilder builder = createTestQueryBuilder();
double top = builder.topLeft().getLat();
double left = builder.topLeft().getLon();
double bottom = builder.bottomRight().getLat();
double right = builder.bottomRight().getLon();
assumeTrue("top should not be equal to bottom for flip check", top != bottom);
builder.setValidationMethod(GeoValidationMethod.IGNORE_MALFORMED).setCorners(bottom, left, top, right);
}
public void testLeftRightCanBeFlipped() {
GeoBoundingBoxQueryBuilder builder = createTestQueryBuilder();
double top = builder.topLeft().getLat();
double left = builder.topLeft().getLon();
double bottom = builder.bottomRight().getLat();
double right = builder.bottomRight().getLon();
builder.setValidationMethod(GeoValidationMethod.IGNORE_MALFORMED).setCorners(top, right, bottom, left);
builder.setValidationMethod(GeoValidationMethod.STRICT).setCorners(top, right, bottom, left);
}
public void testNormalization() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
GeoBoundingBoxQueryBuilder qb = createTestQueryBuilder();
if (getCurrentTypes().length != 0 && "mapped_geo".equals(qb.fieldName())) {
// only execute this test if we are running on a valid geo field
qb.setCorners(200, 200, qb.bottomRight().getLat(), qb.bottomRight().getLon());
qb.setValidationMethod(GeoValidationMethod.COERCE);
Query query = qb.toQuery(createShardContext());
if (query instanceof ConstantScoreQuery) {
ConstantScoreQuery result = (ConstantScoreQuery) query;
BooleanQuery bboxFilter = (BooleanQuery) result.getQuery();
for (BooleanClause clause : bboxFilter.clauses()) {
LegacyNumericRangeQuery boundary = (LegacyNumericRangeQuery) clause.getQuery();
if (boundary.getMax() != null) {
assertTrue("If defined, non of the maximum range values should be larger than 180",
boundary.getMax().intValue() <= 180);
}
}
} else {
assertTrue("memory queries should result in LegacyInMemoryGeoBoundingBoxQuery",
query instanceof LegacyInMemoryGeoBoundingBoxQuery);
}
}
}
public void testStrictnessDefault() {
assertFalse("Someone changed the default for coordinate validation - were the docs changed as well?",
GeoValidationMethod.DEFAULT_LENIENT_PARSING);
}
    /**
     * Asserts the concrete Lucene query class produced for the given builder:
     * unmapped field -> MatchNoDocsQuery; pre-2.2 index -> legacy indexed (ConstantScoreQuery)
     * or in-memory query depending on the execution type; 2.2+ -> GeoPointInBBoxQuery.
     */
    @Override
    protected void doAssertLuceneQuery(GeoBoundingBoxQueryBuilder queryBuilder, Query query, QueryShardContext context)
            throws IOException {
        MappedFieldType fieldType = context.fieldMapper(queryBuilder.fieldName());
        if (fieldType == null) {
            assertTrue("Found no indexed geo query.", query instanceof MatchNoDocsQuery);
        } else {
            if (context.indexVersionCreated().before(Version.V_2_2_0)) {
                if (queryBuilder.type() == GeoExecType.INDEXED) {
                    assertTrue("Found no indexed geo query.", query instanceof ConstantScoreQuery);
                } else {
                    assertTrue("Found no indexed geo query.", query instanceof LegacyInMemoryGeoBoundingBoxQuery);
                }
            } else {
                assertTrue("Found no indexed geo query.", query instanceof GeoPointInBBoxQuery);
            }
        }
    }
public abstract class PointTester {
private double brokenCoordinate = randomFrom(brokenDoubles);
private double invalidCoordinate;
public PointTester(double invalidCoodinate) {
this.invalidCoordinate = invalidCoodinate;
}
public void invalidateCoordinate(GeoBoundingBoxQueryBuilder qb, boolean useBrokenDouble) {
if (useBrokenDouble) {
fillIn(brokenCoordinate, qb);
} else {
fillIn(invalidCoordinate, qb);
}
}
protected abstract void fillIn(double fillIn, GeoBoundingBoxQueryBuilder qb);
}
    /** Corrupts the top latitude with a value above {@code GeoUtils.MAX_LAT}. */
    public class TopTester extends PointTester {
        public TopTester() {
            super(randomDoubleBetween(GeoUtils.MAX_LAT, Double.MAX_VALUE, false));
        }

        @Override
        public void fillIn(double coordinate, GeoBoundingBoxQueryBuilder qb) {
            // Only the top-left latitude is replaced; the other three coordinates are kept.
            qb.setCorners(coordinate, qb.topLeft().getLon(), qb.bottomRight().getLat(), qb.bottomRight().getLon());
        }
    }
    /** Corrupts the left longitude with a value below {@code GeoUtils.MIN_LON}. */
    public class LeftTester extends PointTester {
        public LeftTester() {
            super(randomDoubleBetween(-Double.MAX_VALUE, GeoUtils.MIN_LON, true));
        }

        @Override
        public void fillIn(double coordinate, GeoBoundingBoxQueryBuilder qb) {
            // Only the top-left longitude is replaced; the other three coordinates are kept.
            qb.setCorners(qb.topLeft().getLat(), coordinate, qb.bottomRight().getLat(), qb.bottomRight().getLon());
        }
    }
    /** Corrupts the bottom latitude with a value below {@code GeoUtils.MIN_LAT}. */
    public class BottomTester extends PointTester {
        public BottomTester() {
            super(randomDoubleBetween(-Double.MAX_VALUE, GeoUtils.MIN_LAT, false));
        }

        @Override
        public void fillIn(double coordinate, GeoBoundingBoxQueryBuilder qb) {
            // Only the bottom-right latitude is replaced; the other three coordinates are kept.
            qb.setCorners(qb.topLeft().getLat(), qb.topLeft().getLon(), coordinate, qb.bottomRight().getLon());
        }
    }
    /** Corrupts the right longitude with a value above {@code GeoUtils.MAX_LON}. */
    public class RightTester extends PointTester {
        public RightTester() {
            super(randomDoubleBetween(GeoUtils.MAX_LON, Double.MAX_VALUE, true));
        }

        @Override
        public void fillIn(double coordinate, GeoBoundingBoxQueryBuilder qb) {
            // Only the bottom-right longitude is replaced; the other three coordinates are kept.
            qb.setCorners(qb.topLeft().getLat(), qb.topLeft().getLon(), qb.bottomRight().getLat(), coordinate);
        }
    }
    /** Parses the corners given as [lon, lat] arrays (top_left / bottom_right). */
    public void testParsingAndToQuery1() throws IOException {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        String query = "{\n" +
                "    \"geo_bounding_box\":{\n" +
                "        \"" + GEO_POINT_FIELD_NAME+ "\":{\n" +
                "            \"top_left\":[-70, 40],\n" +
                "            \"bottom_right\":[-80, 30]\n" +
                "        }\n" +
                "    }\n" +
                "}\n";
        assertGeoBoundingBoxQuery(query);
    }
    /** Parses the corners given as {lat, lon} objects. */
    public void testParsingAndToQuery2() throws IOException {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        String query = "{\n" +
                "    \"geo_bounding_box\":{\n" +
                "        \"" + GEO_POINT_FIELD_NAME+ "\":{\n" +
                "            \"top_left\":{\n" +
                "                \"lat\":40,\n" +
                "                \"lon\":-70\n" +
                "            },\n" +
                "            \"bottom_right\":{\n" +
                "                \"lat\":30,\n" +
                "                \"lon\":-80\n" +
                "            }\n" +
                "        }\n" +
                "    }\n" +
                "}\n";
        assertGeoBoundingBoxQuery(query);
    }
    /** Parses the corners given as "lat, lon" strings. */
    public void testParsingAndToQuery3() throws IOException {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        String query = "{\n" +
                "    \"geo_bounding_box\":{\n" +
                "        \"" + GEO_POINT_FIELD_NAME+ "\":{\n" +
                "            \"top_left\":\"40, -70\",\n" +
                "            \"bottom_right\":\"30, -80\"\n" +
                "        }\n" +
                "    }\n" +
                "}\n";
        assertGeoBoundingBoxQuery(query);
    }
    /** Parses a geohash top-left corner mixed with a "lat, lon" string bottom-right corner. */
    public void testParsingAndToQuery4() throws IOException {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        String query = "{\n" +
                "    \"geo_bounding_box\":{\n" +
                "        \"" + GEO_POINT_FIELD_NAME+ "\":{\n" +
                // geohash "drn5x1g8cu2y" - presumably decodes to approximately (40, -70); verified by assertGeoBoundingBoxQuery
                "            \"top_left\":\"drn5x1g8cu2y\",\n" +
                "            \"bottom_right\":\"30, -80\"\n" +
                "        }\n" +
                "    }\n" +
                "}\n";
        assertGeoBoundingBoxQuery(query);
    }
    /** Parses the alternative top_right / bottom_left corner pair. */
    public void testParsingAndToQuery5() throws IOException {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        String query = "{\n" +
                "    \"geo_bounding_box\":{\n" +
                "        \"" + GEO_POINT_FIELD_NAME+ "\":{\n" +
                "            \"top_right\":\"40, -80\",\n" +
                "            \"bottom_left\":\"30, -70\"\n" +
                "        }\n" +
                "    }\n" +
                "}\n";
        assertGeoBoundingBoxQuery(query);
    }
    /** Parses the four individual side parameters (top/bottom/left/right). */
    public void testParsingAndToQuery6() throws IOException {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        String query = "{\n" +
                "    \"geo_bounding_box\":{\n" +
                "        \"" + GEO_POINT_FIELD_NAME+ "\":{\n" +
                "            \"right\": -80,\n" +
                "            \"top\": 40,\n" +
                "            \"left\": -70,\n" +
                "            \"bottom\": 30\n" +
                "        }\n" +
                "    }\n" +
                "}\n";
        assertGeoBoundingBoxQuery(query);
    }
    /**
     * Parses {@code query}, converts it to a Lucene query, and asserts the resulting box is
     * (top=40, left=-70, bottom=30, right=-80) regardless of which legacy/modern query class
     * the index version produced.
     */
    private void assertGeoBoundingBoxQuery(String query) throws IOException {
        QueryShardContext shardContext = createShardContext();
        Query parsedQuery = parseQuery(query).toQuery(shardContext);
        if (shardContext.indexVersionCreated().before(Version.V_2_2_0)) {
            // Pre-2.2 indexes produce the legacy in-memory query with explicit corner points.
            LegacyInMemoryGeoBoundingBoxQuery filter = (LegacyInMemoryGeoBoundingBoxQuery) parsedQuery;
            assertThat(filter.fieldName(), equalTo(GEO_POINT_FIELD_NAME));
            assertThat(filter.topLeft().lat(), closeTo(40, 1E-5));
            assertThat(filter.topLeft().lon(), closeTo(-70, 1E-5));
            assertThat(filter.bottomRight().lat(), closeTo(30, 1E-5));
            assertThat(filter.bottomRight().lon(), closeTo(-80, 1E-5));
        } else {
            // 2.2+ indexes produce a Lucene GeoPointInBBoxQuery exposing min/max lat/lon.
            GeoPointInBBoxQuery q = (GeoPointInBBoxQuery) parsedQuery;
            assertThat(q.getField(), equalTo(GEO_POINT_FIELD_NAME));
            assertThat(q.getMaxLat(), closeTo(40, 1E-5));
            assertThat(q.getMinLon(), closeTo(-70, 1E-5));
            assertThat(q.getMinLat(), closeTo(30, 1E-5));
            assertThat(q.getMaxLon(), closeTo(-80, 1E-5));
        }
    }
/**
 * Round-trips a geo_bounding_box query through JSON, and verifies that the
 * deprecated {@code geo_bbox} name still parses to an equal builder when
 * deprecation checking is disabled, but raises when it is enabled.
 */
public void testFromJson() throws IOException {
    String json =
            "{\n" +
            "  \"geo_bounding_box\" : {\n" +
            "    \"pin.location\" : {\n" +
            "      \"top_left\" : [ -74.1, 40.73 ],\n" +
            "      \"bottom_right\" : [ -71.12, 40.01 ]\n" +
            "    },\n" +
            "    \"validation_method\" : \"STRICT\",\n" +
            "    \"type\" : \"MEMORY\",\n" +
            "    \"ignore_unmapped\" : false,\n" +
            "    \"boost\" : 1.0\n" +
            "  }\n" +
            "}";
    GeoBoundingBoxQueryBuilder parsed = (GeoBoundingBoxQueryBuilder) parseQuery(json);
    checkGeneratedJson(json, parsed);
    assertEquals(json, "pin.location", parsed.fieldName());
    assertEquals(json, -74.1, parsed.topLeft().getLon(), 0.0001);
    assertEquals(json, 40.73, parsed.topLeft().getLat(), 0.0001);
    assertEquals(json, -71.12, parsed.bottomRight().getLon(), 0.0001);
    assertEquals(json, 40.01, parsed.bottomRight().getLat(), 0.0001);
    assertEquals(json, 1.0, parsed.boost(), 0.0001);
    assertEquals(json, GeoExecType.MEMORY, parsed.type());
    String deprecatedJson =
            "{\n" +
            "  \"geo_bbox\" : {\n" +
            "    \"pin.location\" : {\n" +
            "      \"top_left\" : [ -74.1, 40.73 ],\n" +
            "      \"bottom_right\" : [ -71.12, 40.01 ]\n" +
            "    },\n" +
            "    \"validation_method\" : \"STRICT\",\n" +
            "    \"type\" : \"MEMORY\",\n" +
            "    \"ignore_unmapped\" : false,\n" +
            "    \"boost\" : 1.0\n" +
            "  }\n" +
            "}";
    // BUG FIX: previously parsed `json` here, so the deprecated "geo_bbox"
    // shortcut was never exercised and the equality assertion passed trivially.
    QueryBuilder parsedGeoBboxShortcut = parseQuery(deprecatedJson, ParseFieldMatcher.EMPTY);
    assertThat(parsedGeoBboxShortcut, equalTo(parsed));
    // With strict matching the deprecated name must be rejected.
    IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(deprecatedJson));
    assertEquals("Deprecated field [geo_bbox] used, expected [geo_bounding_box] instead", e.getMessage());
}
public void testFromJsonCoerceFails() throws IOException {
    // The removed "coerce" option must be rejected as a deprecated field.
    String queryJson = String.join("\n",
            "{",
            "  \"geo_bounding_box\" : {",
            "    \"pin.location\" : {",
            "      \"top_left\" : [ -74.1, 40.73 ],",
            "      \"bottom_right\" : [ -71.12, 40.01 ]",
            "    },",
            "    \"coerce\" : true,",
            "    \"type\" : \"MEMORY\",",
            "    \"ignore_unmapped\" : false,",
            "    \"boost\" : 1.0",
            "  }",
            "}");
    IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(queryJson));
    assertTrue(e.getMessage().startsWith("Deprecated field "));
}
public void testFromJsonIgnoreMalformedFails() throws IOException {
    // The removed "ignore_malformed" option must be rejected as a deprecated field.
    String queryJson = String.join("\n",
            "{",
            "  \"geo_bounding_box\" : {",
            "    \"pin.location\" : {",
            "      \"top_left\" : [ -74.1, 40.73 ],",
            "      \"bottom_right\" : [ -71.12, 40.01 ]",
            "    },",
            "    \"ignore_malformed\" : true,",
            "    \"type\" : \"MEMORY\",",
            "    \"ignore_unmapped\" : false,",
            "    \"boost\" : 1.0",
            "  }",
            "}");
    IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(queryJson));
    assertTrue(e.getMessage().startsWith("Deprecated field "));
}
@Override
// Delegates to the base-class rewrite test, but only when at least one
// mapping type is registered (the query needs a mapped geo_point field).
public void testMustRewrite() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
super.testMustRewrite();
}
public void testIgnoreUnmapped() throws IOException {
    // With ignore_unmapped=true an unmapped field yields a match-none query...
    QueryShardContext context = createShardContext();
    final GeoBoundingBoxQueryBuilder lenientBuilder =
            new GeoBoundingBoxQueryBuilder("unmapped").setCorners(1.0, 0.0, 0.0, 1.0);
    lenientBuilder.ignoreUnmapped(true);
    Query lenientQuery = lenientBuilder.toQuery(context);
    assertThat(lenientQuery, notNullValue());
    assertThat(lenientQuery, instanceOf(MatchNoDocsQuery.class));
    // ...while ignore_unmapped=false must fail with a QueryShardException.
    final GeoBoundingBoxQueryBuilder strictBuilder =
            new GeoBoundingBoxQueryBuilder("unmapped").setCorners(1.0, 0.0, 0.0, 1.0);
    strictBuilder.ignoreUnmapped(false);
    QueryShardException e = expectThrows(QueryShardException.class, () -> strictBuilder.toQuery(context));
    assertThat(e.getMessage(), containsString("failed to find geo_point field [unmapped]"));
}
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.engine.test.db;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.util.List;
import org.flowable.common.engine.api.FlowableException;
import org.flowable.common.engine.api.FlowableIllegalArgumentException;
import org.flowable.common.engine.api.FlowableObjectNotFoundException;
import org.flowable.common.engine.impl.history.HistoryLevel;
import org.flowable.common.engine.impl.interceptor.CommandExecutor;
import org.flowable.engine.history.HistoricActivityInstance;
import org.flowable.engine.history.HistoricProcessInstance;
import org.flowable.engine.impl.cmd.SetProcessDefinitionVersionCmd;
import org.flowable.engine.impl.persistence.entity.ExecutionEntity;
import org.flowable.engine.impl.test.HistoryTestHelper;
import org.flowable.engine.impl.test.PluggableFlowableTestCase;
import org.flowable.engine.repository.ProcessDefinition;
import org.flowable.engine.runtime.Execution;
import org.flowable.engine.runtime.ProcessInstance;
import org.flowable.engine.test.Deployment;
import org.flowable.task.api.history.HistoricTaskInstance;
import org.junit.jupiter.api.Test;
/**
 * Tests for {@link SetProcessDefinitionVersionCmd}: migrating running process
 * instances (including sub-executions, call activities and user tasks) to a
 * newer version of their process definition.
 *
 * @author Falko Menge
 */
public class ProcessInstanceMigrationTest extends PluggableFlowableTestCase {

    private static final String TEST_PROCESS_WITH_PARALLEL_GATEWAY = "org/flowable/examples/bpmn/gateway/ParallelGatewayTest.testForkJoin.bpmn20.xml";
    private static final String TEST_PROCESS = "org/flowable/engine/test/db/ProcessInstanceMigrationTest.testSetProcessDefinitionVersion.bpmn20.xml";
    private static final String TEST_PROCESS_ACTIVITY_MISSING = "org/flowable/engine/test/db/ProcessInstanceMigrationTest.testSetProcessDefinitionVersionActivityMissing.bpmn20.xml";
    private static final String TEST_PROCESS_CALL_ACTIVITY = "org/flowable/engine/test/db/ProcessInstanceMigrationTest.withCallActivity.bpmn20.xml";
    private static final String TEST_PROCESS_USER_TASK_V1 = "org/flowable/engine/test/db/ProcessInstanceMigrationTest.testSetProcessDefinitionVersionWithTask.bpmn20.xml";
    private static final String TEST_PROCESS_USER_TASK_V2 = "org/flowable/engine/test/db/ProcessInstanceMigrationTest.testSetProcessDefinitionVersionWithTaskV2.bpmn20.xml";
    private static final String TEST_PROCESS_NESTED_SUB_EXECUTIONS = "org/flowable/engine/test/db/ProcessInstanceMigrationTest.testSetProcessDefinitionVersionSubExecutionsNested.bpmn20.xml";

    @Test
    public void testSetProcessDefinitionVersionEmptyArguments() {
        // Constructor argument validation: null/empty id, null/negative version.
        assertThatThrownBy(() -> new SetProcessDefinitionVersionCmd(null, 23))
                .isExactlyInstanceOf(FlowableIllegalArgumentException.class)
                .hasMessage("The process instance id is mandatory, but 'null' has been provided.");
        assertThatThrownBy(() -> new SetProcessDefinitionVersionCmd("", 23))
                .isExactlyInstanceOf(FlowableIllegalArgumentException.class)
                .hasMessage("The process instance id is mandatory, but '' has been provided.");
        assertThatThrownBy(() -> new SetProcessDefinitionVersionCmd("42", null))
                .isExactlyInstanceOf(FlowableIllegalArgumentException.class)
                .hasMessage("The process definition version is mandatory, but 'null' has been provided.");
        assertThatThrownBy(() -> new SetProcessDefinitionVersionCmd("42", -1))
                .isExactlyInstanceOf(FlowableIllegalArgumentException.class)
                .hasMessage("The process definition version must be positive, but '-1' has been provided.");
    }

    @Test
    public void testSetProcessDefinitionVersionNonExistingPI() {
        // Executing the command for an unknown process instance id must fail.
        CommandExecutor commandExecutor = processEngineConfiguration.getCommandExecutor();
        assertThatThrownBy(() -> commandExecutor.execute(new SetProcessDefinitionVersionCmd("42", 23)))
                .isExactlyInstanceOf(FlowableObjectNotFoundException.class)
                .hasMessage("No process instance found for id = '42'.");
    }

    @Test
    @Deployment(resources = { TEST_PROCESS_WITH_PARALLEL_GATEWAY })
    public void testSetProcessDefinitionVersionPIIsSubExecution() {
        // start process instance
        ProcessInstance pi = runtimeService.startProcessInstanceByKey("forkJoin");

        // passing a child execution id instead of the root id must be rejected
        Execution execution = runtimeService.createExecutionQuery().activityId("receivePayment").singleResult();
        CommandExecutor commandExecutor = processEngineConfiguration.getCommandExecutor();
        SetProcessDefinitionVersionCmd command = new SetProcessDefinitionVersionCmd(execution.getId(), 1);
        assertThatThrownBy(() -> commandExecutor.execute(command))
                .isInstanceOf(FlowableException.class)
                .hasMessage("A process instance id is required, but the provided id '" + execution.getId() + "' points to a child execution of process instance '" + pi.getId() + "'. Please invoke the "
                        + command.getClass().getSimpleName() + " with a root execution id.");
    }

    @Test
    @Deployment(resources = { TEST_PROCESS })
    public void testSetProcessDefinitionVersionNonExistingPD() {
        // start process instance
        ProcessInstance pi = runtimeService.startProcessInstanceByKey("receiveTask");

        // migrating to a version that was never deployed must fail
        CommandExecutor commandExecutor = processEngineConfiguration.getCommandExecutor();
        assertThatThrownBy(() -> commandExecutor.execute(new SetProcessDefinitionVersionCmd(pi.getId(), 23)))
                .isExactlyInstanceOf(FlowableObjectNotFoundException.class)
                .hasMessage("no processes deployed with key = 'receiveTask' and version = '23'");
    }

    @Test
    @Deployment(resources = { TEST_PROCESS })
    public void testSetProcessDefinitionVersionActivityMissing() {
        // start process instance
        ProcessInstance pi = runtimeService.startProcessInstanceByKey("receiveTask");

        // check that receive task has been reached
        Execution execution = runtimeService.createExecutionQuery().activityId("waitState1").singleResult();
        assertThat(execution).isNotNull();

        // deploy new version of the process definition (which lacks the current activity)
        org.flowable.engine.repository.Deployment deployment = repositoryService.createDeployment().addClasspathResource(TEST_PROCESS_ACTIVITY_MISSING).deploy();
        assertThat(repositoryService.createProcessDefinitionQuery().count()).isEqualTo(2);

        // migrating must fail because the current activity is missing in the new version
        CommandExecutor commandExecutor = processEngineConfiguration.getCommandExecutor();
        SetProcessDefinitionVersionCmd setProcessDefinitionVersionCmd = new SetProcessDefinitionVersionCmd(pi.getId(), 2);
        assertThatThrownBy(() -> commandExecutor.execute(setProcessDefinitionVersionCmd))
                .isInstanceOf(FlowableException.class)
                .hasMessageContaining("The new process definition (key = 'receiveTask') does not contain the current activity (id = 'waitState1') of the process instance (id = '");

        // undeploy "manually" deployed process definition
        repositoryService.deleteDeployment(deployment.getId(), true);
    }

    @Test
    @Deployment
    public void testSetProcessDefinitionVersion() {
        // start process instance
        ProcessInstance pi = runtimeService.startProcessInstanceByKey("receiveTask");

        // check that receive task has been reached
        Execution execution = runtimeService.createExecutionQuery().processInstanceId(pi.getId()).activityId("waitState1").singleResult();
        assertThat(execution).isNotNull();

        // deploy new version of the process definition
        repositoryService.createDeployment().addClasspathResource(TEST_PROCESS).deploy();
        assertThat(repositoryService.createProcessDefinitionQuery().count()).isEqualTo(2);

        // migrate process instance to new process definition version
        CommandExecutor commandExecutor = processEngineConfiguration.getCommandExecutor();
        commandExecutor.execute(new SetProcessDefinitionVersionCmd(pi.getId(), 2));

        // signal process instance
        runtimeService.trigger(execution.getId());

        // check that the instance now uses the new process definition version
        ProcessDefinition newProcessDefinition = repositoryService.createProcessDefinitionQuery().processDefinitionVersion(2).singleResult();
        pi = runtimeService.createProcessInstanceQuery().processInstanceId(pi.getId()).singleResult();
        assertThat(pi.getProcessDefinitionId()).isEqualTo(newProcessDefinition.getId());

        // check history: the historic instance and unfinished activities must point at the new version
        if (HistoryTestHelper.isHistoryLevelAtLeast(HistoryLevel.ACTIVITY, processEngineConfiguration)) {
            HistoricProcessInstance historicPI = historyService.createHistoricProcessInstanceQuery().processInstanceId(pi.getId()).singleResult();
            assertThat(historicPI.getProcessDefinitionId()).isEqualTo(newProcessDefinition.getId());

            List<HistoricActivityInstance> historicActivities = historyService
                    .createHistoricActivityInstanceQuery()
                    .processInstanceId(pi.getId())
                    .unfinished()
                    .list();
            assertThat(historicActivities)
                    .extracting(HistoricActivityInstance::getProcessDefinitionId)
                    .containsExactly(newProcessDefinition.getId());
        }

        deleteDeployments();
    }

    @Test
    @Deployment(resources = { TEST_PROCESS_WITH_PARALLEL_GATEWAY })
    public void testSetProcessDefinitionVersionSubExecutions() {
        // start process instance
        ProcessInstance pi = runtimeService.startProcessInstanceByKey("forkJoin");

        // check that the user tasks have been reached
        assertThat(taskService.createTaskQuery().count()).isEqualTo(2);

        // deploy new version of the process definition
        org.flowable.engine.repository.Deployment deployment = repositoryService.createDeployment().addClasspathResource(TEST_PROCESS_WITH_PARALLEL_GATEWAY).deploy();
        assertThat(repositoryService.createProcessDefinitionQuery().count()).isEqualTo(2);

        // migrate process instance to new process definition version
        CommandExecutor commandExecutor = processEngineConfiguration.getCommandExecutor();
        commandExecutor.execute(new SetProcessDefinitionVersionCmd(pi.getId(), 2));

        // check that all executions of the instance now use the new process
        // definition version
        ProcessDefinition newProcessDefinition = repositoryService.createProcessDefinitionQuery().processDefinitionVersion(2).singleResult();
        List<Execution> executions = runtimeService.createExecutionQuery().processInstanceId(pi.getId()).list();
        for (Execution execution : executions) {
            assertThat(((ExecutionEntity) execution).getProcessDefinitionId()).isEqualTo(newProcessDefinition.getId());
        }

        // undeploy "manually" deployed process definition
        repositoryService.deleteDeployment(deployment.getId(), true);
    }

    @Test
    @Deployment(resources = { TEST_PROCESS_CALL_ACTIVITY })
    public void testSetProcessDefinitionVersionWithCallActivity() {
        // start process instance
        ProcessInstance pi = runtimeService.startProcessInstanceByKey("parentProcess");

        // check that receive task has been reached (in the called child process)
        Execution execution = runtimeService.createExecutionQuery().activityId("waitState1").processDefinitionKey("childProcess").singleResult();
        assertThat(execution).isNotNull();

        // deploy new version of the process definition
        org.flowable.engine.repository.Deployment deployment = repositoryService.createDeployment().addClasspathResource(TEST_PROCESS_CALL_ACTIVITY).deploy();
        assertThat(repositoryService.createProcessDefinitionQuery().processDefinitionKey("parentProcess").count()).isEqualTo(2);

        // migrate process instance to new process definition version
        CommandExecutor commandExecutor = processEngineConfiguration.getCommandExecutor();
        commandExecutor.execute(new SetProcessDefinitionVersionCmd(pi.getId(), 2));

        // signal process instance
        runtimeService.trigger(execution.getId());

        // should be finished now
        assertThat(runtimeService.createProcessInstanceQuery().processInstanceId(pi.getId()).count()).isZero();

        // undeploy "manually" deployed process definition
        repositoryService.deleteDeployment(deployment.getId(), true);
    }

    @Test
    @Deployment(resources = { TEST_PROCESS_USER_TASK_V1 })
    public void testSetProcessDefinitionVersionWithWithTask() throws Exception {
        // BUG FIX: the whole body used to be wrapped in try { ... } catch (Exception ex)
        // { ex.printStackTrace(); } which swallowed every failure, so this test could
        // never fail on errors. Exceptions now propagate and fail the test.

        // start process instance
        ProcessInstance pi = runtimeService.startProcessInstanceByKey("userTask");

        // check that user task has been reached
        assertThat(taskService.createTaskQuery().processInstanceId(pi.getId()).count()).isEqualTo(1);

        // deploy new version of the process definition
        repositoryService.createDeployment().addClasspathResource(TEST_PROCESS_USER_TASK_V2).deploy();
        assertThat(repositoryService.createProcessDefinitionQuery().processDefinitionKey("userTask").count()).isEqualTo(2);

        ProcessDefinition newProcessDefinition = repositoryService.createProcessDefinitionQuery().processDefinitionKey("userTask").processDefinitionVersion(2).singleResult();

        // migrate process instance to new process definition version
        processEngineConfiguration.getCommandExecutor().execute(new SetProcessDefinitionVersionCmd(pi.getId(), 2));

        // check UserTask: it must now reference the new definition and expose the v2 form key
        org.flowable.task.api.Task task = taskService.createTaskQuery().processInstanceId(pi.getId()).singleResult();
        assertThat(task.getProcessDefinitionId()).isEqualTo(newProcessDefinition.getId());
        assertThat(formService.getTaskFormData(task.getId()).getFormKey()).isEqualTo("testFormKey");

        if (HistoryTestHelper.isHistoryLevelAtLeast(HistoryLevel.ACTIVITY, processEngineConfiguration)) {
            HistoricTaskInstance historicTask = historyService.createHistoricTaskInstanceQuery().processInstanceId(pi.getId()).singleResult();
            assertThat(historicTask.getProcessDefinitionId()).isEqualTo(newProcessDefinition.getId());
            assertThat(formService.getTaskFormData(historicTask.getId()).getFormKey()).isEqualTo("testFormKey");
        }

        // continue
        taskService.complete(task.getId());
        assertProcessEnded(pi.getId());
        deleteDeployments();
    }

    @Test
    @Deployment(resources = { TEST_PROCESS_NESTED_SUB_EXECUTIONS })
    public void testSetProcessDefinitionVersionSubExecutionsNested() {
        // start process instance
        ProcessInstance pi = runtimeService.startProcessInstanceByKey("forkJoinNested");

        // check that the user tasks have been reached
        assertThat(taskService.createTaskQuery().count()).isEqualTo(2);

        // deploy new version of the process definition
        org.flowable.engine.repository.Deployment deployment = repositoryService.createDeployment().addClasspathResource(TEST_PROCESS_NESTED_SUB_EXECUTIONS).deploy();
        assertThat(repositoryService.createProcessDefinitionQuery().count()).isEqualTo(2);

        // migrate process instance to new process definition version
        CommandExecutor commandExecutor = processEngineConfiguration.getCommandExecutor();
        commandExecutor.execute(new SetProcessDefinitionVersionCmd(pi.getId(), 2));

        // check that all executions of the instance now use the new process
        // definition version
        ProcessDefinition newProcessDefinition = repositoryService.createProcessDefinitionQuery().processDefinitionVersion(2).singleResult();
        List<Execution> executions = runtimeService.createExecutionQuery().processInstanceId(pi.getId()).list();
        for (Execution execution : executions) {
            assertThat(((ExecutionEntity) execution).getProcessDefinitionId()).isEqualTo(newProcessDefinition.getId());
        }

        // undeploy "manually" deployed process definition
        repositoryService.deleteDeployment(deployment.getId(), true);
    }
}
| |
/*
* Licensed to Crate.io GmbH ("Crate") under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership. Crate licenses
* this file to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial agreement.
*/
package io.crate.testing;
import io.crate.analyze.relations.DocTableRelation;
import io.crate.data.BatchIterators;
import io.crate.data.Input;
import io.crate.execution.dml.upsert.GeneratedColumns;
import io.crate.execution.dml.upsert.InsertSourceGen;
import io.crate.execution.engine.collect.collectors.LuceneBatchIterator;
import io.crate.expression.InputFactory;
import io.crate.expression.reference.doc.lucene.CollectorContext;
import io.crate.expression.reference.doc.lucene.LuceneCollectorExpression;
import io.crate.expression.symbol.FunctionCopyVisitor;
import io.crate.expression.symbol.Literal;
import io.crate.expression.symbol.ParameterSymbol;
import io.crate.expression.symbol.Symbol;
import io.crate.lucene.LuceneQueryBuilder;
import io.crate.metadata.ColumnIdent;
import io.crate.metadata.CoordinatorTxnCtx;
import io.crate.metadata.Schemas;
import io.crate.metadata.doc.DocSchemaInfo;
import io.crate.metadata.doc.DocTableInfo;
import io.crate.metadata.table.SchemaInfo;
import io.crate.planner.PlannerContext;
import io.crate.planner.optimizer.symbol.Optimizer;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.stream.Collectors;
import static java.util.Objects.requireNonNull;
/**
 * Test helper that creates a single-table Lucene index from a CREATE TABLE
 * statement, lets tests index raw values into it, and then converts SQL
 * expressions to Lucene {@link Query} objects and runs them against the index.
 *
 * Build instances via {@link Builder}; close after use to release the
 * underlying {@link IndexEnv}.
 */
public final class QueryTester implements AutoCloseable {
// Opens a LuceneBatchIterator over the index for a (column, query) pair; wired by Builder.
private final BiFunction<ColumnIdent, Query, LuceneBatchIterator> getIterator;
// Parses + normalizes an SQL expression (with positional params) into a Symbol.
private final BiFunction<String, Object[], Symbol> expressionToSymbol;
// Converts a Symbol into the Lucene query that would be executed for it.
private final Function<Symbol, Query> symbolToQuery;
// Owns the Lucene writer/mapper/searcher resources; closed in close().
private final IndexEnv indexEnv;
public static class Builder {
private final DocTableInfo table;
private final SQLExecutor sqlExecutor;
private final SqlExpressions expressions;
private final PlannerContext plannerContext;
private final DocTableRelation docTableRelation;
private final IndexEnv indexEnv;
private final LuceneQueryBuilder queryBuilder;
/**
 * Creates the cluster/table setup for {@code createTableStmt}.
 * NOTE: initialization order matters — sqlExecutor feeds plannerContext,
 * the schema lookup feeds {@code table}, and both feed {@code indexEnv}.
 */
public Builder(Path tempDir,
ThreadPool threadPool,
ClusterService clusterService,
Version indexVersion,
String createTableStmt,
AbstractModule... additionalModules) throws IOException {
sqlExecutor = SQLExecutor
.builder(clusterService, additionalModules)
.addTable(createTableStmt)
.build();
plannerContext = sqlExecutor.getPlannerContext(clusterService.state());
DocSchemaInfo docSchema = findDocSchema(sqlExecutor.schemas());
// Exactly one user table is expected; take the first (only) one.
table = (DocTableInfo) docSchema.getTables().iterator().next();
indexEnv = new IndexEnv(
threadPool,
table,
clusterService.state(),
indexVersion,
tempDir
);
queryBuilder = new LuceneQueryBuilder(plannerContext.nodeContext());
docTableRelation = new DocTableRelation(table);
expressions = new SqlExpressions(
Collections.singletonMap(table.ident(), docTableRelation),
docTableRelation
);
}
// Returns the first DocSchemaInfo; throws if the CREATE TABLE produced none.
private DocSchemaInfo findDocSchema(Schemas schemas) {
for (SchemaInfo schema : schemas) {
if (schema instanceof DocSchemaInfo) {
return (DocSchemaInfo) schema;
}
}
throw new IllegalArgumentException("Create table statement must result in the creation of a user table");
}
// Indexes each value as its own document in the given column.
public Builder indexValues(String column, Object ... values) throws IOException {
for (Object value : values) {
indexValue(column, value);
}
return this;
}
// Generates a one-column source document for `value` and adds it to the index writer.
void indexValue(String column, Object value) throws IOException {
DocumentMapper mapper = indexEnv.mapperService().documentMapper();
InsertSourceGen sourceGen = InsertSourceGen.of(
CoordinatorTxnCtx.systemTransactionContext(),
plannerContext.nodeContext(),
table,
table.concreteIndices()[0],
GeneratedColumns.Validation.NONE,
Collections.singletonList(table.getReference(ColumnIdent.fromPath(column)))
);
BytesReference source = sourceGen.generateSourceAndCheckConstraintsAsBytesReference(new Object[]{value});
SourceToParse sourceToParse = new SourceToParse(
table.concreteIndices()[0],
UUIDs.randomBase64UUID(),
source,
XContentType.JSON
);
ParsedDocument parsedDocument = mapper.parse(sourceToParse);
indexEnv.writer().addDocuments(parsedDocument.docs());
}
// Builds a batch iterator that yields `column` values of the docs matching `query`.
private LuceneBatchIterator getIterator(ColumnIdent column, Query query) {
InputFactory inputFactory = new InputFactory(plannerContext.nodeContext());
InputFactory.Context<LuceneCollectorExpression<?>> ctx = inputFactory.ctxForRefs(
CoordinatorTxnCtx.systemTransactionContext(), indexEnv.luceneReferenceResolver());
Input<?> input = ctx.add(requireNonNull(table.getReference(column),
"column must exist in created table: " + column));
IndexSearcher indexSearcher;
try {
// NOTE(review): this DirectoryReader is never explicitly closed; it lives
// until the JVM/test tears down. Acceptable for a test helper — confirm.
indexSearcher = new IndexSearcher(DirectoryReader.open(indexEnv.writer()));
} catch (IOException e) {
throw new RuntimeException(e);
}
return new LuceneBatchIterator(
indexSearcher,
query,
null,
false,
new CollectorContext(),
Collections.singletonList(input),
ctx.expressions()
);
}
/**
 * Commits pending writes and wires the three conversion functions into a
 * QueryTester. The expression function binds positional parameters by
 * replacing each ParameterSymbol with a Literal built from params[index].
 */
public QueryTester build() throws IOException {
indexEnv.writer().commit();
CoordinatorTxnCtx systemTxnCtx = CoordinatorTxnCtx.systemTransactionContext();
return new QueryTester(
this::getIterator,
(expr, params) -> {
Symbol symbol = expressions.asSymbol(expr);
Symbol boundSymbol = symbol.accept(new FunctionCopyVisitor<>() {
@Override
public Symbol visitParameterSymbol(ParameterSymbol parameterSymbol, Object context) {
Object param = params[parameterSymbol.index()];
return Literal.ofUnchecked(
parameterSymbol.valueType(),
parameterSymbol.valueType().sanitizeValue(param)
);
}
}, null);
return Optimizer.optimizeCasts(expressions.normalize(boundSymbol), plannerContext);
},
symbol -> queryBuilder.convert(
Optimizer.optimizeCasts(symbol,plannerContext),
systemTxnCtx,
indexEnv.mapperService(),
indexEnv.indexService().index().getName(),
indexEnv.queryShardContext(),
table,
indexEnv.indexCache()
).query(),
indexEnv
);
}
}
// Private: instances are created via Builder#build only.
private QueryTester(BiFunction<ColumnIdent, Query, LuceneBatchIterator> getIterator,
BiFunction<String, Object[], Symbol> expressionToSymbol,
Function<Symbol, Query> symbolToQuery,
IndexEnv indexEnv) {
this.getIterator = getIterator;
this.expressionToSymbol = expressionToSymbol;
this.symbolToQuery = symbolToQuery;
this.indexEnv = indexEnv;
}
// Fresh searcher over the current writer state.
// NOTE(review): the DirectoryReader opened here is not closed by this class.
public IndexSearcher searcher() throws IOException {
return new IndexSearcher(DirectoryReader.open(indexEnv.writer()));
}
// Converts an SQL expression (with positional parameters) to a Lucene query.
public Query toQuery(String expression, Object ... params) {
return symbolToQuery.apply(expressionToSymbol.apply(expression, params));
}
// Converts an already-built Symbol to a Lucene query.
public Query toQuery(Symbol expression) {
return symbolToQuery.apply(expression);
}
/**
 * Runs {@code expression} as a query and returns the {@code resultColumn}
 * value of every matching document (collected within 5 seconds).
 */
public List<Object> runQuery(String resultColumn, String expression, Object ... params) throws Exception {
Query query = toQuery(expression, params);
LuceneBatchIterator batchIterator = getIterator.apply(ColumnIdent.fromPath(resultColumn), query);
return BatchIterators.collect(
batchIterator,
Collectors.mapping(row -> row.get(0), Collectors.toList())
).get(5, TimeUnit.SECONDS);
}
@Override
public void close() throws Exception {
indexEnv.close();
}
}
| |
/**
PostGrabber.java
***********************************************************************************************************************
Description:
Revision History:
-----------------------------------------------------------------------------------------------------------------------
Date Author Reason for Change
-----------------------------------------------------------------------------------------------------------------------
01-Aug-2017 Gurpreet Singh Saini Initial Version
***********************************************************************************************************************
*/
package com.poster.grabber.service;
import java.io.File;
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Calendar;
import org.apache.commons.io.FileExistsException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.client.RestTemplate;
import com.poster.grabber.model.MovieModel;
import com.poster.grabber.model.Result;
/**
*
*/
public class PostGrabber {
// Root directory that is scanned for movie files and folders.
private String directory;
// Creates a grabber operating on the given root directory.
public PostGrabber(String directory) {
this.directory = directory;
}
// File name written for the per-movie folder thumbnail image.
private static final String FOLDER_THUMBNAIL = "Folder.jpg";
// Release year is appended to folder names as " (YYYY)".
private static final String YEAR_PREFIX = "(";
private static final String YEAR_POSTFIX = ")";
// NOTE(review): TMDb API key is hard-coded in source; consider moving it to configuration.
private static final String API_KEY = "682695192e252905a45c46ff9e0011a5";
// TMDb movie search endpoint (query text is appended).
private static final String SEARCH_URL = "https://api.themoviedb.org/3/search/movie?api_key=" + API_KEY + "&query=";
// Base URL for 500px-wide poster images (poster path is appended).
private static final String POSTER_URL = "http://image.tmdb.org/t/p/w500";
// Character class matching a digit, apostrophe or '(' — used to cut off the title.
private static final String FILTER_NAME_REGEX = "[\\d\\'(']";
/**
 * Opens a directory stream over the configured root directory.
 * The caller is responsible for closing the returned stream.
 *
 * @return stream over the entries of {@code this.directory}
 * @throws IOException if the directory cannot be opened
 */
private DirectoryStream<Path> getFiles() throws IOException {
    return Files.newDirectoryStream(Paths.get(this.directory));
}
/**
 * Checks whether the given directory already contains a thumbnail file
 * (a file named {@code Folder.jpg}, compared case-insensitively).
 *
 * @param path directory to inspect
 * @return true if a thumbnail entry exists; false otherwise or on I/O error
 */
private boolean isThumbnailExist(Path path) {
    // try-with-resources: the original leaked the DirectoryStream on every call
    // (and especially on the early `return true`).
    try (DirectoryStream<Path> dirStream = Files.newDirectoryStream(path)) {
        for (Path subPath : dirStream) {
            if (subPath.getFileName().toString().equalsIgnoreCase(FOLDER_THUMBNAIL)) {
                return true;
            }
        }
    } catch (IOException e) {
        System.out.println("Error while getting thumbnail for: " + path);
        e.printStackTrace();
    }
    return false;
}
/**
 * Strips the file extension from a file name.
 *
 * @param name file name, possibly with an extension
 * @return the name without its extension
 */
private String filterExt(String name) {
    return FilenameUtils.removeExtension(name);
}
/**
 * Normalizes a raw file name into a searchable movie title: dots become
 * spaces, then everything from the first digit, apostrophe or '(' onward
 * is cut off (see {@code FILTER_NAME_REGEX}).
 *
 * @param name raw file or folder name
 * @return the cleaned-up title portion
 */
private String filterName(String name) {
    String spaced = name.replace(".", " ");
    return spaced.split(FILTER_NAME_REGEX)[0];
}
/**
 * Looks the (filtered) name up on the TMDb search endpoint and returns the
 * first matching movie.
 *
 * @param searchName raw name to search for (filtered via {@code filterName})
 * @return the first search result, or null if none was found or the request failed
 */
private MovieModel searchName(String searchName) {
    String title = filterName(searchName);
    System.out.println("Getting details for: " + title);
    MovieModel match = null;
    try {
        ResponseEntity<Result> response =
                new RestTemplate().getForEntity(SEARCH_URL + title, Result.class);
        if (response.getStatusCode() == HttpStatus.OK) {
            Result body = response.getBody();
            if (body != null && !body.getResults().isEmpty()) {
                match = body.getResults().get(0);
            }
        }
    } catch (Exception e) {
        // Best-effort lookup: log and fall through with null.
        System.out.println("Exception while doing operation");
        e.printStackTrace();
    }
    return match;
}
/**
 * Downloads the movie's poster image into {@code path} as {@code Folder.jpg}.
 *
 * @param movie movie whose poster path is fetched from TMDb
 * @param path  directory the thumbnail is written into
 */
private void downloadPoster(MovieModel movie, Path path) {
    try {
        System.out.println("Downloading poster for:" + movie.getOriginal_title());
        RestTemplate down = new RestTemplate();
        byte[] imageBytes = down.getForObject(POSTER_URL + movie.getPoster_path(), byte[].class);
        if (imageBytes == null) {
            // getForObject returns null on an empty body; previously this would
            // have failed inside Files.write.
            System.out.println("No poster data received for: " + path.getFileName());
            return;
        }
        // Path.resolve instead of manual string + File.separator concatenation.
        Files.write(path.resolve(FOLDER_THUMBNAIL), imageBytes);
        System.out.println("Successfully created image for: " + path.getFileName());
    } catch (IOException e) {
        System.out.println("Exception while doing operation");
        e.printStackTrace();
    }
}
/**
 * Renames a movie directory to "&lt;title&gt; (&lt;year&gt;)" based on the
 * movie's metadata. If the destination already exists the rename is treated
 * as already done.
 *
 * @param movie      movie metadata providing title and release date
 * @param sourcePath directory to rename
 */
private void renameDir(MovieModel movie, Path sourcePath) {
    Path destPath = null;
    try {
        System.out.println("Renaming directory: " + movie.getOriginal_title());
        // Strip ':' from the title (not allowed in directory names).
        movie.setOriginal_title(movie.getOriginal_title().replaceAll(":", ""));
        Calendar releaseCal = Calendar.getInstance();
        releaseCal.setTime(movie.getRelease_date());
        String dest = this.directory + File.separator + movie.getOriginal_title() + " " + YEAR_PREFIX
                + releaseCal.get(Calendar.YEAR) + YEAR_POSTFIX;
        destPath = Paths.get(dest);
        FileUtils.moveDirectory(sourcePath.toFile(), destPath.toFile());
        System.out.println("Successfully renamed to: " + dest);
    } catch (FileExistsException e) {
        // Destination already present: consider the rename done.
        System.out.println("Successfully renamed to: " + destPath);
    } catch (Exception e) {
        System.out.println("Exception while doing operation");
        e.printStackTrace();
    }
}
/**
 * Creates a "&lt;title&gt; (&lt;release year&gt;)" directory for the movie, downloads
 * its poster into it, and moves the source media file inside.
 *
 * @param movie      movie details used to name the new directory
 * @param sourceFile media file to move into the new directory
 */
private void createDirAndMove(MovieModel movie, Path sourceFile) {
    System.out.println("Creating directory for : " + movie.getOriginal_title());
    // Strip ':' which is not a legal character in Windows directory names.
    String updateMovieName = movie.getOriginal_title().replaceAll(":", "");
    movie.setOriginal_title(updateMovieName);
    Calendar cal = Calendar.getInstance();
    cal.setTime(movie.getRelease_date());
    // Build the target directory path once; the file path is derived from it
    // instead of duplicating the whole string concatenation.
    Path dirPath = Paths.get(this.directory + File.separator + movie.getOriginal_title() + " " + YEAR_PREFIX
            + cal.get(Calendar.YEAR) + YEAR_POSTFIX);
    Path path = dirPath.resolve(sourceFile.getFileName());
    try {
        Files.createDirectory(dirPath);
        downloadPoster(movie, dirPath);
        // Move the media file into the freshly created directory.
        FileUtils.moveFile(sourceFile.toFile(), path.toFile());
    } catch (IOException e) {
        System.out.println("Exception while doing operation");
        e.printStackTrace();
    }
}
/**
 * Walks the configured directory and equips every entry with a poster:
 * sub-directories missing a thumbnail get one downloaded and are renamed;
 * loose media files are moved into a freshly created, properly named folder.
 *
 * @throws IOException if the directory listing cannot be opened or closed
 */
public void update() throws IOException {
    System.out.println("Grabbing posters in directory: " + this.directory);
    // try-with-resources: a DirectoryStream holds an OS handle and must be
    // closed; the original code leaked it.
    try (DirectoryStream<Path> dirStream = getFiles()) {
        dirStream.forEach(dir -> {
            if (Files.isDirectory(dir)) {
                // Existing folder: only fetch artwork if none is present yet.
                if (!isThumbnailExist(dir)) {
                    MovieModel movie = searchName(dir.getFileName().toString());
                    if (movie != null && movie.getPoster_path() != null) {
                        downloadPoster(movie, dir);
                        renameDir(movie, dir);
                    } else {
                        System.out.println("Couldn't find details for " + dir.getFileName().toString());
                    }
                } else {
                    System.out.println("Thumbnail already exist for: " + dir.getFileName().toString());
                }
            } else {
                // Loose file: strip extension, look it up, and file it away.
                String name = filterExt(dir.getFileName().toString());
                String filteredName = filterName(name);
                MovieModel movie = searchName(filteredName);
                if (movie != null && movie.getPoster_path() != null) {
                    createDirAndMove(movie, dir);
                } else {
                    System.out.println("Couldn't find details for " + dir.getFileName().toString());
                }
            }
        });
    }
}
/**
 * Deletes every thumbnail file found in the sub-directories of the
 * configured directory.
 *
 * @throws IOException if the directory listing cannot be opened or closed
 */
public void delete() throws IOException {
    System.out.println("Grabbing posters in directory: " + this.directory);
    // try-with-resources: close the DirectoryStream handle (was leaked before).
    try (DirectoryStream<Path> dirStream = getFiles()) {
        dirStream.forEach(dir -> {
            if (Files.isDirectory(dir)) {
                if (isThumbnailExist(dir)) {
                    Path path = Paths.get(dir.toAbsolutePath().toString() + File.separator + FOLDER_THUMBNAIL);
                    try {
                        Files.delete(path);
                        System.out
                                .println("Successfully deleted poster of: " + filterName(dir.getFileName().toString()));
                    } catch (IOException e) {
                        // Keep going: one undeletable poster should not stop the sweep.
                        System.out.println("Unable to delete poster of: " + filterName(dir.getFileName().toString()));
                        e.printStackTrace();
                    }
                }
            }
        });
    }
}
/*public static void main(String[] args) {
PostGrabber postGrabber = new PostGrabber("G:\\Movies\\Angreji");
try {
postGrabber.update();
} catch (Exception e) {
e.printStackTrace();
}
}*/
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.types;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.java.typeutils.runtime.kryo.KryoSerializer;
import org.apache.flink.table.api.ValidationException;
import org.apache.flink.table.types.logical.AnyType;
import org.apache.flink.table.types.logical.ArrayType;
import org.apache.flink.table.types.logical.BigIntType;
import org.apache.flink.table.types.logical.BinaryType;
import org.apache.flink.table.types.logical.BooleanType;
import org.apache.flink.table.types.logical.CharType;
import org.apache.flink.table.types.logical.DateType;
import org.apache.flink.table.types.logical.DayTimeIntervalType;
import org.apache.flink.table.types.logical.DayTimeIntervalType.DayTimeResolution;
import org.apache.flink.table.types.logical.DecimalType;
import org.apache.flink.table.types.logical.DoubleType;
import org.apache.flink.table.types.logical.FloatType;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.LocalZonedTimestampType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.MapType;
import org.apache.flink.table.types.logical.MultisetType;
import org.apache.flink.table.types.logical.NullType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.SmallIntType;
import org.apache.flink.table.types.logical.TimeType;
import org.apache.flink.table.types.logical.TimestampType;
import org.apache.flink.table.types.logical.TinyIntType;
import org.apache.flink.table.types.logical.UnresolvedUserDefinedType;
import org.apache.flink.table.types.logical.VarBinaryType;
import org.apache.flink.table.types.logical.VarCharType;
import org.apache.flink.table.types.logical.YearMonthIntervalType;
import org.apache.flink.table.types.logical.YearMonthIntervalType.YearMonthResolution;
import org.apache.flink.table.types.logical.ZonedTimestampType;
import org.apache.flink.table.types.logical.utils.LogicalTypeParser;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
import javax.annotation.Nullable;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.apache.flink.table.types.logical.LogicalTypeRoot.UNRESOLVED;
import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.hasRoot;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.junit.Assert.assertThat;
/**
 * Tests for {@link LogicalTypeParser}.
 *
 * <p>Each {@link TestSpec} pairs a type string with either the expected parsed
 * {@link LogicalType} or an expected parser error-message fragment; the
 * Parameterized runner executes every test method once per spec.
 */
@RunWith(Parameterized.class)
public class LogicalTypeParserTest {

    @Parameters(name = "{index}: [From: {0}, To: {1}]")
    public static List<TestSpec> testData() {
        return Arrays.asList(
            // ---- character string types --------------------------------
            TestSpec
                .forString("CHAR")
                .expectType(new CharType()),
            TestSpec
                .forString("CHAR NOT NULL")
                .expectType(new CharType().copy(false)),
            // arbitrary whitespace between keywords is tolerated
            TestSpec
                .forString("CHAR NOT \t\nNULL")
                .expectType(new CharType().copy(false)),
            // keywords are case-insensitive
            TestSpec
                .forString("char not null")
                .expectType(new CharType().copy(false)),
            TestSpec
                .forString("CHAR NULL")
                .expectType(new CharType()),
            TestSpec
                .forString("CHAR(33)")
                .expectType(new CharType(33)),
            TestSpec
                .forString("VARCHAR")
                .expectType(new VarCharType()),
            TestSpec
                .forString("VARCHAR(33)")
                .expectType(new VarCharType(33)),
            // STRING is shorthand for a maximum-length VARCHAR
            TestSpec
                .forString("STRING")
                .expectType(new VarCharType(VarCharType.MAX_LENGTH)),
            // ---- boolean and binary string types -----------------------
            TestSpec
                .forString("BOOLEAN")
                .expectType(new BooleanType()),
            TestSpec
                .forString("BINARY")
                .expectType(new BinaryType()),
            TestSpec
                .forString("BINARY(33)")
                .expectType(new BinaryType(33)),
            TestSpec
                .forString("VARBINARY")
                .expectType(new VarBinaryType()),
            TestSpec
                .forString("VARBINARY(33)")
                .expectType(new VarBinaryType(33)),
            // BYTES is shorthand for a maximum-length VARBINARY
            TestSpec
                .forString("BYTES")
                .expectType(new VarBinaryType(VarBinaryType.MAX_LENGTH)),
            // ---- exact numeric types (DECIMAL and its synonyms) --------
            TestSpec
                .forString("DECIMAL")
                .expectType(new DecimalType()),
            TestSpec
                .forString("DEC")
                .expectType(new DecimalType()),
            TestSpec
                .forString("NUMERIC")
                .expectType(new DecimalType()),
            TestSpec
                .forString("DECIMAL(10)")
                .expectType(new DecimalType(10)),
            TestSpec
                .forString("DEC(10)")
                .expectType(new DecimalType(10)),
            TestSpec
                .forString("NUMERIC(10)")
                .expectType(new DecimalType(10)),
            TestSpec
                .forString("DECIMAL(10, 3)")
                .expectType(new DecimalType(10, 3)),
            TestSpec
                .forString("DEC(10, 3)")
                .expectType(new DecimalType(10, 3)),
            TestSpec
                .forString("NUMERIC(10, 3)")
                .expectType(new DecimalType(10, 3)),
            // ---- integer and floating-point types ----------------------
            TestSpec
                .forString("TINYINT")
                .expectType(new TinyIntType()),
            TestSpec
                .forString("SMALLINT")
                .expectType(new SmallIntType()),
            TestSpec
                .forString("INTEGER")
                .expectType(new IntType()),
            TestSpec
                .forString("INT")
                .expectType(new IntType()),
            TestSpec
                .forString("BIGINT")
                .expectType(new BigIntType()),
            TestSpec
                .forString("FLOAT")
                .expectType(new FloatType()),
            TestSpec
                .forString("DOUBLE")
                .expectType(new DoubleType()),
            TestSpec
                .forString("DOUBLE PRECISION")
                .expectType(new DoubleType()),
            // ---- date and time types -----------------------------------
            TestSpec
                .forString("DATE")
                .expectType(new DateType()),
            TestSpec
                .forString("TIME")
                .expectType(new TimeType()),
            TestSpec
                .forString("TIME(3)")
                .expectType(new TimeType(3)),
            TestSpec
                .forString("TIME WITHOUT TIME ZONE")
                .expectType(new TimeType()),
            TestSpec
                .forString("TIME(3) WITHOUT TIME ZONE")
                .expectType(new TimeType(3)),
            TestSpec
                .forString("TIMESTAMP")
                .expectType(new TimestampType()),
            TestSpec
                .forString("TIMESTAMP(3)")
                .expectType(new TimestampType(3)),
            TestSpec
                .forString("TIMESTAMP WITHOUT TIME ZONE")
                .expectType(new TimestampType()),
            TestSpec
                .forString("TIMESTAMP(3) WITHOUT TIME ZONE")
                .expectType(new TimestampType(3)),
            TestSpec
                .forString("TIMESTAMP WITH TIME ZONE")
                .expectType(new ZonedTimestampType()),
            TestSpec
                .forString("TIMESTAMP(3) WITH TIME ZONE")
                .expectType(new ZonedTimestampType(3)),
            TestSpec
                .forString("TIMESTAMP WITH LOCAL TIME ZONE")
                .expectType(new LocalZonedTimestampType()),
            TestSpec
                .forString("TIMESTAMP(3) WITH LOCAL TIME ZONE")
                .expectType(new LocalZonedTimestampType(3)),
            // ---- interval types ----------------------------------------
            TestSpec
                .forString("INTERVAL YEAR")
                .expectType(new YearMonthIntervalType(YearMonthResolution.YEAR)),
            TestSpec
                .forString("INTERVAL YEAR(4)")
                .expectType(new YearMonthIntervalType(YearMonthResolution.YEAR, 4)),
            TestSpec
                .forString("INTERVAL MONTH")
                .expectType(new YearMonthIntervalType(YearMonthResolution.MONTH)),
            TestSpec
                .forString("INTERVAL YEAR TO MONTH")
                .expectType(new YearMonthIntervalType(YearMonthResolution.YEAR_TO_MONTH)),
            TestSpec
                .forString("INTERVAL YEAR(4) TO MONTH")
                .expectType(new YearMonthIntervalType(YearMonthResolution.YEAR_TO_MONTH, 4)),
            TestSpec
                .forString("INTERVAL DAY(2) TO SECOND(3)")
                .expectType(new DayTimeIntervalType(DayTimeResolution.DAY_TO_SECOND, 2, 3)),
            // day precision falls back to the default when not specified
            TestSpec
                .forString("INTERVAL HOUR TO SECOND(3)")
                .expectType(
                    new DayTimeIntervalType(
                        DayTimeResolution.HOUR_TO_SECOND,
                        DayTimeIntervalType.DEFAULT_DAY_PRECISION,
                        3)
                ),
            TestSpec
                .forString("INTERVAL MINUTE")
                .expectType(new DayTimeIntervalType(DayTimeResolution.MINUTE)),
            // ---- collection types (both SQL-standard and suffix form) --
            TestSpec
                .forString("ARRAY<TIMESTAMP(3) WITH LOCAL TIME ZONE>")
                .expectType(new ArrayType(new LocalZonedTimestampType(3))),
            TestSpec
                .forString("ARRAY<INT NOT NULL>")
                .expectType(new ArrayType(new IntType(false))),
            TestSpec
                .forString("INT ARRAY")
                .expectType(new ArrayType(new IntType())),
            TestSpec
                .forString("INT NOT NULL ARRAY")
                .expectType(new ArrayType(new IntType(false))),
            // trailing NOT NULL binds to the array itself, not the element
            TestSpec
                .forString("INT ARRAY NOT NULL")
                .expectType(new ArrayType(false, new IntType())),
            TestSpec
                .forString("MULTISET<INT NOT NULL>")
                .expectType(new MultisetType(new IntType(false))),
            TestSpec
                .forString("INT MULTISET")
                .expectType(new MultisetType(new IntType())),
            TestSpec
                .forString("INT NOT NULL MULTISET")
                .expectType(new MultisetType(new IntType(false))),
            TestSpec
                .forString("INT MULTISET NOT NULL")
                .expectType(new MultisetType(false, new IntType())),
            TestSpec
                .forString("MAP<BIGINT, BOOLEAN>")
                .expectType(new MapType(new BigIntType(), new BooleanType())),
            // ---- row types (angle-bracket and parenthesis syntax) ------
            TestSpec
                .forString("ROW<f0 INT NOT NULL, f1 BOOLEAN>")
                .expectType(
                    new RowType(
                        Arrays.asList(
                            new RowType.RowField("f0", new IntType(false)),
                            new RowType.RowField("f1", new BooleanType())))
                ),
            TestSpec
                .forString("ROW(f0 INT NOT NULL, f1 BOOLEAN)")
                .expectType(
                    new RowType(
                        Arrays.asList(
                            new RowType.RowField("f0", new IntType(false)),
                            new RowType.RowField("f1", new BooleanType())))
                ),
            TestSpec
                .forString("ROW<`f0` INT>")
                .expectType(
                    new RowType(
                        Collections.singletonList(new RowType.RowField("f0", new IntType())))
                ),
            TestSpec
                .forString("ROW(`f0` INT)")
                .expectType(
                    new RowType(
                        Collections.singletonList(new RowType.RowField("f0", new IntType())))
                ),
            TestSpec
                .forString("ROW<>")
                .expectType(new RowType(Collections.emptyList())),
            TestSpec
                .forString("ROW()")
                .expectType(new RowType(Collections.emptyList())),
            // field descriptions are parsed from trailing string literals
            TestSpec
                .forString("ROW<f0 INT NOT NULL 'This is a comment.', f1 BOOLEAN 'This as well.'>")
                .expectType(
                    new RowType(
                        Arrays.asList(
                            new RowType.RowField("f0", new IntType(false), "This is a comment."),
                            new RowType.RowField("f1", new BooleanType(), "This as well.")))
                ),
            // ---- special and user-defined types ------------------------
            TestSpec
                .forString("NULL")
                .expectType(new NullType()),
            // round trip: an ANY type serialized to string must parse back
            TestSpec
                .forString(createAnyType(LogicalTypeParserTest.class).asSerializableString())
                .expectType(createAnyType(LogicalTypeParserTest.class)),
            TestSpec
                .forString("cat.db.MyType")
                .expectType(new UnresolvedUserDefinedType("cat", "db", "MyType")),
            TestSpec
                .forString("`db`.`MyType`")
                .expectType(new UnresolvedUserDefinedType(null, "db", "MyType")),
            TestSpec
                .forString("MyType")
                .expectType(new UnresolvedUserDefinedType(null, null, "MyType")),
            TestSpec
                .forString("ARRAY<MyType>")
                .expectType(new ArrayType(new UnresolvedUserDefinedType(null, null, "MyType"))),
            TestSpec
                .forString("ROW<f0 MyType, f1 `c`.`d`.`t`>")
                .expectType(
                    RowType.of(
                        new UnresolvedUserDefinedType(null, null, "MyType"),
                        new UnresolvedUserDefinedType("c", "d", "t"))
                ),
            // error message testing
            TestSpec
                .forString("ROW<`f0")
                .expectErrorMessage("Unexpected end"),
            TestSpec
                .forString("ROW<`f0`")
                .expectErrorMessage("Unexpected end"),
            TestSpec
                .forString("VARCHAR(test)")
                .expectErrorMessage("<LITERAL_INT> expected"),
            TestSpec
                .forString("VARCHAR(33333333333)")
                .expectErrorMessage("Invalid integer value"),
            TestSpec
                .forString("ROW<field INT, field2>")
                .expectErrorMessage("<KEYWORD> expected"),
            TestSpec
                .forString("ANY('unknown.class', '')")
                .expectErrorMessage("Unable to restore the ANY type")
        );
    }

    // Injected by the Parameterized runner with one element of testData().
    @Parameter
    public TestSpec testSpec;

    @Rule
    public ExpectedException thrown = ExpectedException.none();

    /** Parses the spec's type string and checks it against the expected type. */
    @Test
    public void testParsing() {
        if (testSpec.expectedType != null) {
            assertThat(
                LogicalTypeParser.parse(testSpec.typeString),
                equalTo(testSpec.expectedType));
        }
    }

    /**
     * Round-trip check: serializing the expected type and parsing it again must
     * yield an equal type. Unresolved types (at root or as children) have no
     * serializable string form and are skipped.
     */
    @Test
    public void testSerializableParsing() {
        if (testSpec.expectedType != null) {
            if (!hasRoot(testSpec.expectedType, UNRESOLVED) &&
                    testSpec.expectedType.getChildren().stream().noneMatch(t -> hasRoot(t, UNRESOLVED))) {
                assertThat(
                    LogicalTypeParser.parse(testSpec.expectedType.asSerializableString()),
                    equalTo(testSpec.expectedType));
            }
        }
    }

    /** For invalid input specs, verifies the parser fails with the expected message. */
    @Test
    public void testErrorMessage() {
        if (testSpec.expectedErrorMessage != null) {
            thrown.expect(ValidationException.class);
            thrown.expectMessage(testSpec.expectedErrorMessage);
            LogicalTypeParser.parse(testSpec.typeString);
        }
    }

    // --------------------------------------------------------------------------------------------

    /**
     * One parameterized test case: a type string plus either an expected parsed
     * type or an expected error-message fragment (exactly one should be set).
     */
    private static class TestSpec {

        private final String typeString;

        private @Nullable LogicalType expectedType;

        private @Nullable String expectedErrorMessage;

        private TestSpec(String typeString) {
            this.typeString = typeString;
        }

        static TestSpec forString(String typeString) {
            return new TestSpec(typeString);
        }

        TestSpec expectType(LogicalType expectedType) {
            this.expectedType = expectedType;
            return this;
        }

        TestSpec expectErrorMessage(String expectedErrorMessage) {
            this.expectedErrorMessage = expectedErrorMessage;
            return this;
        }
    }

    // Helper producing an ANY type backed by a Kryo serializer for round trips.
    private static <T> AnyType<T> createAnyType(Class<T> clazz) {
        return new AnyType<>(clazz, new KryoSerializer<>(clazz, new ExecutionConfig()));
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.snapshots;
import com.carrotsearch.randomizedtesting.LifecycleScope;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ListenableActionFuture;
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse;
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse;
import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse;
import org.elasticsearch.action.admin.cluster.snapshots.status.*;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse;
import org.elasticsearch.action.count.CountResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.metadata.SnapshotMetaData;
import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.store.support.AbstractIndexStore;
import org.elasticsearch.indices.InvalidIndexNameException;
import org.elasticsearch.repositories.RepositoriesService;
import org.elasticsearch.snapshots.mockstore.MockRepositoryModule;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.junit.Test;
import java.io.File;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.cluster.metadata.IndexMetaData.*;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
import static org.hamcrest.Matchers.*;
@Slow
public class SharedClusterSnapshotRestoreTests extends AbstractSnapshotTests {
@Test
public void basicWorkFlowTest() throws Exception {
    // End-to-end snapshot/restore: snapshot two of three indices, mutate the
    // data, restore, and verify exactly the snapshotted state comes back.
    Client client = client();

    logger.info("--> creating repository");
    assertAcked(client.admin().cluster().preparePutRepository("test-repo")
            .setType("fs").setSettings(ImmutableSettings.settingsBuilder()
                    .put("location", newTempDir(LifecycleScope.SUITE))
                    .put("compress", randomBoolean())
                    .put("chunk_size", randomIntBetween(100, 1000))));

    createIndex("test-idx-1", "test-idx-2", "test-idx-3");
    ensureGreen();

    logger.info("--> indexing some data");
    for (int i = 0; i < 100; i++) {
        index("test-idx-1", "doc", Integer.toString(i), "foo", "bar" + i);
        index("test-idx-2", "doc", Integer.toString(i), "foo", "baz" + i);
        index("test-idx-3", "doc", Integer.toString(i), "foo", "baz" + i);
    }
    refresh();
    assertThat(client.prepareCount("test-idx-1").get().getCount(), equalTo(100L));
    assertThat(client.prepareCount("test-idx-2").get().getCount(), equalTo(100L));
    assertThat(client.prepareCount("test-idx-3").get().getCount(), equalTo(100L));

    logger.info("--> snapshot");
    // Index pattern "test-idx-*" minus "test-idx-3": only idx-1 and idx-2 are snapshotted.
    CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("test-idx-*", "-test-idx-3").get();
    assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0));
    assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()));
    assertThat(client.admin().cluster().prepareGetSnapshots("test-repo").setSnapshots("test-snap").get().getSnapshots().get(0).state(), equalTo(SnapshotState.SUCCESS));

    logger.info("--> delete some data");
    // Thin out each index differently so restored counts are distinguishable.
    for (int i = 0; i < 50; i++) {
        client.prepareDelete("test-idx-1", "doc", Integer.toString(i)).get();
    }
    for (int i = 50; i < 100; i++) {
        client.prepareDelete("test-idx-2", "doc", Integer.toString(i)).get();
    }
    for (int i = 0; i < 100; i += 2) {
        client.prepareDelete("test-idx-3", "doc", Integer.toString(i)).get();
    }
    refresh();
    assertThat(client.prepareCount("test-idx-1").get().getCount(), equalTo(50L));
    assertThat(client.prepareCount("test-idx-2").get().getCount(), equalTo(50L));
    assertThat(client.prepareCount("test-idx-3").get().getCount(), equalTo(50L));

    logger.info("--> close indices");
    // Indices must be closed before a restore can overwrite them.
    client.admin().indices().prepareClose("test-idx-1", "test-idx-2").get();

    logger.info("--> restore all indices from the snapshot");
    RestoreSnapshotResponse restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setWaitForCompletion(true).execute().actionGet();
    assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));

    ensureGreen();
    // idx-1/idx-2 were restored to 100 docs; idx-3 was not snapshotted and keeps 50.
    assertThat(client.prepareCount("test-idx-1").get().getCount(), equalTo(100L));
    assertThat(client.prepareCount("test-idx-2").get().getCount(), equalTo(100L));
    assertThat(client.prepareCount("test-idx-3").get().getCount(), equalTo(50L));

    // Test restore after index deletion
    logger.info("--> delete indices");
    cluster().wipeIndices("test-idx-1", "test-idx-2");
    logger.info("--> restore one index after deletion");
    restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("test-idx-*", "-test-idx-2").execute().actionGet();
    assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));
    ensureGreen();
    assertThat(client.prepareCount("test-idx-1").get().getCount(), equalTo(100L));
    // Only idx-1 was restored: idx-2 must remain absent from the cluster state.
    ClusterState clusterState = client.admin().cluster().prepareState().get().getState();
    assertThat(clusterState.getMetaData().hasIndex("test-idx-1"), equalTo(true));
    assertThat(clusterState.getMetaData().hasIndex("test-idx-2"), equalTo(false));
}
@Test
public void restoreWithDifferentMappingsAndSettingsTest() throws Exception {
    // Verifies that a restore replaces the current index mappings/settings with
    // the ones captured in the snapshot, even after the index was recreated
    // with a different type and different settings.
    Client client = client();

    logger.info("--> creating repository");
    assertAcked(client.admin().cluster().preparePutRepository("test-repo")
            .setType("fs").setSettings(ImmutableSettings.settingsBuilder()
                    .put("location", newTempDir(LifecycleScope.SUITE))
                    .put("compress", randomBoolean())
                    .put("chunk_size", randomIntBetween(100, 1000))));

    logger.info("--> create index with foo type");
    // refresh_interval=10 is the marker setting we expect the restore to bring back.
    assertAcked(prepareCreate("test-idx", 2, ImmutableSettings.builder()
            .put(indexSettings()).put(SETTING_NUMBER_OF_REPLICAS, between(0, 1)).put("refresh_interval", 10)));

    NumShards numShards = getNumShards("test-idx");

    assertAcked(client().admin().indices().preparePutMapping("test-idx").setType("foo").setSource("baz", "type=string"));
    ensureGreen();

    logger.info("--> snapshot it");
    CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("test-idx").get();
    assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0));
    assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()));

    logger.info("--> delete the index and recreate it with bar type");
    cluster().wipeIndices("test-idx");
    // Recreate with the same shard count but a different type ("bar") and
    // a different refresh_interval (5) to contrast with the snapshot.
    assertAcked(prepareCreate("test-idx", 2, ImmutableSettings.builder()
            .put(SETTING_NUMBER_OF_SHARDS, numShards.numPrimaries).put(SETTING_NUMBER_OF_REPLICAS, between(0, 1)).put("refresh_interval", 5)));
    assertAcked(client().admin().indices().preparePutMapping("test-idx").setType("bar").setSource("baz", "type=string"));
    ensureGreen();

    logger.info("--> close index");
    client.admin().indices().prepareClose("test-idx").get();

    logger.info("--> restore all indices from the snapshot");
    RestoreSnapshotResponse restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setWaitForCompletion(true).execute().actionGet();
    assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));

    ensureGreen();

    logger.info("--> assert that old mapping is restored");
    ImmutableOpenMap<String, MappingMetaData> mappings = client().admin().cluster().prepareState().get().getState().getMetaData().getIndices().get("test-idx").getMappings();
    assertThat(mappings.get("foo"), notNullValue());
    assertThat(mappings.get("bar"), nullValue());

    logger.info("--> assert that old settings are restored");
    GetSettingsResponse getSettingsResponse = client.admin().indices().prepareGetSettings("test-idx").execute().actionGet();
    assertThat(getSettingsResponse.getSetting("test-idx", "index.refresh_interval"), equalTo("10"));
}
@Test
public void emptySnapshotTest() throws Exception {
    // Snapshotting a cluster without any indices must still succeed,
    // producing a SUCCESS snapshot that covers zero shards.
    Client client = client();

    logger.info("--> creating repository");
    PutRepositoryResponse repoResponse = client.admin().cluster().preparePutRepository("test-repo")
            .setType("fs")
            .setSettings(ImmutableSettings.settingsBuilder().put("location", newTempDir()))
            .get();
    assertThat(repoResponse.isAcknowledged(), equalTo(true));

    logger.info("--> snapshot");
    CreateSnapshotResponse snapshotResponse = client.admin().cluster()
            .prepareCreateSnapshot("test-repo", "test-snap")
            .setWaitForCompletion(true)
            .get();
    SnapshotInfo info = snapshotResponse.getSnapshotInfo();
    assertThat(info.totalShards(), equalTo(0));
    assertThat(info.successfulShards(), equalTo(0));

    // The stored snapshot must report SUCCESS despite being empty.
    SnapshotInfo stored = client.admin().cluster().prepareGetSnapshots("test-repo")
            .setSnapshots("test-snap").get().getSnapshots().get(0);
    assertThat(stored.state(), equalTo(SnapshotState.SUCCESS));
}
@Test
public void restoreTemplatesTest() throws Exception {
    // Verifies that index templates are captured in the cluster state part of a
    // snapshot and come back when restoring with setRestoreGlobalState(true).
    Client client = client();

    logger.info("--> creating repository");
    assertAcked(client.admin().cluster().preparePutRepository("test-repo")
            .setType("fs").setSettings(ImmutableSettings.settingsBuilder().put("location", newTempDir())));

    logger.info("--> creating test template");
    assertThat(client.admin().indices().preparePutTemplate("test-template").setTemplate("te*").addMapping("test-mapping", "{}").get().isAcknowledged(), equalTo(true));

    logger.info("--> snapshot");
    // setIndices() with no arguments: snapshot only the global cluster state.
    CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setIndices().setWaitForCompletion(true).get();
    assertThat(createSnapshotResponse.getSnapshotInfo().totalShards(), equalTo(0));
    assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(0));
    assertThat(client.admin().cluster().prepareGetSnapshots("test-repo").setSnapshots("test-snap").get().getSnapshots().get(0).state(), equalTo(SnapshotState.SUCCESS));

    logger.info("--> delete test template");
    assertThat(client.admin().indices().prepareDeleteTemplate("test-template").get().isAcknowledged(), equalTo(true));
    GetIndexTemplatesResponse getIndexTemplatesResponse = client().admin().indices().prepareGetTemplates().get();
    assertIndexTemplateMissing(getIndexTemplatesResponse, "test-template");

    logger.info("--> restore cluster state");
    RestoreSnapshotResponse restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setRestoreGlobalState(true).execute().actionGet();
    // We don't restore any indices here
    assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), equalTo(0));

    logger.info("--> check that template is restored");
    getIndexTemplatesResponse = client().admin().indices().prepareGetTemplates().get();
    assertIndexTemplateExists(getIndexTemplatesResponse, "test-template");
}
@Test
public void includeGlobalStateTest() throws Exception {
    // Verifies the include_global_state flag: a snapshot taken without global
    // state must not bring templates back on restore, while one taken with it
    // must; index data restores regardless of the flag.
    Client client = client();

    logger.info("--> creating repository");
    File location = newTempDir();
    assertAcked(client.admin().cluster().preparePutRepository("test-repo")
            .setType("fs").setSettings(ImmutableSettings.settingsBuilder().put("location", location)));

    logger.info("--> creating test template");
    assertThat(client.admin().indices().preparePutTemplate("test-template").setTemplate("te*").addMapping("test-mapping", "{}").get().isAcknowledged(), equalTo(true));

    logger.info("--> snapshot without global state");
    CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap-no-global-state").setIndices().setIncludeGlobalState(false).setWaitForCompletion(true).get();
    assertThat(createSnapshotResponse.getSnapshotInfo().totalShards(), equalTo(0));
    assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(0));
    assertThat(client.admin().cluster().prepareGetSnapshots("test-repo").setSnapshots("test-snap-no-global-state").get().getSnapshots().get(0).state(), equalTo(SnapshotState.SUCCESS));

    logger.info("--> snapshot with global state");
    createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap-with-global-state").setIndices().setIncludeGlobalState(true).setWaitForCompletion(true).get();
    assertThat(createSnapshotResponse.getSnapshotInfo().totalShards(), equalTo(0));
    assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(0));
    assertThat(client.admin().cluster().prepareGetSnapshots("test-repo").setSnapshots("test-snap-with-global-state").get().getSnapshots().get(0).state(), equalTo(SnapshotState.SUCCESS));

    logger.info("--> delete test template");
    cluster().wipeTemplates("test-template");
    GetIndexTemplatesResponse getIndexTemplatesResponse = client().admin().indices().prepareGetTemplates().get();
    assertIndexTemplateMissing(getIndexTemplatesResponse, "test-template");

    logger.info("--> try restoring cluster state from snapshot without global state");
    RestoreSnapshotResponse restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap-no-global-state").setWaitForCompletion(true).setRestoreGlobalState(true).execute().actionGet();
    assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), equalTo(0));

    logger.info("--> check that template wasn't restored");
    // Restoring global state from a snapshot that never stored it is a no-op.
    getIndexTemplatesResponse = client().admin().indices().prepareGetTemplates().get();
    assertIndexTemplateMissing(getIndexTemplatesResponse, "test-template");

    logger.info("--> restore cluster state");
    restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap-with-global-state").setWaitForCompletion(true).setRestoreGlobalState(true).execute().actionGet();
    assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), equalTo(0));

    logger.info("--> check that template is restored");
    getIndexTemplatesResponse = client().admin().indices().prepareGetTemplates().get();
    assertIndexTemplateExists(getIndexTemplatesResponse, "test-template");

    createIndex("test-idx");
    ensureGreen();

    logger.info("--> indexing some data");
    for (int i = 0; i < 100; i++) {
        index("test-idx", "doc", Integer.toString(i), "foo", "bar" + i);
    }
    refresh();
    assertThat(client.prepareCount("test-idx").get().getCount(), equalTo(100L));

    logger.info("--> snapshot without global state but with indices");
    createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap-no-global-state-with-index").setIndices("test-idx").setIncludeGlobalState(false).setWaitForCompletion(true).get();
    assertThat(createSnapshotResponse.getSnapshotInfo().totalShards(), greaterThan(0));
    assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()));
    assertThat(client.admin().cluster().prepareGetSnapshots("test-repo").setSnapshots("test-snap-no-global-state-with-index").get().getSnapshots().get(0).state(), equalTo(SnapshotState.SUCCESS));

    logger.info("--> delete test template and index ");
    cluster().wipeIndices("test-idx");
    cluster().wipeTemplates("test-template");
    getIndexTemplatesResponse = client().admin().indices().prepareGetTemplates().get();
    assertIndexTemplateMissing(getIndexTemplatesResponse, "test-template");

    logger.info("--> try restoring index and cluster state from snapshot without global state");
    restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap-no-global-state-with-index").setWaitForCompletion(true).setRestoreGlobalState(true).execute().actionGet();
    assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));
    assertThat(restoreSnapshotResponse.getRestoreInfo().failedShards(), equalTo(0));

    ensureGreen();
    logger.info("--> check that template wasn't restored but index was");
    getIndexTemplatesResponse = client().admin().indices().prepareGetTemplates().get();
    assertIndexTemplateMissing(getIndexTemplatesResponse, "test-template");
    assertThat(client.prepareCount("test-idx").get().getCount(), equalTo(100L));
}
    @Test
    public void snapshotFileFailureDuringSnapshotTest() throws Exception {
        // Verifies snapshot behavior when the repository randomly throws IOExceptions
        // while writing snapshot control/metadata files: the snapshot must either
        // succeed with no recorded failures, or consistently report the injected
        // failures per shard (or abort with the injected exception).
        Client client = client();
        logger.info("--> creating repository");
        // Mock repository that fails ~20% of control-file I/O operations at random.
        assertAcked(client.admin().cluster().preparePutRepository("test-repo")
                .setType(MockRepositoryModule.class.getCanonicalName()).setSettings(
                        ImmutableSettings.settingsBuilder()
                                .put("location", newTempDir(LifecycleScope.TEST))
                                .put("random", randomAsciiOfLength(10))
                                .put("random_control_io_exception_rate", 0.2)));
        createIndex("test-idx");
        ensureGreen();
        logger.info("--> indexing some data");
        for (int i = 0; i < 100; i++) {
            index("test-idx", "doc", Integer.toString(i), "foo", "bar" + i);
        }
        refresh();
        assertThat(client.prepareCount("test-idx").get().getCount(), equalTo(100L));
        logger.info("--> snapshot");
        try {
            CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("test-idx").get();
            if (createSnapshotResponse.getSnapshotInfo().totalShards() == createSnapshotResponse.getSnapshotInfo().successfulShards()) {
                // If we are here, that means we didn't have any failures, let's check it
                assertThat(getFailureCount("test-repo"), equalTo(0L));
            } else {
                // Some shards failed: every reported failure must stem from the
                // injected IOException and name the node and index involved.
                assertThat(getFailureCount("test-repo"), greaterThan(0L));
                assertThat(createSnapshotResponse.getSnapshotInfo().shardFailures().size(), greaterThan(0));
                for (SnapshotShardFailure shardFailure : createSnapshotResponse.getSnapshotInfo().shardFailures()) {
                    assertThat(shardFailure.reason(), containsString("Random IOException"));
                    assertThat(shardFailure.nodeId(), notNullValue());
                    assertThat(shardFailure.index(), equalTo("test-idx"));
                }
                GetSnapshotsResponse getSnapshotsResponse = client.admin().cluster().prepareGetSnapshots("test-repo").addSnapshots("test-snap").get();
                assertThat(getSnapshotsResponse.getSnapshots().size(), equalTo(1));
                SnapshotInfo snapshotInfo = getSnapshotsResponse.getSnapshots().get(0);
                if (snapshotInfo.state() == SnapshotState.SUCCESS) {
                    // A SUCCESS state with shard failures still has to be internally consistent.
                    assertThat(snapshotInfo.shardFailures().size(), greaterThan(0));
                    assertThat(snapshotInfo.totalShards(), greaterThan(snapshotInfo.successfulShards()));
                }
            }
        } catch (Exception ex) {
            // Snapshot creation itself may abort with the injected exception; make sure
            // at least one failure was recorded and the cause is the simulated IOException.
            assertThat(getFailureCount("test-repo"), greaterThan(0L));
            assertThat(ExceptionsHelper.detailedMessage(ex), containsString("IOException"));
        }
    }
    @Test
    public void dataFileFailureDuringSnapshotTest() throws Exception {
        // Injects random IOExceptions into ~30% of data-file writes and verifies that
        // the snapshot either completes cleanly (no recorded failures) or ends up
        // PARTIAL with per-shard failures reported consistently by both the snapshot
        // info and the snapshot status APIs.
        Client client = client();
        logger.info("--> creating repository");
        assertAcked(client.admin().cluster().preparePutRepository("test-repo")
                .setType(MockRepositoryModule.class.getCanonicalName()).setSettings(
                        ImmutableSettings.settingsBuilder()
                                .put("location", newTempDir(LifecycleScope.TEST))
                                .put("random", randomAsciiOfLength(10))
                                .put("random_data_file_io_exception_rate", 0.3)));
        createIndex("test-idx");
        ensureGreen();
        logger.info("--> indexing some data");
        for (int i = 0; i < 100; i++) {
            index("test-idx", "doc", Integer.toString(i), "foo", "bar" + i);
        }
        refresh();
        assertThat(client.prepareCount("test-idx").get().getCount(), equalTo(100L));
        logger.info("--> snapshot");
        CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("test-idx").get();
        if (createSnapshotResponse.getSnapshotInfo().totalShards() == createSnapshotResponse.getSnapshotInfo().successfulShards()) {
            logger.info("--> no failures");
            // If we are here, that means we didn't have any failures, let's check it
            assertThat(getFailureCount("test-repo"), equalTo(0L));
        } else {
            logger.info("--> some failures");
            assertThat(getFailureCount("test-repo"), greaterThan(0L));
            assertThat(createSnapshotResponse.getSnapshotInfo().shardFailures().size(), greaterThan(0));
            for (SnapshotShardFailure shardFailure : createSnapshotResponse.getSnapshotInfo().shardFailures()) {
                assertThat(shardFailure.nodeId(), notNullValue());
                assertThat(shardFailure.index(), equalTo("test-idx"));
            }
            GetSnapshotsResponse getSnapshotsResponse = client.admin().cluster().prepareGetSnapshots("test-repo").addSnapshots("test-snap").get();
            assertThat(getSnapshotsResponse.getSnapshots().size(), equalTo(1));
            SnapshotInfo snapshotInfo = getSnapshotsResponse.getSnapshots().get(0);
            // Data-file failures leave the snapshot PARTIAL (the control-file test
            // above shows control failures may still yield SUCCESS).
            assertThat(snapshotInfo.state(), equalTo(SnapshotState.PARTIAL));
            assertThat(snapshotInfo.shardFailures().size(), greaterThan(0));
            assertThat(snapshotInfo.totalShards(), greaterThan(snapshotInfo.successfulShards()));
            // Verify that snapshot status also contains the same failures
            SnapshotsStatusResponse snapshotsStatusResponse = client.admin().cluster().prepareSnapshotStatus("test-repo").addSnapshots("test-snap").get();
            assertThat(snapshotsStatusResponse.getSnapshots().size(), equalTo(1));
            SnapshotStatus snapshotStatus = snapshotsStatusResponse.getSnapshots().get(0);
            assertThat(snapshotStatus.getIndices().size(), equalTo(1));
            SnapshotIndexStatus indexStatus = snapshotStatus.getIndices().get("test-idx");
            assertThat(indexStatus, notNullValue());
            assertThat(indexStatus.getShardsStats().getFailedShards(), equalTo(snapshotInfo.failedShards()));
            assertThat(indexStatus.getShardsStats().getDoneShards(), equalTo(snapshotInfo.successfulShards()));
            assertThat(indexStatus.getShards().size(), equalTo(snapshotInfo.totalShards()));
            // Count FAILURE-stage shards and check only they carry a failure message.
            int numberOfFailures = 0;
            for (SnapshotIndexShardStatus shardStatus : indexStatus.getShards().values()) {
                if (shardStatus.getStage() == SnapshotIndexShardStage.FAILURE) {
                    assertThat(shardStatus.getFailure(), notNullValue());
                    numberOfFailures++;
                } else {
                    assertThat(shardStatus.getFailure(), nullValue());
                }
            }
            assertThat(indexStatus.getShardsStats().getFailedShards(), equalTo(numberOfFailures));
        }
    }
@Test
public void dataFileFailureDuringRestoreTest() throws Exception {
File repositoryLocation = newTempDir(LifecycleScope.TEST);
Client client = client();
logger.info("--> creating repository");
assertAcked(client.admin().cluster().preparePutRepository("test-repo")
.setType("fs").setSettings(ImmutableSettings.settingsBuilder().put("location", repositoryLocation)));
createIndex("test-idx");
ensureGreen();
logger.info("--> indexing some data");
for (int i = 0; i < 100; i++) {
index("test-idx", "doc", Integer.toString(i), "foo", "bar" + i);
}
refresh();
assertThat(client.prepareCount("test-idx").get().getCount(), equalTo(100L));
logger.info("--> snapshot");
CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("test-idx").get();
assertThat(createSnapshotResponse.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS));
assertThat(createSnapshotResponse.getSnapshotInfo().totalShards(), equalTo(createSnapshotResponse.getSnapshotInfo().successfulShards()));
logger.info("--> update repository with mock version");
assertAcked(client.admin().cluster().preparePutRepository("test-repo")
.setType(MockRepositoryModule.class.getCanonicalName()).setSettings(
ImmutableSettings.settingsBuilder()
.put("location", repositoryLocation)
.put("random", randomAsciiOfLength(10))
.put("random_data_file_io_exception_rate", 0.3)));
// Test restore after index deletion
logger.info("--> delete index");
cluster().wipeIndices("test-idx");
logger.info("--> restore index after deletion");
RestoreSnapshotResponse restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setWaitForCompletion(true).execute().actionGet();
assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));
ensureGreen();
CountResponse countResponse = client.prepareCount("test-idx").get();
assertThat(countResponse.getCount(), equalTo(100L));
}
    @Test
    @TestLogging("snapshots:TRACE")
    public void deletionOfFailingToRecoverIndexShouldStopRestore() throws Exception {
        // Scenario: restore from a repository that fails every data-file read, so
        // recovery can never finish. Deleting the half-restored index must abort the
        // pending restore (reporting all shards failed) instead of hanging, and a
        // subsequent restore from a healthy repository must succeed.
        File repositoryLocation = newTempDir(LifecycleScope.TEST);
        Client client = client();
        logger.info("--> creating repository");
        assertAcked(client.admin().cluster().preparePutRepository("test-repo")
                .setType("fs").setSettings(ImmutableSettings.settingsBuilder().put("location", repositoryLocation)));
        createIndex("test-idx");
        ensureGreen();
        logger.info("--> indexing some data");
        for (int i = 0; i < 100; i++) {
            index("test-idx", "doc", Integer.toString(i), "foo", "bar" + i);
        }
        refresh();
        assertThat(client.prepareCount("test-idx").get().getCount(), equalTo(100L));
        logger.info("--> snapshot");
        CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("test-idx").get();
        assertThat(createSnapshotResponse.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS));
        assertThat(createSnapshotResponse.getSnapshotInfo().totalShards(), equalTo(createSnapshotResponse.getSnapshotInfo().successfulShards()));
        logger.info("--> update repository with mock version");
        assertAcked(client.admin().cluster().preparePutRepository("test-repo")
                .setType(MockRepositoryModule.class.getCanonicalName()).setSettings(
                        ImmutableSettings.settingsBuilder()
                                .put("location", repositoryLocation)
                                .put("random", randomAsciiOfLength(10))
                                .put("random_data_file_io_exception_rate", 1.0) // Fail completely
                ));
        // Test restore after index deletion
        logger.info("--> delete index");
        cluster().wipeIndices("test-idx");
        logger.info("--> restore index after deletion");
        // Restore asynchronously; it cannot complete because every data-file read fails.
        ListenableActionFuture<RestoreSnapshotResponse> restoreSnapshotResponseFuture =
                client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setWaitForCompletion(true).execute();
        logger.info("--> wait for the index to appear");
        // that would mean that recovery process started and failing
        assertThat(waitForIndex("test-idx", TimeValue.timeValueSeconds(10)), equalTo(true));
        logger.info("--> delete index");
        // Deleting the index mid-recovery should make the pending restore terminate.
        cluster().wipeIndices("test-idx");
        logger.info("--> get restore results");
        // Now read restore results and make sure it failed
        RestoreSnapshotResponse restoreSnapshotResponse = restoreSnapshotResponseFuture.actionGet(TimeValue.timeValueSeconds(10));
        assertThat(restoreSnapshotResponse.getRestoreInfo().failedShards(), greaterThan(0));
        assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), equalTo(restoreSnapshotResponse.getRestoreInfo().failedShards()));
        logger.info("--> restoring working repository");
        // Re-register the repository as a plain fs repo (no failure injection).
        assertAcked(client.admin().cluster().preparePutRepository("test-repo")
                .setType("fs").setSettings(ImmutableSettings.settingsBuilder().put("location", repositoryLocation)));
        logger.info("--> trying to restore index again");
        restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setWaitForCompletion(true).execute().actionGet();
        assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));
        assertThat(restoreSnapshotResponse.getRestoreInfo().failedShards(), equalTo(0));
        ensureGreen();
        CountResponse countResponse = client.prepareCount("test-idx").get();
        assertThat(countResponse.getCount(), equalTo(100L));
    }
@Test
public void unallocatedShardsTest() throws Exception {
Client client = client();
logger.info("--> creating repository");
assertAcked(client.admin().cluster().preparePutRepository("test-repo")
.setType("fs").setSettings(ImmutableSettings.settingsBuilder()
.put("location", newTempDir(LifecycleScope.SUITE))));
logger.info("--> creating index that cannot be allocated");
prepareCreate("test-idx", 2, ImmutableSettings.builder().put(FilterAllocationDecider.INDEX_ROUTING_INCLUDE_GROUP + ".tag", "nowhere").put("index.number_of_shards", 3)).get();
logger.info("--> snapshot");
CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("test-idx").get();
assertThat(createSnapshotResponse.getSnapshotInfo().state(), equalTo(SnapshotState.FAILED));
assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(0));
assertThat(createSnapshotResponse.getSnapshotInfo().totalShards(), equalTo(0));
assertThat(createSnapshotResponse.getSnapshotInfo().reason(), startsWith("Indices don't have primary shards"));
}
@Test
public void deleteSnapshotTest() throws Exception {
final int numberOfSnapshots = between(5, 15);
Client client = client();
File repo = newTempDir(LifecycleScope.SUITE);
logger.info("--> creating repository at " + repo.getAbsolutePath());
assertAcked(client.admin().cluster().preparePutRepository("test-repo")
.setType("fs").setSettings(ImmutableSettings.settingsBuilder()
.put("location", repo)
.put("compress", false)
.put("chunk_size", randomIntBetween(100, 1000))));
createIndex("test-idx");
ensureGreen();
int[] numberOfFiles = new int[numberOfSnapshots];
logger.info("--> creating {} snapshots ", numberOfSnapshots);
for (int i = 0; i < numberOfSnapshots; i++) {
for (int j = 0; j < 10; j++) {
index("test-idx", "doc", Integer.toString(i * 10 + j), "foo", "bar" + i * 10 + j);
}
refresh();
logger.info("--> snapshot {}", i);
CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap-" + i).setWaitForCompletion(true).setIndices("test-idx").get();
assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0));
assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()));
// Store number of files after each snapshot
numberOfFiles[i] = numberOfFiles(repo);
}
assertThat(client.prepareCount("test-idx").get().getCount(), equalTo(10L * numberOfSnapshots));
int numberOfFilesBeforeDeletion = numberOfFiles(repo);
logger.info("--> delete all snapshots except the first one and last one");
for (int i = 1; i < numberOfSnapshots - 1; i++) {
client.admin().cluster().prepareDeleteSnapshot("test-repo", "test-snap-" + i).get();
}
int numberOfFilesAfterDeletion = numberOfFiles(repo);
assertThat(numberOfFilesAfterDeletion, lessThan(numberOfFilesBeforeDeletion));
logger.info("--> delete index");
cluster().wipeIndices("test-idx");
logger.info("--> restore index");
String lastSnapshot = "test-snap-" + (numberOfSnapshots - 1);
RestoreSnapshotResponse restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("test-repo", lastSnapshot).setWaitForCompletion(true).execute().actionGet();
assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));
ensureGreen();
assertThat(client.prepareCount("test-idx").get().getCount(), equalTo(10L * numberOfSnapshots));
logger.info("--> delete the last snapshot");
client.admin().cluster().prepareDeleteSnapshot("test-repo", lastSnapshot).get();
logger.info("--> make sure that number of files is back to what it was when the first snapshot was made");
assertThat(numberOfFiles(repo), equalTo(numberOfFiles[0]));
}
@Test
public void deleteSnapshotWithMissingIndexAndShardMetadataTest() throws Exception {
Client client = client();
File repo = newTempDir(LifecycleScope.SUITE);
logger.info("--> creating repository at " + repo.getAbsolutePath());
assertAcked(client.admin().cluster().preparePutRepository("test-repo")
.setType("fs").setSettings(ImmutableSettings.settingsBuilder()
.put("location", repo)
.put("compress", false)
.put("chunk_size", randomIntBetween(100, 1000))));
createIndex("test-idx-1", "test-idx-2");
ensureYellow();
logger.info("--> indexing some data");
indexRandom(true,
client().prepareIndex("test-idx-1", "doc").setSource("foo", "bar"),
client().prepareIndex("test-idx-2", "doc").setSource("foo", "bar"));
logger.info("--> creating snapshot");
CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap-1").setWaitForCompletion(true).setIndices("test-idx-*").get();
assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0));
assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()));
logger.info("--> delete index metadata and shard metadata");
File indices = new File(repo, "indices");
File testIndex1 = new File(indices, "test-idx-1");
File testIndex2 = new File(indices, "test-idx-2");
File testIndex2Shard0 = new File(testIndex2, "0");
new File(testIndex1, "snapshot-test-snap-1").delete();
new File(testIndex2Shard0, "snapshot-test-snap-1").delete();
logger.info("--> delete snapshot");
client.admin().cluster().prepareDeleteSnapshot("test-repo", "test-snap-1").get();
logger.info("--> make sure snapshot doesn't exist");
assertThrows(client.admin().cluster().prepareGetSnapshots("test-repo").addSnapshots("test-snap-1"), SnapshotMissingException.class);
}
@Test
@TestLogging("snapshots:TRACE")
public void snapshotClosedIndexTest() throws Exception {
Client client = client();
logger.info("--> creating repository");
assertAcked(client.admin().cluster().preparePutRepository("test-repo")
.setType("fs").setSettings(ImmutableSettings.settingsBuilder()
.put("location", newTempDir(LifecycleScope.SUITE))));
createIndex("test-idx", "test-idx-closed");
ensureGreen();
logger.info("--> closing index test-idx-closed");
assertAcked(client.admin().indices().prepareClose("test-idx-closed"));
ClusterStateResponse stateResponse = client.admin().cluster().prepareState().get();
assertThat(stateResponse.getState().metaData().index("test-idx-closed").state(), equalTo(State.CLOSE));
assertThat(stateResponse.getState().routingTable().index("test-idx-closed"), nullValue());
logger.info("--> snapshot");
CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("test-idx*").get();
assertThat(createSnapshotResponse.getSnapshotInfo().indices().size(), equalTo(1));
assertThat(createSnapshotResponse.getSnapshotInfo().shardFailures().size(), equalTo(0));
logger.info("--> deleting snapshot");
client.admin().cluster().prepareDeleteSnapshot("test-repo", "test-snap").get();
}
    @Test
    public void renameOnRestoreTest() throws Exception {
        // Exercises rename-on-restore: restoring indices under new names, restoring
        // again into the closed copies, a partial rename, and the error cases (two
        // indices renamed onto the same target, renaming onto an index that is
        // already being restored, and renaming onto an invalid index name).
        Client client = client();
        logger.info("--> creating repository");
        assertAcked(client.admin().cluster().preparePutRepository("test-repo")
                .setType("fs").setSettings(ImmutableSettings.settingsBuilder()
                        .put("location", newTempDir(LifecycleScope.SUITE))));
        createIndex("test-idx-1", "test-idx-2");
        ensureGreen();
        logger.info("--> indexing some data");
        for (int i = 0; i < 100; i++) {
            index("test-idx-1", "doc", Integer.toString(i), "foo", "bar" + i);
            index("test-idx-2", "doc", Integer.toString(i), "foo", "bar" + i);
        }
        refresh();
        assertThat(client.prepareCount("test-idx-1").get().getCount(), equalTo(100L));
        assertThat(client.prepareCount("test-idx-2").get().getCount(), equalTo(100L));
        logger.info("--> snapshot");
        CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("test-idx-1", "test-idx-2").get();
        assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0));
        assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()));
        logger.info("--> restore indices with different names");
        // "(.+)" -> "$1-copy" renames every snapshotted index to <name>-copy.
        RestoreSnapshotResponse restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap")
                .setRenamePattern("(.+)").setRenameReplacement("$1-copy").setWaitForCompletion(true).execute().actionGet();
        assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));
        ensureGreen();
        assertThat(client.prepareCount("test-idx-1-copy").get().getCount(), equalTo(100L));
        assertThat(client.prepareCount("test-idx-2-copy").get().getCount(), equalTo(100L));
        logger.info("--> close just restored indices");
        client.admin().indices().prepareClose("test-idx-1-copy", "test-idx-2-copy").get();
        logger.info("--> and try to restore these indices again");
        // Restoring into existing-but-closed indices is allowed.
        restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap")
                .setRenamePattern("(.+)").setRenameReplacement("$1-copy").setWaitForCompletion(true).execute().actionGet();
        assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));
        ensureGreen();
        assertThat(client.prepareCount("test-idx-1-copy").get().getCount(), equalTo(100L));
        assertThat(client.prepareCount("test-idx-2-copy").get().getCount(), equalTo(100L));
        logger.info("--> close indices");
        assertAcked(client.admin().indices().prepareClose("test-idx-1", "test-idx-2-copy"));
        logger.info("--> restore indices with different names");
        // Partial rename: only test-idx-2 matches "(.+-2)", becoming test-idx-2-copy;
        // test-idx-1 is restored under its original (closed) name.
        restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap")
                .setRenamePattern("(.+-2)").setRenameReplacement("$1-copy").setWaitForCompletion(true).execute().actionGet();
        assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));
        logger.info("--> try renaming indices using the same name");
        // Renaming both snapshotted indices onto one target name must be rejected.
        try {
            client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setRenamePattern("(.+)").setRenameReplacement("same-name").setWaitForCompletion(true).execute().actionGet();
            fail("Shouldn't be here");
        } catch (SnapshotRestoreException ex) {
            // Expected
        }
        logger.info("--> try renaming indices using the same name");
        // Renaming test-idx-2 onto test-idx-1, which the same restore also targets,
        // must be rejected.
        try {
            client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setRenamePattern("test-idx-2").setRenameReplacement("test-idx-1").setWaitForCompletion(true).execute().actionGet();
            fail("Shouldn't be here");
        } catch (SnapshotRestoreException ex) {
            // Expected
        }
        logger.info("--> try renaming indices using invalid index name");
        try {
            client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setIndices("test-idx-1").setRenamePattern(".+").setRenameReplacement("__WRONG__").setWaitForCompletion(true).execute().actionGet();
            fail("Shouldn't be here");
        } catch (InvalidIndexNameException ex) {
            // Expected
        }
    }
    @Test
    @TestLogging("cluster.routing.allocation.decider:TRACE")
    public void moveShardWhileSnapshottingTest() throws Exception {
        // Blocks snapshot execution on one node, relocates that node's shards away
        // while the snapshot is stalled, then unblocks and verifies the snapshot
        // still completes successfully and can be restored via a plain fs repository.
        Client client = client();
        File repositoryLocation = newTempDir(LifecycleScope.TEST);
        logger.info("--> creating repository");
        // Mock repository used here with the blockNodeWithIndex/waitForBlock/unblockNode
        // helpers to stall snapshot execution on a chosen node.
        assertAcked(client.admin().cluster().preparePutRepository("test-repo")
                .setType(MockRepositoryModule.class.getCanonicalName()).setSettings(
                        ImmutableSettings.settingsBuilder()
                                .put("location", repositoryLocation)
                                .put("random", randomAsciiOfLength(10))
                                .put("wait_after_unblock", 200)));
        // Create index on 2 nodes and make sure each node has a primary by setting no replicas
        assertAcked(prepareCreate("test-idx", 2, ImmutableSettings.builder().put("number_of_replicas", 0)));
        logger.info("--> indexing some data");
        for (int i = 0; i < 100; i++) {
            index("test-idx", "doc", Integer.toString(i), "foo", "bar" + i);
        }
        refresh();
        assertThat(client.prepareCount("test-idx").get().getCount(), equalTo(100L));
        // Pick one node and block it
        String blockedNode = blockNodeWithIndex("test-idx");
        logger.info("--> snapshot");
        // Fire the snapshot without waiting; it will stall on the blocked node.
        client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(false).setIndices("test-idx").get();
        logger.info("--> waiting for block to kick in");
        waitForBlock(blockedNode, "test-repo", TimeValue.timeValueSeconds(60));
        logger.info("--> execution was blocked on node [{}], moving shards away from this node", blockedNode);
        // Exclude the blocked node so its shards relocate while the snapshot is stalled.
        ImmutableSettings.Builder excludeSettings = ImmutableSettings.builder().put("index.routing.allocation.exclude._name", blockedNode);
        client().admin().indices().prepareUpdateSettings("test-idx").setSettings(excludeSettings).get();
        logger.info("--> unblocking blocked node");
        unblockNode(blockedNode);
        logger.info("--> waiting for completion");
        SnapshotInfo snapshotInfo = waitForCompletion("test-repo", "test-snap", TimeValue.timeValueSeconds(600));
        logger.info("Number of failed shards [{}]", snapshotInfo.shardFailures().size());
        logger.info("--> done");
        // Despite the relocation, the snapshot must finish SUCCESS with no failures.
        ImmutableList<SnapshotInfo> snapshotInfos = client().admin().cluster().prepareGetSnapshots("test-repo").setSnapshots("test-snap").get().getSnapshots();
        assertThat(snapshotInfos.size(), equalTo(1));
        assertThat(snapshotInfos.get(0).state(), equalTo(SnapshotState.SUCCESS));
        assertThat(snapshotInfos.get(0).shardFailures().size(), equalTo(0));
        logger.info("--> delete index");
        cluster().wipeIndices("test-idx");
        logger.info("--> replace mock repository with real one at the same location");
        assertAcked(client.admin().cluster().preparePutRepository("test-repo")
                .setType("fs").setSettings(ImmutableSettings.settingsBuilder().put("location", repositoryLocation)));
        logger.info("--> restore index");
        RestoreSnapshotResponse restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setWaitForCompletion(true).execute().actionGet();
        assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));
        ensureGreen();
        assertThat(client.prepareCount("test-idx").get().getCount(), equalTo(100L));
    }
@Test
@TestLogging("cluster.routing.allocation.decider:TRACE")
public void deleteRepositoryWhileSnapshottingTest() throws Exception {
Client client = client();
File repositoryLocation = newTempDir(LifecycleScope.TEST);
logger.info("--> creating repository");
PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo")
.setType(MockRepositoryModule.class.getCanonicalName()).setSettings(
ImmutableSettings.settingsBuilder()
.put("location", repositoryLocation)
.put("random", randomAsciiOfLength(10))
.put("wait_after_unblock", 200)
).get();
assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
// Create index on 2 nodes and make sure each node has a primary by setting no replicas
assertAcked(prepareCreate("test-idx", 2, ImmutableSettings.builder().put("number_of_replicas", 0)));
logger.info("--> indexing some data");
for (int i = 0; i < 100; i++) {
index("test-idx", "doc", Integer.toString(i), "foo", "bar" + i);
}
refresh();
assertThat(client.prepareCount("test-idx").get().getCount(), equalTo(100L));
// Pick one node and block it
String blockedNode = blockNodeWithIndex("test-idx");
logger.info("--> snapshot");
client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(false).setIndices("test-idx").get();
logger.info("--> waiting for block to kick in");
waitForBlock(blockedNode, "test-repo", TimeValue.timeValueSeconds(60));
logger.info("--> execution was blocked on node [{}], trying to delete repository", blockedNode);
try {
client.admin().cluster().prepareDeleteRepository("test-repo").execute().get();
fail("shouldn't be able to delete in-use repository");
} catch (Exception ex) {
logger.info("--> in-use repository deletion failed");
}
logger.info("--> trying to move repository to another location");
try {
client.admin().cluster().preparePutRepository("test-repo")
.setType("fs").setSettings(ImmutableSettings.settingsBuilder().put("location", new File(repositoryLocation, "test"))
).get();
fail("shouldn't be able to replace in-use repository");
} catch (Exception ex) {
logger.info("--> in-use repository replacement failed");
}
logger.info("--> trying to create a repository with different name");
assertAcked(client.admin().cluster().preparePutRepository("test-repo-2")
.setType("fs").setSettings(ImmutableSettings.settingsBuilder().put("location", new File(repositoryLocation, "test"))));
logger.info("--> unblocking blocked node");
unblockNode(blockedNode);
logger.info("--> waiting for completion");
SnapshotInfo snapshotInfo = waitForCompletion("test-repo", "test-snap", TimeValue.timeValueSeconds(600));
logger.info("Number of failed shards [{}]", snapshotInfo.shardFailures().size());
logger.info("--> done");
ImmutableList<SnapshotInfo> snapshotInfos = client().admin().cluster().prepareGetSnapshots("test-repo").setSnapshots("test-snap").get().getSnapshots();
assertThat(snapshotInfos.size(), equalTo(1));
assertThat(snapshotInfos.get(0).state(), equalTo(SnapshotState.SUCCESS));
assertThat(snapshotInfos.get(0).shardFailures().size(), equalTo(0));
logger.info("--> delete index");
cluster().wipeIndices("test-idx");
logger.info("--> replace mock repository with real one at the same location");
assertAcked(client.admin().cluster().preparePutRepository("test-repo")
.setType("fs").setSettings(ImmutableSettings.settingsBuilder().put("location", repositoryLocation)));
logger.info("--> restore index");
RestoreSnapshotResponse restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setWaitForCompletion(true).execute().actionGet();
assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));
ensureGreen();
assertThat(client.prepareCount("test-idx").get().getCount(), equalTo(100L));
}
@Test
public void urlRepositoryTest() throws Exception {
Client client = client();
logger.info("--> creating repository");
File repositoryLocation = newTempDir(LifecycleScope.SUITE);
assertAcked(client.admin().cluster().preparePutRepository("test-repo")
.setType("fs").setSettings(ImmutableSettings.settingsBuilder()
.put("location", repositoryLocation)
.put("compress", randomBoolean())
.put("chunk_size", randomIntBetween(100, 1000))));
createIndex("test-idx");
ensureGreen();
logger.info("--> indexing some data");
for (int i = 0; i < 100; i++) {
index("test-idx", "doc", Integer.toString(i), "foo", "bar" + i);
}
refresh();
assertThat(client.prepareCount("test-idx").get().getCount(), equalTo(100L));
logger.info("--> snapshot");
CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("test-idx").get();
assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0));
assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()));
assertThat(client.admin().cluster().prepareGetSnapshots("test-repo").setSnapshots("test-snap").get().getSnapshots().get(0).state(), equalTo(SnapshotState.SUCCESS));
logger.info("--> delete index");
cluster().wipeIndices("test-idx");
logger.info("--> create read-only URL repository");
assertAcked(client.admin().cluster().preparePutRepository("url-repo")
.setType("url").setSettings(ImmutableSettings.settingsBuilder()
.put("url", repositoryLocation.toURI().toURL())
.put("list_directories", randomBoolean())));
logger.info("--> restore index after deletion");
RestoreSnapshotResponse restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("url-repo", "test-snap").setWaitForCompletion(true).setIndices("test-idx").execute().actionGet();
assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));
ensureGreen();
assertThat(client.prepareCount("test-idx").get().getCount(), equalTo(100L));
logger.info("--> list available shapshots");
GetSnapshotsResponse getSnapshotsResponse = client.admin().cluster().prepareGetSnapshots("url-repo").get();
assertThat(getSnapshotsResponse.getSnapshots(), notNullValue());
assertThat(getSnapshotsResponse.getSnapshots().size(), equalTo(1));
logger.info("--> delete snapshot");
DeleteSnapshotResponse deleteSnapshotResponse = client.admin().cluster().prepareDeleteSnapshot("test-repo", "test-snap").get();
assertAcked(deleteSnapshotResponse);
logger.info("--> list available shapshot again, no snapshots should be returned");
getSnapshotsResponse = client.admin().cluster().prepareGetSnapshots("url-repo").get();
assertThat(getSnapshotsResponse.getSnapshots(), notNullValue());
assertThat(getSnapshotsResponse.getSnapshots().size(), equalTo(0));
}
@Test
public void throttlingTest() throws Exception {
    // Verifies that the "max_snapshot_bytes_per_sec" / "max_restore_bytes_per_sec"
    // repository settings actually throttle, by checking the accumulated
    // throttle-pause counters on the data nodes after a snapshot + restore.
    Client client = client();
    logger.info("--> creating repository");
    File repositoryLocation = newTempDir(LifecycleScope.SUITE);
    // Randomly enable throttling for snapshot and/or restore so both the
    // throttled and unthrottled code paths get exercised across runs.
    boolean throttleSnapshot = randomBoolean();
    boolean throttleRestore = randomBoolean();
    // "0" disables throttling; "2.5k" is slow enough that 100 docs cause pauses.
    assertAcked(client.admin().cluster().preparePutRepository("test-repo")
            .setType("fs").setSettings(ImmutableSettings.settingsBuilder()
                    .put("location", repositoryLocation)
                    .put("compress", randomBoolean())
                    .put("chunk_size", randomIntBetween(100, 1000))
                    .put("max_restore_bytes_per_sec", throttleRestore ? "2.5k" : "0")
                    .put("max_snapshot_bytes_per_sec", throttleSnapshot ? "2.5k" : "0")));
    createIndex("test-idx");
    ensureGreen();
    logger.info("--> indexing some data");
    for (int i = 0; i < 100; i++) {
        index("test-idx", "doc", Integer.toString(i), "foo", "bar" + i);
    }
    refresh();
    assertThat(client.prepareCount("test-idx").get().getCount(), equalTo(100L));
    logger.info("--> snapshot");
    CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("test-idx").get();
    assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0));
    assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()));
    logger.info("--> delete index");
    cluster().wipeIndices("test-idx");
    logger.info("--> restore index");
    RestoreSnapshotResponse restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setWaitForCompletion(true).execute().actionGet();
    assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));
    ensureGreen();
    assertThat(client.prepareCount("test-idx").get().getCount(), equalTo(100L));
    // Sum the throttle pause times across all data nodes; a total > 0 proves
    // the rate limiter kicked in at least once.
    long snapshotPause = 0L;
    long restorePause = 0L;
    for (RepositoriesService repositoriesService : internalCluster().getDataNodeInstances(RepositoriesService.class)) {
        snapshotPause += repositoriesService.repository("test-repo").snapshotThrottleTimeInNanos();
        restorePause += repositoriesService.repository("test-repo").restoreThrottleTimeInNanos();
    }
    // Throttling must have happened exactly when it was configured, and
    // never when it was disabled.
    if (throttleSnapshot) {
        assertThat(snapshotPause, greaterThan(0L));
    } else {
        assertThat(snapshotPause, equalTo(0L));
    }
    if (throttleRestore) {
        assertThat(restorePause, greaterThan(0L));
    } else {
        assertThat(restorePause, equalTo(0L));
    }
}
@Test
@TestLogging("cluster.routing.allocation.decider:TRACE")
public void snapshotStatusTest() throws Exception {
    // Uses a mock repository that blocks one data node mid-snapshot so the
    // in-progress snapshot-status APIs can be asserted; the node is then
    // unblocked and the final status is compared with the completed
    // SnapshotInfo. Finally, a missing snapshot name must raise
    // SnapshotMissingException.
    Client client = client();
    File repositoryLocation = newTempDir(LifecycleScope.TEST);
    logger.info("--> creating repository");
    PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo")
            .setType(MockRepositoryModule.class.getCanonicalName()).setSettings(
                    ImmutableSettings.settingsBuilder()
                            .put("location", repositoryLocation)
                            .put("random", randomAsciiOfLength(10))
                            .put("wait_after_unblock", 200)
            ).get();
    assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
    // Create index on 2 nodes and make sure each node has a primary by setting no replicas
    assertAcked(prepareCreate("test-idx", 2, ImmutableSettings.builder().put("number_of_replicas", 0)));
    logger.info("--> indexing some data");
    for (int i = 0; i < 100; i++) {
        index("test-idx", "doc", Integer.toString(i), "foo", "bar" + i);
    }
    refresh();
    assertThat(client.prepareCount("test-idx").get().getCount(), equalTo(100L));
    // Pick one node and block it
    String blockedNode = blockNodeWithIndex("test-idx");
    logger.info("--> snapshot");
    // Start the snapshot without waiting so it can be inspected while running.
    client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(false).setIndices("test-idx").get();
    logger.info("--> waiting for block to kick in");
    waitForBlock(blockedNode, "test-repo", TimeValue.timeValueSeconds(60));
    logger.info("--> execution was blocked on node [{}], checking snapshot status with specified repository and snapshot", blockedNode);
    SnapshotsStatusResponse response = client.admin().cluster().prepareSnapshotStatus("test-repo").execute().actionGet();
    assertThat(response.getSnapshots().size(), equalTo(1));
    SnapshotStatus snapshotStatus = response.getSnapshots().get(0);
    assertThat(snapshotStatus.getState(), equalTo(SnapshotMetaData.State.STARTED));
    // We blocked the node during data write operation, so at least one shard snapshot should be in STARTED stage
    assertThat(snapshotStatus.getShardsStats().getStartedShards(), greaterThan(0));
    // Every shard that is actively snapshotting must report the node it runs on.
    for (SnapshotIndexShardStatus shardStatus : snapshotStatus.getIndices().get("test-idx")) {
        if (shardStatus.getStage() == SnapshotIndexShardStage.STARTED) {
            assertThat(shardStatus.getNodeId(), notNullValue());
        }
    }
    // Same assertions again, but through the "all currently running snapshots"
    // form of the API (no repository/snapshot specified).
    // NOTE(review): the 'blockedNode' argument below has no '{}' placeholder
    // in the message and is therefore ignored by the logger.
    logger.info("--> checking snapshot status for all currently running and snapshot with empty repository", blockedNode);
    response = client.admin().cluster().prepareSnapshotStatus().execute().actionGet();
    assertThat(response.getSnapshots().size(), equalTo(1));
    snapshotStatus = response.getSnapshots().get(0);
    assertThat(snapshotStatus.getState(), equalTo(SnapshotMetaData.State.STARTED));
    // We blocked the node during data write operation, so at least one shard snapshot should be in STARTED stage
    assertThat(snapshotStatus.getShardsStats().getStartedShards(), greaterThan(0));
    for (SnapshotIndexShardStatus shardStatus : snapshotStatus.getIndices().get("test-idx")) {
        if (shardStatus.getStage() == SnapshotIndexShardStage.STARTED) {
            assertThat(shardStatus.getNodeId(), notNullValue());
        }
    }
    logger.info("--> unblocking blocked node");
    unblockNode(blockedNode);
    SnapshotInfo snapshotInfo = waitForCompletion("test-repo", "test-snap", TimeValue.timeValueSeconds(600));
    logger.info("Number of failed shards [{}]", snapshotInfo.shardFailures().size());
    logger.info("--> done");
    // After completion, the per-shard status must be consistent with the
    // final SnapshotInfo. (Same stray-argument note as above.)
    logger.info("--> checking snapshot status again after snapshot is done", blockedNode);
    response = client.admin().cluster().prepareSnapshotStatus("test-repo").addSnapshots("test-snap").execute().actionGet();
    snapshotStatus = response.getSnapshots().get(0);
    assertThat(snapshotStatus.getIndices().size(), equalTo(1));
    SnapshotIndexStatus indexStatus = snapshotStatus.getIndices().get("test-idx");
    assertThat(indexStatus, notNullValue());
    assertThat(indexStatus.getShardsStats().getInitializingShards(), equalTo(0));
    assertThat(indexStatus.getShardsStats().getFailedShards(), equalTo(snapshotInfo.failedShards()));
    assertThat(indexStatus.getShardsStats().getDoneShards(), equalTo(snapshotInfo.successfulShards()));
    assertThat(indexStatus.getShards().size(), equalTo(snapshotInfo.totalShards()));
    // With no snapshot running any more, the "currently running" form of the
    // API must return nothing.
    logger.info("--> checking snapshot status after it is done with empty repository", blockedNode);
    response = client.admin().cluster().prepareSnapshotStatus().execute().actionGet();
    assertThat(response.getSnapshots().size(), equalTo(0));
    // Asking for a snapshot that does not exist must fail loudly.
    try {
        client.admin().cluster().prepareSnapshotStatus("test-repo").addSnapshots("test-snap-doesnt-exist").execute().actionGet();
        fail();
    } catch (SnapshotMissingException ex) {
        // Expected
    }
}
@Test
public void snapshotRelocatingPrimary() throws Exception {
    // Verifies that a snapshot started while primary shards are actively
    // relocating still completes with SUCCESS and no shard failures.
    Client client = client();
    logger.info("--> creating repository");
    assertAcked(client.admin().cluster().preparePutRepository("test-repo")
            .setType("fs").setSettings(ImmutableSettings.settingsBuilder()
                    .put("location", newTempDir(LifecycleScope.SUITE))
                    .put("compress", randomBoolean())
                    .put("chunk_size", randomIntBetween(100, 1000))));
    // Create index on 1 nodes and make sure each node has a primary by setting no replicas
    assertAcked(prepareCreate("test-idx", 1, ImmutableSettings.builder().put("number_of_replicas", 0)));
    logger.info("--> indexing some data");
    for (int i = 0; i < 100; i++) {
        index("test-idx", "doc", Integer.toString(i), "foo", "bar" + i);
    }
    refresh();
    assertThat(client.prepareCount("test-idx").get().getCount(), equalTo(100L));
    // Update settings to make sure that relocation is slow so we can start snapshot before relocation is finished
    assertAcked(client.admin().indices().prepareUpdateSettings("test-idx").setSettings(ImmutableSettings.builder()
            .put(AbstractIndexStore.INDEX_STORE_THROTTLE_TYPE, "all")
            .put(AbstractIndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC, 100)
    ));
    logger.info("--> start relocations");
    // Raising the allowed node count for the index triggers shard relocation.
    allowNodes("test-idx", internalCluster().numDataNodes());
    logger.info("--> wait for relocations to start");
    // NOTE(review): the boolean result is ignored — if relocation has not
    // started within 300ms the test proceeds anyway (best effort).
    waitForRelocationsToStart("test-idx", TimeValue.timeValueMillis(300));
    logger.info("--> snapshot");
    // Start the snapshot without waiting, while relocation is in flight.
    client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(false).setIndices("test-idx").get();
    // Update settings to back to normal
    assertAcked(client.admin().indices().prepareUpdateSettings("test-idx").setSettings(ImmutableSettings.builder()
            .put(AbstractIndexStore.INDEX_STORE_THROTTLE_TYPE, "node")
    ));
    logger.info("--> wait for snapshot to complete");
    SnapshotInfo snapshotInfo = waitForCompletion("test-repo", "test-snap", TimeValue.timeValueSeconds(600));
    assertThat(snapshotInfo.state(), equalTo(SnapshotState.SUCCESS));
    assertThat(snapshotInfo.shardFailures().size(), equalTo(0));
    logger.info("--> done");
}
/**
 * Polls until the given index exists or the timeout elapses.
 *
 * @return true if the index appeared within the timeout, false otherwise
 */
private boolean waitForIndex(final String index, TimeValue timeout) throws InterruptedException {
    // Name the condition instead of inlining the anonymous class in the call.
    Predicate<Object> indexExists = new Predicate<Object>() {
        @Override
        public boolean apply(Object ignored) {
            return client().admin().indices().prepareExists(index).execute().actionGet().isExists();
        }
    };
    return awaitBusy(indexExists, timeout.millis(), TimeUnit.MILLISECONDS);
}
/**
 * Polls until at least one shard of the given index is relocating, or the
 * timeout elapses.
 *
 * @return true if relocation started within the timeout, false otherwise
 */
private boolean waitForRelocationsToStart(final String index, TimeValue timeout) throws InterruptedException {
    // Name the condition instead of inlining the anonymous class in the call.
    Predicate<Object> relocationStarted = new Predicate<Object>() {
        @Override
        public boolean apply(Object ignored) {
            return client().admin().cluster().prepareHealth(index).execute().actionGet().getRelocatingShards() > 0;
        }
    };
    return awaitBusy(relocationStarted, timeout.millis(), TimeUnit.MILLISECONDS);
}
}
| |
package io.vertx.pgclient.data;
import io.vertx.codegen.annotations.DataObject;
import io.vertx.core.json.JsonObject;
/**
* Postgres Interval is date and time based
* such as 120 years 3 months 332 days 20 hours 20 minutes 20.999999 seconds
*
* @author <a href="mailto:emad.albloushi@gmail.com">Emad Alblueshi</a>
*/
public class Interval {
private int years, months, days, hours, minutes, seconds, microseconds;
public Interval() {
this(0, 0, 0, 0, 0, 0, 0);
}
public Interval(int years, int months, int days, int hours, int minutes, int seconds, int microseconds) {
this.years = years;
this.months = months;
this.days = days;
this.hours = hours;
this.minutes = minutes;
this.seconds = seconds;
this.microseconds = microseconds;
}
public Interval(int years, int months, int days, int hours, int minutes, int seconds) {
this(years, months, days, hours, minutes, seconds, 0);
}
public Interval(int years, int months, int days, int hours, int minutes) {
this(years, months, days, hours, minutes, 0);
}
public Interval(int years, int months, int days, int hours) {
this(years, months, days, hours, 0);
}
public Interval(int years, int months, int days) {
this(years, months, days, 0);
}
public Interval(int years, int months) {
this(years, months, 0);
}
public Interval(int years) {
this(years, 0);
}
public static Interval of() {
return new Interval();
}
public static Interval of(int years, int months, int days, int hours, int minutes, int seconds, int microseconds) {
return new Interval(years, months, days, hours, minutes, seconds, microseconds);
}
public static Interval of(int years, int months, int days, int hours, int minutes, int seconds) {
return new Interval(years, months, days, hours, minutes, seconds);
}
public static Interval of(int years, int months, int days, int hours, int minutes) {
return new Interval(years, months, days, hours, minutes);
}
public static Interval of(int years, int months, int days, int hours) {
return new Interval(years, months, days, hours);
}
public static Interval of(int years, int months, int days) {
return new Interval(years, months, days);
}
public static Interval of(int years, int months) {
return new Interval(years, months);
}
public static Interval of(int years) {
return new Interval(years);
}
public Interval years(int years) {
this.years = years;
return this;
}
public Interval months(int months) {
this.months = months;
return this;
}
public Interval days(int days) {
this.days = days;
return this;
}
public Interval hours(int hours) {
this.hours = hours;
return this;
}
public Interval minutes(int minutes) {
this.minutes = minutes;
return this;
}
public Interval seconds(int seconds) {
this.seconds = seconds;
return this;
}
public Interval microseconds(int microseconds) {
this.microseconds = microseconds;
return this;
}
public int getYears() {
return years;
}
public void setYears(int years) {
this.years = years;
}
public int getMonths() {
return months;
}
public void setMonths(int months) {
this.months = months;
}
public int getDays() {
return days;
}
public void setDays(int days) {
this.days = days;
}
public int getHours() {
return hours;
}
public void setHours(int hours) {
this.hours = hours;
}
public int getMinutes() {
return minutes;
}
public void setMinutes(int minutes) {
this.minutes = minutes;
}
public int getSeconds() {
return seconds;
}
public void setSeconds(int seconds) {
this.seconds = seconds;
}
public int getMicroseconds() {
return microseconds;
}
public void setMicroseconds(int microseconds) {
this.microseconds = microseconds;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Interval interval = (Interval) o;
return years == interval.years &&
months == interval.months &&
days == interval.days &&
hours == interval.hours &&
minutes == interval.minutes &&
seconds == interval.seconds &&
microseconds == interval.microseconds;
}
@Override
public int hashCode() {
int result = years;
result = 31 * result + months;
result = 31 * result + days;
result = 31 * result + hours;
result = 31 * result + minutes;
result = 31 * result + seconds;
result = 31 * result + microseconds;
return result;
}
@Override
public String toString() {
return "Interval( " + years + " years " + months + " months " + days + " days " + hours + " hours " +
minutes + " minutes " + seconds + (microseconds == 0 ? "" : "." + Math.abs(microseconds)) + " seconds )";
}
}
| |
package org.elasql.procedure.tpart;
import java.sql.Connection;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.elasql.cache.CachedRecord;
import org.elasql.cache.tpart.CachedEntryKey;
import org.elasql.cache.tpart.TPartCacheMgr;
import org.elasql.cache.tpart.TPartTxLocalCache;
import org.elasql.remote.groupcomm.TupleSet;
import org.elasql.schedule.tpart.sink.PushInfo;
import org.elasql.schedule.tpart.sink.SunkPlan;
import org.elasql.server.Elasql;
import org.elasql.sql.PrimaryKey;
import org.elasql.storage.tx.concurrency.ConservativeOrderedCcMgr;
import org.elasql.storage.tx.recovery.DdRecoveryMgr;
import org.vanilladb.core.remote.storedprocedure.SpResultSet;
import org.vanilladb.core.sql.Constant;
import org.vanilladb.core.sql.storedprocedure.StoredProcedure;
import org.vanilladb.core.sql.storedprocedure.StoredProcedureParamHelper;
import org.vanilladb.core.storage.tx.Transaction;
/**
 * Base class for stored procedures executed under the T-Part scheduling
 * model. A subclass declares its read/write key sets in
 * {@link #prepareKeys()} and implements the transaction logic in
 * {@link #executeSql(Map)}; this class drives conservative locking, local
 * record caching, pushing records to remote nodes according to the assigned
 * {@link SunkPlan}, and commit/rollback.
 *
 * @param <H> the parameter-helper type used to parse procedure parameters
 */
public abstract class TPartStoredProcedure<H extends StoredProcedureParamHelper>
        extends StoredProcedure<H> {

    // Classification of a procedure as seen by the T-Part scheduler.
    public static enum ProcedureType {
        NOP, NORMAL, UTILITY, MIGRATION
    }

    // Protected resource
    protected long txNum; // transaction number assigned to this procedure
    protected H paramHelper; // parses and holds the procedure parameters
    protected int localNodeId; // id of the server executing this instance
    protected Transaction tx; // created in decideExceutionPlan()

    // Private resource
    private Set<PrimaryKey> readKeys = new HashSet<PrimaryKey>();
    private Set<PrimaryKey> writeKeys = new HashSet<PrimaryKey>();
    private SunkPlan plan; // the sunk execution plan decided by the scheduler
    private TPartTxLocalCache cache; // transaction-local record cache
    private List<CachedEntryKey> cachedEntrySet = new ArrayList<CachedEntryKey>();
    private boolean isCommitted = false; // set only after tx.commit() succeeds

    /**
     * Creates the procedure for the given transaction number.
     *
     * @param txNum       the transaction number
     * @param paramHelper the parameter helper; must not be null
     */
    public TPartStoredProcedure(long txNum, H paramHelper) {
        super(paramHelper);
        if (paramHelper == null)
            throw new NullPointerException("paramHelper should not be null");
        this.txNum = txNum;
        this.paramHelper = paramHelper;
        this.localNodeId = Elasql.serverId();
    }

    /** @return the weight used by the scheduler to balance load */
    public abstract double getWeight();

    // Subclasses populate the read/write key sets here (via addReadKey etc.).
    protected abstract void prepareKeys();

    // Subclasses implement the actual transaction logic here, using the
    // records already read into 'readings'.
    protected abstract void executeSql(Map<PrimaryKey, CachedRecord> readings);

    @Override
    public void prepare(Object... pars) {
        // prepare parameters
        paramHelper.prepareParameters(pars);
        // prepare keys
        prepareKeys();
    }

    /**
     * Accepts the sunk plan decided by the scheduler, creates the underlying
     * transaction and the local cache, and books the conservative locks.
     * May be called only once per procedure.
     *
     * NOTE(review): the method name misspells "Execution"; renaming would
     * break existing callers, so it is kept as-is.
     *
     * @param p the sunk execution plan
     */
    public void decideExceutionPlan(SunkPlan p) {
        if (plan != null)
            throw new RuntimeException("The execution plan has been set");
        // Set plan
        plan = p;
        // create a transaction
        tx = Elasql.txMgr().newTransaction(Connection.TRANSACTION_SERIALIZABLE, plan.isReadOnly(), txNum);
        tx.addLifecycleListener(new DdRecoveryMgr(tx.getTransactionNumber()));
        // create a local cache
        cache = new TPartTxLocalCache(tx);
        // register locks
        bookConservativeLocks();
    }

    /**
     * Books (pre-registers) every read and write lock this transaction will
     * need so the conservative concurrency manager can later grant them all
     * at once in {@link #execute()}.
     */
    public void bookConservativeLocks() {
        ConservativeOrderedCcMgr ccMgr = (ConservativeOrderedCcMgr) tx.concurrencyMgr();
        ccMgr.bookReadKeys(plan.getSinkReadingInfo());
        // Records pushed out from the local sink are also read locally.
        for (Set<PushInfo> infos : plan.getSinkPushingInfo().values())
            for (PushInfo info : infos)
                ccMgr.bookReadKey(info.getRecord());
        ccMgr.bookWriteKeys(plan.getLocalWriteBackInfo());
        ccMgr.bookWriteKeys(plan.getCacheDeletions());
    }

    // Blocks until all previously booked locks are granted.
    private void getConservativeLocks() {
        ConservativeOrderedCcMgr ccMgr = (ConservativeOrderedCcMgr) tx.concurrencyMgr();
        ccMgr.requestLocks();
    }

    /**
     * Acquires the booked locks, runs the transaction logic and commits.
     * Any exception causes a rollback; the returned result set reports
     * whether the transaction committed.
     */
    @Override
    public SpResultSet execute() {
        try {
            // Timer.getLocalTimer().startComponentTimer("Get locks");
            getConservativeLocks();
            // Timer.getLocalTimer().stopComponentTimer("Get locks");
            executeTransactionLogic();
            tx.commit();
            isCommitted = true;
        } catch (Exception e) {
            // Dump the failure together with the plan for post-mortem, then
            // roll back; isCommitted stays false so the caller sees the abort.
            e.printStackTrace();
            System.out.println("Tx." + txNum + "'s plan: " + plan);
            tx.rollback();
        }
        return new SpResultSet(
            isCommitted,
            paramHelper.getResultSetSchema(),
            paramHelper.newResultSetRecord()
        );
    }

    @Override
    protected void executeSql() {
        // Do nothing
        // Because we have overrided execute(), there is no need
        // to implement this method.
    }

    /** @return true if this node is the master (executing) node of the plan */
    public boolean isMaster() {
        return plan.isHereMaster();
    }

    /** @return the scheduler classification; subclasses may override */
    public ProcedureType getProcedureType() {
        return ProcedureType.NORMAL;
    }

    /** @return the set of primary keys this procedure reads */
    public Set<PrimaryKey> getReadSet() {
        return readKeys;
    }

    /** @return the set of primary keys this procedure writes */
    public Set<PrimaryKey> getWriteSet() {
        return writeKeys;
    }

    /** @return true if the parameters mark this procedure read-only */
    public boolean isReadOnly() {
        return paramHelper.isReadOnly();
    }

    /** @return the transaction number */
    public long getTxNum() {
        return txNum;
    }

    /** @return the sunk plan, or null if it has not been decided yet */
    public SunkPlan getSunkPlan() {
        return plan;
    }

    // Called by subclasses inside prepareKeys() to declare a read.
    protected void addReadKey(PrimaryKey readKey) {
        readKeys.add(readKey);
    }

    // Called by subclasses inside prepareKeys() to declare a write.
    protected void addWriteKey(PrimaryKey writeKey) {
        writeKeys.add(writeKey);
    }

    // Inserted keys are treated as writes for locking purposes.
    protected void addInsertKey(PrimaryKey insertKey) {
        writeKeys.add(insertKey);
    }

    // Updates a record through the transaction-local cache.
    protected void update(PrimaryKey key, CachedRecord rec) {
        cache.update(key, rec);
    }

    // Inserts a record through the transaction-local cache.
    protected void insert(PrimaryKey key, Map<String, Constant> fldVals) {
        cache.insert(key, fldVals);
    }

    // Deletes a record through the transaction-local cache.
    protected void delete(PrimaryKey key) {
        cache.delete(key);
    }

    /**
     * Core execution. On the master node: read every required record (from
     * the local sink or from other transactions' caches), run the user SQL,
     * then push results to remote nodes per the plan. On a non-master node
     * with sink pushes: read the requested records from the local sink and
     * push them to their destinations. Finally flush the cache (writes to
     * following transactions and local write-backs).
     */
    private void executeTransactionLogic() {
        int sinkId = plan.sinkProcessId();
        // Timer timer = Timer.getLocalTimer();
        if (plan.isHereMaster()) {
            Map<PrimaryKey, CachedRecord> readings = new HashMap<PrimaryKey, CachedRecord>();
            // Read the records from the local sink
            // timer.startComponentTimer("Read from sink");
            for (PrimaryKey k : plan.getSinkReadingInfo()) {
                readings.put(k, cache.readFromSink(k));
            }
            // timer.stopComponentTimer("Read from sink");
            // Read all needed records
            // timer.startComponentTimer("Read from cache");
            for (PrimaryKey k : plan.getReadSet()) {
                if (!readings.containsKey(k)) {
                    // Records not in the sink come from the cache of the
                    // transaction that produced them.
                    long srcTxNum = plan.getReadSrcTxNum(k);
                    readings.put(k, cache.read(k, srcTxNum));
                    cachedEntrySet.add(new CachedEntryKey(k, srcTxNum, txNum));
                }
            }
            // timer.stopComponentTimer("Read from cache");
            // Execute the SQLs defined by users
            // timer.startComponentTimer("Execute SQL");
            executeSql(readings);
            // timer.stopComponentTimer("Execute SQL");
            // Push the data to where they need at
            // timer.startComponentTimer("Push");
            Map<Integer, Set<PushInfo>> pi = plan.getPushingInfo();
            if (pi != null) {
                // read from local storage and send to remote site
                for (Entry<Integer, Set<PushInfo>> entry : pi.entrySet()) {
                    int targetServerId = entry.getKey();
                    // Construct a tuple set
                    TupleSet rs = new TupleSet(sinkId);
                    for (PushInfo pushInfo : entry.getValue()) {
                        CachedRecord rec = cache.read(pushInfo.getRecord(), txNum);
                        cachedEntrySet.add(new CachedEntryKey(pushInfo.getRecord(), txNum, pushInfo.getDestTxNum()));
                        rs.addTuple(pushInfo.getRecord(), txNum, pushInfo.getDestTxNum(), rec);
                    }
                    // Push to the remote
                    Elasql.connectionMgr().pushTupleSet(targetServerId, rs);
                }
            }
            // timer.stopComponentTimer("Push");
        } else if (plan.hasSinkPush()) {
            // Non-master node: only serve records out of the local sink.
            long sinkTxnNum = TPartCacheMgr.toSinkId(Elasql.serverId());
            for (Entry<Integer, Set<PushInfo>> entry : plan.getSinkPushingInfo().entrySet()) {
                int targetServerId = entry.getKey();
                TupleSet rs = new TupleSet(sinkId);
                // Migration transactions
                // if (getProcedureType() == ProcedureType.MIGRATION) {
                // long destTxNum = -1;
                //
                // Set<RecordKey> keys = new HashSet<RecordKey>();
                // for (PushInfo pushInfo : entry.getValue()) {
                // keys.add(pushInfo.getRecord());
                // // XXX: Not good
                // if (destTxNum == -1)
                // destTxNum = pushInfo.getDestTxNum();
                // }
                //
                // Map<RecordKey, CachedRecord> recs = cache.batchReadFromSink(keys);
                //
                // for (Entry<RecordKey, CachedRecord> keyRecPair : recs.entrySet()) {
                // RecordKey key = keyRecPair.getKey();
                // CachedRecord rec = keyRecPair.getValue();
                // rec.setSrcTxNum(sinkTxnNum);
                // rs.addTuple(key, sinkTxnNum, destTxNum, rec);
                // }
                //
                // } else {
                // Normal transactions
                // timer.startComponentTimer("Read from sink");
                for (PushInfo pushInfo : entry.getValue()) {
                    CachedRecord rec = cache.readFromSink(pushInfo.getRecord());
                    // TODO deal with null value record
                    rec.setSrcTxNum(sinkTxnNum);
                    rs.addTuple(pushInfo.getRecord(), sinkTxnNum, pushInfo.getDestTxNum(), rec);
                }
                // timer.stopComponentTimer("Read from sink");
                // }
                // timer.startComponentTimer("Push");
                Elasql.connectionMgr().pushTupleSet(targetServerId, rs);
                // timer.stopComponentTimer("Push");
            }
        }
        // Flush the cached data
        // including the writes to the next transaction and local write backs
        cache.flush(plan, cachedEntrySet);
    }
}
| |
/*
* Copyright (c) 2009-2012 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jme3.animation;
import com.jme3.export.*;
import com.jme3.scene.Mesh;
import com.jme3.scene.VertexBuffer;
import com.jme3.scene.VertexBuffer.Type;
import com.jme3.util.TempVars;
import java.io.IOException;
import java.nio.FloatBuffer;
/**
 * A single track of pose animation associated with a certain mesh.
 */
@Deprecated
public final class PoseTrack implements Track {

    private int targetMeshIndex; // index of the mesh this track animates
    private PoseFrame[] frames;  // keyframes, parallel to 'times'
    private float[] times;       // keyframe times, ascending

    /**
     * A single keyframe: a set of poses and a blend weight for each pose.
     */
    public static class PoseFrame implements Savable, Cloneable {

        Pose[] poses;    // poses blended in this frame
        float[] weights; // per-pose blend weights, parallel to 'poses'

        public PoseFrame(Pose[] poses, float[] weights) {
            this.poses = poses;
            this.weights = weights;
        }

        /**
         * Serialization-only. Do not use.
         */
        public PoseFrame()
        {
        }

        /**
         * This method creates a clone of the current object.
         * @return a clone of the current object
         */
        @Override
        public PoseFrame clone() {
            try {
                PoseFrame result = (PoseFrame) super.clone();
                // NOTE(review): weights.clone() would NPE if this frame was
                // deserialized with a null weights array (poses is null-guarded
                // below, weights is not) — confirm whether that state is reachable.
                result.weights = this.weights.clone();
                if (this.poses != null) {
                    result.poses = new Pose[this.poses.length];
                    for (int i = 0; i < this.poses.length; ++i) {
                        result.poses[i] = this.poses[i].clone();
                    }
                }
                return result;
            } catch (CloneNotSupportedException e) {
                // Cloneable is implemented, so this is unreachable.
                throw new AssertionError();
            }
        }

        public void write(JmeExporter e) throws IOException {
            OutputCapsule out = e.getCapsule(this);
            out.write(poses, "poses", null);
            out.write(weights, "weights", null);
        }

        public void read(JmeImporter i) throws IOException {
            InputCapsule in = i.getCapsule(this);
            weights = in.readFloatArray("weights", null);
            // Savable arrays come back untyped; copy them into a Pose[].
            Savable[] readSavableArray = in.readSavableArray("poses", null);
            if (readSavableArray != null) {
                poses = new Pose[readSavableArray.length];
                System.arraycopy(readSavableArray, 0, poses, 0, readSavableArray.length);
            }
        }
    }

    /**
     * Creates a pose track for the given mesh.
     *
     * @param targetMeshIndex index of the target mesh
     * @param times           keyframe times
     * @param frames          keyframes, one per entry of 'times'
     */
    public PoseTrack(int targetMeshIndex, float[] times, PoseFrame[] frames){
        this.targetMeshIndex = targetMeshIndex;
        this.times = times;
        this.frames = frames;
    }

    /**
     * Serialization-only. Do not use.
     */
    public PoseTrack()
    {
    }

    // Applies one keyframe to the mesh's position buffer, scaling each pose
    // by its frame weight times 'weight', then re-uploads the buffer.
    private void applyFrame(Mesh target, int frameIndex, float weight){
        PoseFrame frame = frames[frameIndex];
        VertexBuffer pb = target.getBuffer(Type.Position);
        for (int i = 0; i < frame.poses.length; i++){
            Pose pose = frame.poses[i];
            float poseWeight = frame.weights[i] * weight;
            pose.apply(poseWeight, (FloatBuffer) pb.getData());
        }
        // force to re-upload data to gpu
        pb.updateData(pb.getData());
    }

    // Currently a no-op: the original frame-blending logic is preserved below
    // (commented out) until MeshControl can supply the target meshes.
    public void setTime(float time, float weight, AnimControl control, AnimChannel channel, TempVars vars) {
        // TODO: When MeshControl is created, it will gather targets
        // list automatically which is then retrieved here.
        /*
        Mesh target = targets[targetMeshIndex];
        if (time < times[0]) {
            applyFrame(target, 0, weight);
        } else if (time > times[times.length - 1]) {
            applyFrame(target, times.length - 1, weight);
        } else {
            int startFrame = 0;
            for (int i = 0; i < times.length; i++) {
                if (times[i] < time) {
                    startFrame = i;
                }
            }
            int endFrame = startFrame + 1;
            float blend = (time - times[startFrame]) / (times[endFrame] - times[startFrame]);
            applyFrame(target, startFrame, blend * weight);
            applyFrame(target, endFrame, (1f - blend) * weight);
        }
        */
    }

    /**
     * @return the length of the track
     */
    public float getLength() {
        return times == null ? 0 : times[times.length - 1] - times[0];
    }

    /**
     * This method creates a clone of the current object.
     * @return a clone of the current object
     */
    @Override
    public PoseTrack clone() {
        try {
            PoseTrack result = (PoseTrack) super.clone();
            // NOTE(review): unlike getLength(), this does not guard against a
            // null 'times' array and would NPE — confirm whether a track can
            // be cloned before 'times' is populated.
            result.times = this.times.clone();
            if (this.frames != null) {
                result.frames = new PoseFrame[this.frames.length];
                for (int i = 0; i < this.frames.length; ++i) {
                    result.frames[i] = this.frames[i].clone();
                }
            }
            return result;
        } catch (CloneNotSupportedException e) {
            throw new AssertionError();
        }
    }

    @Override
    public void write(JmeExporter e) throws IOException {
        OutputCapsule out = e.getCapsule(this);
        out.write(targetMeshIndex, "meshIndex", 0);
        out.write(frames, "frames", null);
        out.write(times, "times", null);
    }

    @Override
    public void read(JmeImporter i) throws IOException {
        InputCapsule in = i.getCapsule(this);
        targetMeshIndex = in.readInt("meshIndex", 0);
        times = in.readFloatArray("times", null);
        // Savable arrays come back untyped; copy them into a PoseFrame[].
        Savable[] readSavableArray = in.readSavableArray("frames", null);
        if (readSavableArray != null) {
            frames = new PoseFrame[readSavableArray.length];
            System.arraycopy(readSavableArray, 0, frames, 0, readSavableArray.length);
        }
    }
}
| |
/*******************************************************************************
* Copyright 2015 Esri
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.esri.wdc.offlinemapper.view;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.security.spec.InvalidKeySpecException;
import javax.crypto.BadPaddingException;
import javax.crypto.Cipher;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.DESKeySpec;
import android.app.Activity;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.util.Base64;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import com.esri.android.oauth.OAuthView;
import com.esri.android.runtime.ArcGISRuntime;
import com.esri.core.io.UserCredentials;
import com.esri.core.map.CallbackListener;
import com.esri.wdc.offlinemapper.R;
import com.esri.wdc.offlinemapper.controller.MapDownloadService;
import com.esri.wdc.offlinemapper.model.NetworkModel;
public class LoginActivity extends Activity {
private static final String TAG = LoginActivity.class.getSimpleName();
private static final String USER_CREDENTIALS_KEY = "UserCredentials";
private static final String PORTAL_URL_KEY = "PortalUrl";
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // The ArcGIS client id must be set before using the runtime.
    ArcGISRuntime.setClientId(getString(R.string.clientId));
    // Enter the login flow immediately; this activity has no idle state.
    doLogin();
}
/**
 * Click handler wired from a layout (e.g. the disconnected screen's retry
 * button); simply restarts the login flow.
 *
 * @param view the clicked view (unused)
 */
public void doLogin(View view) {
    doLogin();
}
/**
 * Drives the login flow:
 * 1. No saved credentials: show the OAuth view (when online), then ask the
 *    user to create a PIN that will encrypt the credentials.
 * 2. Credentials previously saved: ask for the PIN to decrypt them.
 */
private void doLogin() {
    final SharedPreferences prefs = this.getPreferences(MODE_PRIVATE);
    String userCredsString = prefs.getString(USER_CREDENTIALS_KEY, null);
    String portalUrl = prefs.getString(PORTAL_URL_KEY, null);
    if (null == portalUrl || null == userCredsString) {
        // Clear any half-saved state so both keys are always set together.
        prefs.edit()
                .remove(PORTAL_URL_KEY)
                .remove(USER_CREDENTIALS_KEY)
                .commit();
        userCredsString = null;
        portalUrl = "https://www.arcgis.com";//TODO get from user when we support Portal
        final String portalUrlFinal = portalUrl;
        if (NetworkModel.isConnected(this)) {
            OAuthView oauthView = new OAuthView(this, portalUrl, getString(R.string.clientId), new CallbackListener<UserCredentials>() {

                public void onError(Throwable e) {
                    // NOTE(review): OAuth errors are silently ignored; the user
                    // is left on the OAuth view with no feedback.
                }

                public void onCallback(final UserCredentials userCredentials) {
                    // The callback may arrive off the UI thread; hop back
                    // before touching views.
                    runOnUiThread(new Runnable() {
                        public void run() {
                            doCreatePin(portalUrlFinal, userCredentials, null);
                        }
                    });
                }
            });
            setContentView(oauthView);
        } else {
            // Offline with no stored credentials: nothing to log in with.
            setContentView(R.layout.activity_login_disconnected);
        }
    } else {
        final String portalUrlFinal = portalUrl;
        final String userCredsStringFinal = userCredsString;
        runOnUiThread(new Runnable() {
            public void run() {
                doEnterPin(portalUrlFinal, userCredsStringFinal, null);
            }
        });
    }
}
/**
 * Shows the PIN screen in two phases (create, then confirm). On a match the
 * credentials are encrypted with the PIN and persisted; on a mismatch the
 * screen is shown again with an error message.
 *
 * @param portalUrl       the portal the credentials belong to
 * @param userCredentials the credentials to encrypt and store
 * @param error           optional validation message to show, or null
 */
private void doCreatePin(final String portalUrl, final UserCredentials userCredentials, String error) {
    setContentView(R.layout.activity_login_pin);
    final EditText editText = (EditText) findViewById(R.id.editText_pin);
    if (null != error) {
        editText.setError(error);
    }
    ((TextView) findViewById(R.id.textView_pinLabel)).setText(getString(R.string.create_pin));
    final Button okButton = (Button) findViewById(R.id.button_ok);
    okButton.setOnClickListener(new OnClickListener() {

        public void onClick(View v) {
            // First tap: capture the PIN, clear the field, and switch the
            // label and the click handler to "confirm" mode.
            final String firstPin = editText.getText().toString().trim();
            editText.setText("");
            ((TextView) findViewById(R.id.textView_pinLabel)).setText(getString(R.string.confirm_pin));
            okButton.setOnClickListener(new OnClickListener() {

                public void onClick(View v) {
                    String secondPin = editText.getText().toString().trim();
                    if (0 < firstPin.length() && firstPin.equals(secondPin)) {
                        // NOTE(review): the PIN is parsed as an int, so only
                        // numeric PINs within int range are supported; a
                        // non-numeric PIN would throw NumberFormatException here.
                        String enc = encryptIt(userCredentials, Integer.parseInt(firstPin));
                        SharedPreferences prefs = LoginActivity.this.getPreferences(MODE_PRIVATE);
                        prefs.edit()
                                .putString(PORTAL_URL_KEY, portalUrl)
                                .putString(USER_CREDENTIALS_KEY, enc)
                                .commit();
                        startMapChooserActivity(portalUrl, userCredentials);
                    } else {
                        // Mismatch (or empty PIN): restart with an error shown.
                        doCreatePin(portalUrl, userCredentials, getString(R.string.error_pin_mismatch));
                    }
                }
            });
        }
    });
    editText.requestFocus();
}
/**
 * Shows the PIN screen in "enter" mode. The entered PIN is used to decrypt
 * the stored credentials; any failure (wrong PIN, non-numeric input, corrupt
 * data) re-prompts with an error.
 *
 * @param portalUrl            portal the stored session belongs to
 * @param encryptedCredentials Base64 ciphertext produced by encryptIt
 * @param error                error text to show on the input field, or null
 */
private void doEnterPin(final String portalUrl, final String encryptedCredentials, String error) {
    setContentView(R.layout.activity_login_pin);
    final EditText editText = (EditText) findViewById(R.id.editText_pin);
    if (null != error) {
        editText.setError(error);
    }
    ((TextView) findViewById(R.id.textView_pinLabel)).setText(getString(R.string.enter_pin));
    ((Button) findViewById(R.id.button_ok)).setOnClickListener(new OnClickListener() {
        public void onClick(View v) {
            try {
                UserCredentials userCredentials = (UserCredentials) decryptIt(encryptedCredentials, Integer.parseInt(editText.getText().toString()));
                startMapChooserActivity(portalUrl, userCredentials);
            } catch (Throwable t) {
                // A wrong PIN surfaces as a decryption/deserialization failure,
                // so every Throwable is treated as "incorrect PIN".
                // NOTE(review): literal is not localized, unlike the other
                // labels which come from string resources.
                editText.setText("");
                doEnterPin(portalUrl, encryptedCredentials, "Incorrect PIN");
            }
        }
    });
}
/**
 * Hands off a signed-in session: starts the background map download service
 * and then opens the map chooser screen, passing the portal URL and
 * credentials to both via intent extras.
 *
 * @param portalUrl       portal the session belongs to
 * @param userCredentials credentials to forward
 */
private void startMapChooserActivity(String portalUrl, UserCredentials userCredentials) {
    final Intent downloadIntent = new Intent(this, MapDownloadService.class);
    downloadIntent.putExtra(MapDownloadService.EXTRA_USER_CREDENTIALS, userCredentials);
    downloadIntent.putExtra(MapDownloadService.EXTRA_PORTAL_URL, portalUrl);
    startService(downloadIntent);

    final Intent chooserIntent = new Intent(getApplicationContext(), MapChooserActivity.class);
    chooserIntent.putExtra(MapChooserActivity.EXTRA_USER_CREDENTIALS, userCredentials);
    chooserIntent.putExtra(MapChooserActivity.EXTRA_PORTAL_URL, portalUrl);
    startActivity(chooserIntent);
}
/**
 * Click handler: stops the background download service, wipes the saved
 * portal URL and encrypted credentials, and restarts the login flow.
 *
 * @param view the clicked view (unused)
 */
public void logout(View view) {
    stopService(new Intent(getApplicationContext(), MapDownloadService.class));
    getPreferences(MODE_PRIVATE).edit()
            .remove(PORTAL_URL_KEY)
            .remove(USER_CREDENTIALS_KEY).commit();
    doLogin();
}
/**
 * Renders {@code seed} as a decimal string padded on the right with spaces
 * to a minimum length of 8 characters, because {@code DESKeySpec} requires
 * at least 8 key bytes. Strings already 8 characters or longer are returned
 * unchanged.
 *
 * @param seed the numeric PIN entered by the user
 * @return the seed as a string of at least 8 characters
 */
private static String fixSeed(int seed) {
    // "%-8s" left-justifies in an 8-wide field, padding with spaces on the
    // right -- identical output to the old concatenation loop, without
    // building an intermediate String per iteration.
    return String.format("%-8s", Integer.toString(seed));
}
/**
 * Java-serializes {@code object}, encrypts the bytes with DES keyed by the
 * user's PIN (space-padded to 8 bytes by fixSeed), and returns the result
 * as a Base64 string.
 *
 * SECURITY NOTE(review): DES with a key derived from a short numeric PIN is
 * very weak; treat this as obfuscation of the stored credentials rather
 * than strong encryption.
 *
 * @param object value to encrypt (must be Serializable)
 * @param seed   the user's numeric PIN
 * @return Base64-encoded ciphertext, or null if serialization or
 *         encryption failed (the error is logged, not rethrown)
 */
private static String encryptIt(Serializable object, int seed) {
    String seedString = fixSeed(seed);
    ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
    try {
        // Serialize the object into an in-memory byte array.
        ObjectOutputStream outstream = new ObjectOutputStream(byteOut);
        outstream.writeObject(object);
        outstream.close();
        // Derive the DES key from the padded PIN string.
        DESKeySpec keySpec = new DESKeySpec(seedString.getBytes("UTF8"));
        SecretKeyFactory keyFactory = SecretKeyFactory.getInstance("DES");
        SecretKey key = keyFactory.generateSecret(keySpec);
        // Cipher is not thread safe
        Cipher cipher = Cipher.getInstance("DES");
        cipher.init(Cipher.ENCRYPT_MODE, key);
        String encrypedValue = Base64.encodeToString(cipher.doFinal(byteOut.toByteArray()), Base64.DEFAULT);
        return encrypedValue;
    } catch (Throwable t) {
        // Best-effort: callers receive null on any failure.
        Log.e(TAG, "Couldn't encrypt", t);
        return null;
    }
}
/**
 * Reverses encryptIt: Base64-decodes {@code value}, decrypts it with DES
 * keyed by the PIN (padded by fixSeed), and deserializes the plaintext back
 * into an object.
 *
 * NOTE(review): this uses Java native deserialization on locally stored
 * data; acceptable only because the data was written by this app, never for
 * untrusted input.
 *
 * @param value Base64 ciphertext produced by encryptIt
 * @param seed  the user's numeric PIN
 * @return the deserialized object
 * @throws BadPaddingException (among others) typically when the PIN is wrong
 */
private static Object decryptIt(String value, int seed)
        throws IOException, InvalidKeyException, NoSuchAlgorithmException, InvalidKeySpecException, NoSuchPaddingException, IllegalBlockSizeException, BadPaddingException, ClassNotFoundException {
    String seedString = fixSeed(seed);
    DESKeySpec keySpec = new DESKeySpec(seedString.getBytes("UTF8"));
    SecretKeyFactory keyFactory = SecretKeyFactory.getInstance("DES");
    SecretKey key = keyFactory.generateSecret(keySpec);
    byte[] encrypedPwdBytes = Base64.decode(value, Base64.DEFAULT);
    // cipher is not thread safe
    Cipher cipher = Cipher.getInstance("DES");
    cipher.init(Cipher.DECRYPT_MODE, key);
    byte[] decrypedValueBytes = (cipher.doFinal(encrypedPwdBytes));
    ByteArrayInputStream byteIn = new ByteArrayInputStream(decrypedValueBytes);
    ObjectInputStream objIn = new ObjectInputStream(byteIn);
    return objIn.readObject();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    // Inflate this screen's menu; returning true makes it visible.
    getMenuInflater().inflate(R.menu.login, menu);
    return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    // No menu actions are handled here yet; 'id' is retained for the
    // stubbed-out settings action below.
    int id = item.getItemId();
    // if (id == R.id.action_settings) {
    // return true;
    // }
    return super.onOptionsItemSelected(item);
}
}
| |
/*
* Copyright 2013 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.math;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.terasology.math.geom.Vector3f;
import org.terasology.math.geom.Vector3i;
import java.util.HashSet;
import java.util.Set;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
 * Unit tests for {@code Vector3i}: constructors, equality, arithmetic,
 * component-wise min/max, grid (Manhattan) metrics, and the hash function.
 *
 * @author Immortius
 */
public class Vector3iTest {
    // Fixture vectors, recreated before each test; chosen so that
    // v1 + v2 == v3 component-wise.
    private Vector3i v1;
    private Vector3i v2;
    private Vector3i v3;

    public Vector3iTest() {
    }

    @BeforeClass
    public static void setUpClass() throws Exception {
        // Intentionally empty: no class-level fixture required.
    }

    @AfterClass
    public static void tearDownClass() throws Exception {
        // Intentionally empty.
    }

    @Before
    public void setUp() {
        v1 = new Vector3i(1, 3, 7);
        v2 = new Vector3i(2, 6, 14);
        v3 = new Vector3i(3, 9, 21);
    }

    @After
    public void tearDown() {
        // Intentionally empty.
    }

    @Test
    public void emptyConstructor() {
        // Default constructor must zero all components.
        Vector3i v = new Vector3i();
        assertEquals(0, v.x);
        assertEquals(0, v.y);
        assertEquals(0, v.z);
    }

    @Test
    public void tripleConstructor() {
        Vector3i v = new Vector3i(1, 2, 3);
        assertEquals(1, v.x);
        assertEquals(2, v.y);
        assertEquals(3, v.z);
    }

    @Test
    public void offsetConstructor() {
        // Constructing from a float vector with offset 0.5 appears to apply
        // floor(component + offset), i.e. round-half-up per component.
        Vector3f vOrig = new Vector3f(0.1f, 0.6f, 7.2f);
        Vector3i v = new Vector3i(vOrig, 0.5f);
        assertEquals(new Vector3i(0, 1, 7), v);
    }

    @Test
    public void offsetConstructorWithNegatives() {
        // Same rounding must hold for negative components.
        Vector3f vOrig = new Vector3f(-0.1f, -0.6f, -1.4f);
        Vector3i v = new Vector3i(vOrig, 0.5f);
        assertEquals(new Vector3i(0, -1, -1), v);
    }

    @Test
    public void copyConstructor() {
        Vector3i copy = new Vector3i(v1);
        assertEquals(v1.x, copy.x);
        assertEquals(v1.y, copy.y);
        assertEquals(v1.z, copy.z);
    }

    @Test
    public void testEquals() {
        assertFalse(v1.equals(v2));
        assertTrue(v1.equals(new Vector3i(v1.x, v1.y, v1.z)));
        // equals(null) must be false, not throw.
        assertFalse(v1.equals(null));
    }

    @Test
    public void testSetTriple() {
        Vector3i v = new Vector3i(v1);
        v.set(v2.x, v2.y, v2.z);
        assertEquals(v2, v);
    }

    @Test
    public void testSetCopy() {
        Vector3i v = new Vector3i();
        v.set(v2);
        assertEquals(v2, v);
    }

    @Test
    public void testAdd() {
        // v1 + v2 == v3 by fixture construction.
        Vector3i v = new Vector3i(v1);
        v.add(v2);
        assertEquals(v3, v);
    }

    @Test
    public void testAddTriple() {
        Vector3i v = new Vector3i(v1);
        v.add(v2.x, v2.y, v2.z);
        assertEquals(v3, v);
    }

    @Test
    public void testMin() {
        // min against (z, y, x): the y component compares against itself.
        Vector3i v = new Vector3i(v1);
        v.min(new Vector3i(v1.z, v1.y, v1.x));
        assertEquals(Math.min(v1.x, v1.z), v.x);
        assertEquals(v1.y, v.y);
        assertEquals(Math.min(v1.x, v1.z), v.z);
    }

    @Test
    public void testMax() {
        Vector3i v = new Vector3i(v1);
        v.max(new Vector3i(v1.z, v1.y, v1.x));
        assertEquals(Math.max(v1.x, v1.z), v.x);
        assertEquals(v1.y, v.y);
        assertEquals(Math.max(v1.x, v1.z), v.z);
    }

    @Test
    public void testIsUnitVector() {
        assertFalse(Vector3iUtil.isUnitVector(Vector3i.zero()));
        assertTrue(Vector3iUtil.isUnitVector(Vector3iUtil.unitX()));
        assertTrue(Vector3iUtil.isUnitVector(Vector3iUtil.unitY()));
        assertTrue(Vector3iUtil.isUnitVector(Vector3iUtil.unitZ()));
        // Negated unit axes still count as unit vectors.
        Vector3i v = Vector3iUtil.unitX();
        v.scale(-1);
        assertTrue(Vector3iUtil.isUnitVector(v));
        assertFalse(Vector3iUtil.isUnitVector(Vector3i.one()));
    }

    @Test
    public void testManhattanDistance() {
        assertEquals(0, Vector3i.zero().gridDistance(Vector3i.zero()));
        assertEquals(1, Vector3i.zero().gridDistance(Vector3iUtil.unitX()));
        assertEquals(1, Vector3i.zero().gridDistance(Vector3iUtil.unitY()));
        assertEquals(1, Vector3i.zero().gridDistance(Vector3iUtil.unitZ()));
        assertEquals(3, Vector3i.zero().gridDistance(Vector3i.one()));
        // Distance uses absolute component differences.
        assertEquals(3, Vector3i.zero().gridDistance(new Vector3i(1, -1, 1)));
    }

    @Test
    public void testManhattanMagnitude() {
        assertEquals(0, Vector3iUtil.gridMagnitude(Vector3i.zero()));
        assertEquals(1, Vector3iUtil.gridMagnitude(Vector3iUtil.unitX()));
        assertEquals(1, Vector3iUtil.gridMagnitude(Vector3iUtil.unitY()));
        assertEquals(1, Vector3iUtil.gridMagnitude(Vector3iUtil.unitZ()));
        assertEquals(3, Vector3iUtil.gridMagnitude(Vector3i.one()));
        assertEquals(3, Vector3iUtil.gridMagnitude(new Vector3i(1, -1, 1)));
    }

    @Test
    public void testHash() {
        // Derivation of a candidate collision pair for the assumed polynomial
        // hash (kept from the original author's notes):
        // k = 59
        // j = k+1 = 60
        // i = (k+k^2)(1+k)^2 = not important for collision
        // hash = (k+k^2)(1+k)^2 + x(1+k)^2 + (1+k)y + z
        // hash = i + xj^2 + jy + z
        // with x := 0
        // hash = i + jy + z
        // set i + jy + z = i + jy' + z'
        // jy + z = jy' + z'
        // set z' := z + j -> z'-z = j
        // jy = jy' + j -> y' = y - 1
        Vector3i a = new Vector3i(0, 10, 10);
        Vector3i b = new Vector3i(0, 9, 70);
        assertFalse(a.hashCode() == b.hashCode());
        // Equal vectors hash equal; unequal vectors (here) hash differently.
        assertTrue(new Vector3i(0, 10, 10).hashCode() == new Vector3i(0, 10, 10).hashCode());
        assertTrue(new Vector3i(-100, 10, 10).hashCode() == new Vector3i(-100, 10, 10).hashCode());
        assertTrue(new Vector3i(0, -5, -5).hashCode() == new Vector3i(0, -5, -5).hashCode());
        assertFalse(new Vector3i(1, 10, 10).hashCode() == new Vector3i(0, 10, 10).hashCode());
        assertFalse(new Vector3i(-101, 10, 10).hashCode() == new Vector3i(-100, 10, 10).hashCode());
        assertFalse(new Vector3i(0, -1, -5).hashCode() == new Vector3i(0, -5, -5).hashCode());
    }

    @Test
    public void testHashCollisions() {
        // Exhaustively checks a 100^3 cube of coordinates for hash collisions.
        int range = 50;
        Set<Integer> alreadyUsedHashes = new HashSet<>();
        for (int x = -range; x < range; ++x) {
            for (int y = -range; y < range; ++y) {
                for (int z = -range; z < range; ++z) {
                    int hash = new Vector3i(x, y, z).hashCode();
                    if (alreadyUsedHashes.contains(hash)) {
                        fail(String.format("duplicate hash %d at: %d,%d,%d", hash, x, y, z));
                    } else {
                        alreadyUsedHashes.add(hash);
                    }
                }
            }
        }
    }
}
| |
package testSurvey;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Scanner;
/**
 * A multiple-choice question with a fixed list of options. In exam mode a
 * single correct answer is stored; in survey mode the answer is null and only
 * the respondent's choice is recorded. The toString* methods emit JSON
 * fragments of this shape:
 *
 * {"question": "Which number is largest?", "answer": "20", "type": "multipleChoice", "answers": [
 *     {"answer": "20"},
 *     {"answer": "10"},
 *     {"answer": "3"},
 *     {"answer": "4"}
 * ]}
 */
public class MultipleChoice extends Question {
    private ArrayList<String> options;  // selectable option texts
    private String _answer;             // correct option text; null for surveys
    private String userAnswer;          // the respondent's chosen option
    private Output output;              // sink for all console output

    public MultipleChoice(String questionString, ArrayList<String> options, String answer, String type) {
        super(questionString, type);
        this.setOptions(options);
        this.setAnswer(answer);
        this.output = new Output();
    }

    public MultipleChoice() {
        super();
        // BUG FIX: the no-arg constructor previously left 'output' null, so
        // any later display()/modify call would throw NullPointerException.
        this.output = new Output();
    }

    public ArrayList<String> getOptions() {
        return options;
    }

    public void setOptions(ArrayList<String> options) {
        this.options = options;
    }

    public void setAnswer(String answer) {
        this._answer = answer;
    }

    /**
     * Builds the trailing JSON fragment shared by toString() and
     * toStringSurvey(): {@code "answers": [ {...}, ... ]}} -- note it also
     * emits the object's closing brace, matching the original output.
     */
    private String answersJson() {
        StringBuilder sb = new StringBuilder("\"answers\": [");
        for (int i = 0; i < options.size(); i++) {
            sb.append("{\"answer\": \"").append(options.get(i)).append("\"}");
            if (i < options.size() - 1) {
                sb.append(","); // comma after every element but the last
            }
        }
        return sb.append("]}").toString();
    }

    /** JSON for exam export: question, correct answer, type and options. */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{\"question\":\"");
        sb.append(this.getQuestion()).append("\",");
        sb.append("\"answer\": \"").append(this.getAnswer().get(0)).append("\",");
        sb.append("\"type\": \"multipleChoice\",");
        sb.append(answersJson());
        return sb.toString();
    }

    /** JSON for survey export: like toString() but without the answer. */
    public String toStringSurvey() {
        StringBuilder sb = new StringBuilder("{\"question\":\"");
        sb.append(this.getQuestion()).append("\",");
        sb.append("\"type\": \"multipleChoice\",");
        sb.append(answersJson());
        return sb.toString();
    }

    /** Prints the question followed by each option on its own line. */
    public void display() {
        this.output.println(this.getQuestion() + " Multiple choice");
        for (int i = 0; i < options.size(); i++) {
            this.output.println(options.get(i));
        }
    }

    /** One "index) text" line per option, each terminated by a newline. */
    public String optionsToString() {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < this.options.size(); i++) {
            sb.append(i).append(") ").append(this.options.get(i)).append("\n");
        }
        return sb.toString();
    }

    /**
     * Prompts for the index of the correct option and stores it as the
     * answer, re-prompting until a valid index is entered.
     */
    private void changeAnswer() {
        for (int i = 0; i < options.size(); i++) {
            this.output.println(i + ": " + options.get(i));
        }
        String response = this.getResponse(new Scanner(new InputStreamReader(System.in)), "Enter number corresponding correct answer to multiple choice");
        try {
            int num = Integer.parseInt(response);
            if (num < options.size() && num >= 0) {
                // Valid index: record the corresponding option as the answer.
                this.setAnswer(options.get(num));
            } else {
                this.output.println("Invalid number: " + num + " doesn't fit in parameters given above");
                this.changeAnswer();
            }
        } catch (NumberFormatException e) {
            this.output.println("Invalid: Enter number please");
            // BUG FIX: previously non-numeric input returned without setting
            // an answer; re-prompt (consistent with askForUserAnswer()).
            this.changeAnswer();
        }
    }

    /** Replaces the option list, prompting until the user declines to add more. */
    private void changeOptions() {
        ArrayList<String> newOptions = new ArrayList<String>();
        boolean more = true;
        do { // guarantees at least one option is collected
            String option = this.getResponse(new Scanner(new InputStreamReader(System.in)), "Add option:");
            newOptions.add(option);
            option = this.getResponse(new Scanner(new InputStreamReader(System.in)), "Add another option? 1 for yes, anything else for no");
            if (!option.equals("1")) {
                more = false;
            }
        } while (more);
        this.options = newOptions;
    }

    /** Interactive edit for exam mode: prompt, options, and the correct answer. */
    @Override
    public void modifyExam() {
        this.output.println("Prompt: " + this.getQuestion());
        String answer = this.getResponse(new Scanner(new InputStreamReader(System.in)), "Would you like to change the prompt that is above? 1 for yes, anything else for no");
        if (answer.equals("1")) {
            this.changePrompt();
        }
        this.output.println(this.optionsToString());
        answer = this.getResponse(new Scanner(new InputStreamReader(System.in)), "Would you like to change the options that are above? 1 for yes, anything else for no");
        if (answer.equals("1")) {
            // New options invalidate the old answer, so it must be re-chosen.
            this.changeOptions();
            this.changeAnswer();
        } else {
            // Options kept: offer to change just the answer.
            this.output.println("Answer: " + this._answer);
            String option = this.getResponse(new Scanner(new InputStreamReader(System.in)), "Would you like to change the answer? 1 for yes, anything else for no.");
            if (option.equals("1")) {
                this.changeAnswer();
            }
        }
    }

    /** Interactive edit for survey mode: prompt and options only (no answer). */
    @Override
    public void modifySurvey() {
        this.output.println("Prompt: " + this.getQuestion());
        String answer = this.getResponse(new Scanner(new InputStreamReader(System.in)), "Would you like to change the prompt that is above? 1 for yes, anything else for no");
        if (answer.equals("1")) {
            this.changePrompt();
        }
        this.output.println(this.optionsToString());
        answer = this.getResponse(new Scanner(new InputStreamReader(System.in)), "Would you like to change the options that are above? 1 for yes, anything else for no");
        if (answer.equals("1")) {
            this.changeOptions();
        }
    }

    /** Prompts for and stores a new question prompt. */
    private void changePrompt() {
        this.setQuestion(this.getResponse(new Scanner(new InputStreamReader(System.in)), "Enter new question:"));
    }

    /** Builds a new exam question interactively: prompt, options, answer. */
    @Override
    public void makeExamQuestion() {
        this.changePrompt();
        this.changeOptions();
        this.changeAnswer();
        this.setType("multipleChoice");
    }

    /** Builds a new survey question interactively; surveys have no answer. */
    @Override
    public void makeSurveyQuestion() {
        this.changePrompt();
        this.changeOptions();
        this._answer = null;
        this.setType("multipleChoice");
    }

    /** JSON with question, correct answer, the user's answer, and type. */
    @Override
    public String toStringUserAnswers() {
        StringBuilder sb = new StringBuilder("{\"question\":\"");
        sb.append(this.getQuestion()).append("\",");
        sb.append("\"answer\": \"").append(this._answer).append("\",");
        sb.append("\"user_answer\": \"").append(this.userAnswer).append("\",");
        sb.append("\"type\": \"multipleChoice\"}");
        return sb.toString();
    }

    /** Stores the first element of {@code x} as the user's answer. */
    @Override
    public void setUserAnswer(ArrayList<String> x) {
        this.userAnswer = x.get(0);
    }

    /** Returns the user's answer wrapped in a single-element list. */
    @Override
    public ArrayList<String> getUserAnswer() {
        ArrayList<String> ans = new ArrayList<String>();
        ans.add(this.userAnswer);
        return ans;
    }

    /** 10 points for a correct answer, otherwise 0. */
    @Override
    public double grade() {
        // BUG FIX: null-safe -- an unanswered question now scores 0 instead
        // of throwing NullPointerException.
        if (this.userAnswer != null && this.userAnswer.equals(this._answer)) {
            return 10;
        }
        return 0;
    }

    /**
     * Displays the question and options, then prompts for the index of the
     * chosen option, re-prompting on invalid or non-numeric input.
     */
    @Override
    public void askForUserAnswer() {
        this.output.println(this.getQuestion());
        this.output.println(this.optionsToString());
        String response = this.getResponse(new Scanner(new InputStreamReader(System.in)), "Enter number corresponding correct answer to multiple choice");
        try {
            int num = Integer.parseInt(response);
            if (num < options.size() && num >= 0) {
                this.userAnswer = options.get(num);
            } else {
                this.output.println("Invalid number: " + num + " doesn't fit in parameters given above");
                this.askForUserAnswer();
            }
        } catch (NumberFormatException e) {
            this.output.println("Invalid: Enter number please");
            this.askForUserAnswer();
        }
    }

    /** Survey-result JSON: question, the user's answer, and type (no answer). */
    @Override
    public String toStringSurveyUserAnswers() {
        StringBuilder sb = new StringBuilder("{\"question\":\"");
        sb.append(this.getQuestion()).append("\",");
        sb.append("\"user_answer\": \"").append(this.userAnswer).append("\",");
        sb.append("\"type\": \"multipleChoice\"}");
        return sb.toString();
    }

    /**
     * Copies this question including the user's answer. Note: the options
     * list is shared with the copy, as in the original implementation.
     */
    @Override
    public Question copy() {
        Question example = new MultipleChoice(this.getQuestion(), this.options, this._answer, this.getType());
        ArrayList<String> arr = new ArrayList<String>();
        arr.add(this.userAnswer);
        example.setUserAnswer(arr);
        example.setQuestion(this.getQuestion());
        return example;
    }

    /** Returns the correct answer wrapped in a single-element list. */
    @Override
    public ArrayList<String> getAnswer() {
        ArrayList<String> blam = new ArrayList<String>();
        blam.add(this._answer);
        return blam;
    }

    /** Identical to {@link #copy()}; kept as a delegating alias. */
    @Override
    public Question copySurvey() {
        return copy();
    }
}
| |
package org.exarhteam.iitc_mobile;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.SharedPreferences;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.Build;
import android.preference.PreferenceManager;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.WindowManager;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.widget.Toast;
import org.exarhteam.iitc_mobile.async.CheckHttpResponse;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
@SuppressLint("SetJavaScriptEnabled")
public class IITC_WebView extends WebView {
    // fullscreen mode bit flags, combined into mFullscreenStatus
    public static final int FS_ENABLED = (1 << 0);
    public static final int FS_SYSBAR = (1 << 1);
    public static final int FS_ACTIONBAR = (1 << 2);
    public static final int FS_STATUSBAR = (1 << 3);
    public static final int FS_NAVBAR = (1 << 4);
    private WebSettings mSettings;
    private IITC_WebViewClient mIitcWebViewClient;
    private IITC_WebChromeClient mIitcWebChromeClient;
    private IITC_JSInterface mJsInterface;
    private IITC_Mobile mIitc;
    private SharedPreferences mSharedPrefs;
    private int mFullscreenStatus = 0;
    private Runnable mNavHider;         // delayed task that re-hides the nav bar
    private boolean mDisableJs = false; // when true, javascript: URLs are dropped
    private final String mDesktopUserAgent = "Mozilla/5.0 (X11; Linux x86_64; rv:17.0)" +
            " Gecko/20130810 Firefox/17.0 Iceweasel/17.0.8";
    private String mMobileUserAgent;

    /** One-time WebView setup shared by all constructors. */
    private void iitc_init(final Context c) {
        if (isInEditMode()) return;
        mIitc = (IITC_Mobile) c;
        mSettings = getSettings();
        mSettings.setJavaScriptEnabled(true);
        mSettings.setDomStorageEnabled(true);
        mSettings.setAllowFileAccess(true);
        mSettings.setGeolocationEnabled(true);
        mSettings.setAppCacheEnabled(true);
        mSettings.setCacheMode(WebSettings.LOAD_DEFAULT);
        mSettings.setAppCachePath(getContext().getCacheDir().getAbsolutePath());
        mSettings.setDatabasePath(getContext().getApplicationInfo().dataDir + "/databases/");
        // enable mixed content (http on https...needed for some map tiles) mode
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            mSettings.setMixedContentMode(WebSettings.MIXED_CONTENT_COMPATIBILITY_MODE);
        }
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
            // FIX: setWebContentsDebuggingEnabled(true) was previously also
            // called in the LOLLIPOP branch above, invoking it twice on
            // LOLLIPOP+ devices; this >= KITKAT branch already covers them.
            // NOTE(review): debugging is enabled unconditionally -- consider
            // gating on a debug build flag.
            setWebContentsDebuggingEnabled(true);
            mJsInterface = new IITC_JSInterfaceKitkat(mIitc);
        } else {
            mJsInterface = new IITC_JSInterface(mIitc);
        }
        addJavascriptInterface(mJsInterface, "android");
        mSharedPrefs = PreferenceManager.getDefaultSharedPreferences(mIitc);
        // Hack to work Google login page in old browser
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP &&
                !mSharedPrefs.getBoolean("pref_fake_user_agent", false))
            mSharedPrefs.edit().putBoolean("pref_fake_user_agent", true).apply();
        final String original_ua = mSettings.getUserAgentString();
        // remove ";wv " marker as Google blocks WebViews from using OAuth
        // https://developer.chrome.com/multidevice/user-agent#webview_user_agent
        mMobileUserAgent = original_ua.replace("; wv", "");
        setUserAgent();
        mNavHider = new Runnable() {
            @Override
            public void run() {
                if (isInFullscreen() && (getFullscreenStatus() & (FS_NAVBAR)) != 0) {
                    int systemUiVisibility = SYSTEM_UI_FLAG_HIDE_NAVIGATION;
                    // in immersive mode the user can interact with the app while the navbar is hidden
                    // this mode is available since KitKat
                    // you can leave this mode by swiping down from the top of the screen. this does only work
                    // when the app is in total-fullscreen mode
                    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT && (mFullscreenStatus & FS_SYSBAR) != 0) {
                        systemUiVisibility |= SYSTEM_UI_FLAG_IMMERSIVE;
                    }
                    setSystemUiVisibility(systemUiVisibility);
                }
            }
        };
        mIitcWebChromeClient = new IITC_WebChromeClient(mIitc);
        setWebChromeClient(mIitcWebChromeClient);
        mIitcWebViewClient = new IITC_WebViewClient(mIitc);
        setWebViewClient(mIitcWebViewClient);
    }

    // constructors -------------------------------------------------
    public IITC_WebView(final Context context) {
        super(context);
        iitc_init(context);
    }

    public IITC_WebView(final Context context, final AttributeSet attrs) {
        super(context, attrs);
        iitc_init(context);
    }

    public IITC_WebView(final Context context, final AttributeSet attrs, final int defStyle) {
        super(context, attrs, defStyle);
        iitc_init(context);
    }
    // ----------------------------------------------------------------

    /**
     * Routes javascript: URLs through loadJS() (honouring mDisableJs) and
     * upgrades all other URLs to https before loading.
     */
    @Override
    public void loadUrl(String url) {
        if (url.startsWith("javascript:")) {
            // drop the script entirely while injection is disabled
            if (mDisableJs) {
                Log.d("javascript injection disabled...return");
                return;
            }
            loadJS(url.substring("javascript:".length()));
        } else {
            // Niantic no longer allows connections without https
            url = url.replace("http://", "https://");
            // disable splash screen if a http error code is responded
            new CheckHttpResponse(mIitc).execute(url);
            Log.d("loading url: " + url);
            super.loadUrl(url);
        }
    }

    /**
     * Injects javascript, preferring evaluateJavascript() (KitKat+) and
     * falling back to the classic javascript: URL method, taking care not to
     * close an open soft keyboard in that fallback.
     */
    @TargetApi(19)
    public void loadJS(final String js) {
        boolean classicWebView = Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT;
        if (!classicWebView) {
            // some strange Android 4.4+ custom ROMs are using the classic webview
            try {
                evaluateJavascript(js, null);
            } catch (final IllegalStateException e) {
                Log.e(e);
                Log.d("Classic WebView detected: use old injection method");
                classicWebView = true;
            }
        }
        if (classicWebView) {
            // if in edit text mode, don't load javascript otherwise the keyboard closes.
            final HitTestResult testResult = getHitTestResult();
            if (testResult != null && testResult.getType() == HitTestResult.EDIT_TEXT_TYPE) {
                // let window.show(...) interrupt input
                // window.show(...) is called if one of the action bar buttons
                // is clicked
                if (!js.startsWith("window.show(")) {
                    Log.d("in insert mode. do not load script.");
                    return;
                }
            }
            super.loadUrl("javascript:" + js);
        }
    }

    // Any touch restarts the 3-second countdown before the nav bar is hidden.
    @SuppressLint("ClickableViewAccessibility")
    @Override
    public boolean onTouchEvent(final MotionEvent event) {
        getHandler().removeCallbacks(mNavHider);
        getHandler().postDelayed(mNavHider, 3000);
        return super.onTouchEvent(event);
    }

    @Override
    public void setSystemUiVisibility(final int visibility) {
        // whenever the nav bar becomes visible again, schedule it to re-hide
        if ((visibility & SYSTEM_UI_FLAG_HIDE_NAVIGATION) == 0) {
            getHandler().postDelayed(mNavHider, 3000);
        }
        super.setSystemUiVisibility(visibility);
    }

    @Override
    public void onWindowFocusChanged(final boolean hasWindowFocus) {
        if (hasWindowFocus) {
            getHandler().postDelayed(mNavHider, 3000);
            // if the webView has focus, JS should always be enabled
            mDisableJs = false;
        } else {
            getHandler().removeCallbacks(mNavHider);
        }
        super.onWindowFocusChanged(hasWindowFocus);
    }

    /**
     * Toggles fullscreen, applying whichever FS_* sub-modes are set:
     * hiding/restoring the action bar, system bar, nav bar and IITC status bar.
     */
    public void toggleFullscreen() {
        mFullscreenStatus ^= FS_ENABLED;
        final WindowManager.LayoutParams attrs = mIitc.getWindow().getAttributes();
        // toggle notification bar
        if (isInFullscreen()) {
            // show a toast with instructions to exit the fullscreen mode again
            Toast.makeText(mIitc, "Press back button to exit fullscreen", Toast.LENGTH_SHORT).show();
            if ((mFullscreenStatus & FS_ACTIONBAR) != 0) {
                mIitc.getNavigationHelper().hideActionBar();
            }
            if ((mFullscreenStatus & FS_SYSBAR) != 0) {
                attrs.flags |= WindowManager.LayoutParams.FLAG_FULLSCREEN;
            }
            if ((mFullscreenStatus & FS_NAVBAR) != 0) {
                getHandler().post(mNavHider);
            }
            if ((mFullscreenStatus & FS_STATUSBAR) != 0) {
                loadUrl("javascript: $('#updatestatus').hide();");
            }
        } else {
            attrs.flags &= ~WindowManager.LayoutParams.FLAG_FULLSCREEN;
            mIitc.getNavigationHelper().showActionBar();
            loadUrl("javascript: $('#updatestatus').show();");
        }
        mIitc.getWindow().setAttributes(attrs);
        mIitc.invalidateOptionsMenu();
    }

    /** Re-reads the fullscreen sub-mode flags from shared preferences. */
    void updateFullscreenStatus() {
        final String[] fullscreenDefaults = getResources().getStringArray(R.array.pref_hide_fullscreen_defaults);
        final Set<String> entries = mSharedPrefs.getStringSet("pref_fullscreen",
                new HashSet<String>(Arrays.asList(fullscreenDefaults)));
        // keep only the enabled bit, then OR in the configured sub-modes
        mFullscreenStatus &= FS_ENABLED;
        for (final String entry : entries) {
            // FIX: use |= rather than += -- these are bit flags, and OR-ing
            // is safe even if a flag value were ever contributed twice.
            mFullscreenStatus |= Integer.parseInt(entry);
        }
    }

    int getFullscreenStatus() {
        return mFullscreenStatus;
    }

    public boolean isInFullscreen() {
        return (mFullscreenStatus & FS_ENABLED) != 0;
    }

    public IITC_WebViewClient getWebViewClient() {
        return mIitcWebViewClient;
    }

    public IITC_JSInterface getJSInterface() {
        return mJsInterface;
    }

    /** True when on non-metered wifi (mobile hotspots count as metered). */
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
    public boolean isConnectedToWifi() {
        final ConnectivityManager conMan = (ConnectivityManager) mIitc.getSystemService(Context.CONNECTIVITY_SERVICE);
        final NetworkInfo wifi = conMan.getNetworkInfo(ConnectivityManager.TYPE_WIFI);
        // since jelly bean you can mark wifi networks as mobile hotspots
        // settings -> data usage -> menu -> mobile hotspots
        // ConnectivityManager.isActiveNetworkMeter returns if the currently used wifi-network
        // is ticked as mobile hotspot or not.
        // --> IITC_WebView.isConnectedToWifi should return 'false' if connected to mobile hotspot
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
            if (conMan.isActiveNetworkMetered()) return false;
        }
        return (wifi.getState() == NetworkInfo.State.CONNECTED);
    }

    public void disableJS(final boolean val) {
        mDisableJs = val;
    }

    /** Applies the desktop or mobile user agent based on preferences. */
    public void setUserAgent() {
        final String ua = mSharedPrefs.getBoolean("pref_fake_user_agent", false) ?
                mDesktopUserAgent : mMobileUserAgent;
        Log.d("setting user agent to: " + ua);
        mSettings.setUserAgentString(ua);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.index;
import org.apache.jackrabbit.JcrConstants;
import org.apache.jackrabbit.commons.jackrabbit.authorization.AccessControlUtils;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.commons.PerfLogger;
import org.apache.jackrabbit.oak.plugins.index.search.util.IndexDefinitionBuilder;
import org.apache.jackrabbit.oak.query.AbstractJcrTest;
import org.apache.jackrabbit.oak.query.facet.FacetResult;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.jcr.Node;
import javax.jcr.RepositoryException;
import javax.jcr.query.Query;
import javax.jcr.query.QueryResult;
import javax.jcr.security.Privilege;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Random;
import java.util.UUID;
import java.util.stream.Collectors;
import static org.apache.jackrabbit.commons.JcrUtils.getOrCreateByPath;
import static org.apache.jackrabbit.oak.plugins.index.search.FulltextIndexConstants.FACETS;
import static org.apache.jackrabbit.oak.plugins.index.search.FulltextIndexConstants.PROP_REFRESH_DEFN;
import static org.apache.jackrabbit.oak.plugins.index.search.FulltextIndexConstants.PROP_SECURE_FACETS;
import static org.apache.jackrabbit.oak.plugins.index.search.FulltextIndexConstants.PROP_SECURE_FACETS_VALUE_INSECURE;
import static org.apache.jackrabbit.oak.plugins.index.search.FulltextIndexConstants.PROP_SECURE_FACETS_VALUE_STATISTICAL;
import static org.apache.jackrabbit.oak.plugins.index.search.FulltextIndexConstants.PROP_STATISTICAL_FACET_SAMPLE_SIZE;
import static org.apache.jackrabbit.oak.plugins.index.search.FulltextIndexConstants.STATISTICAL_FACET_SAMPLE_SIZE_DEFAULT;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
public abstract class SecureFacetCommonTest extends AbstractJcrTest {
// NOTE(review): logger is keyed on AbstractJcrTest.class rather than this
// class -- log lines will carry the base-class category; confirm intended.
private static final Logger LOG = LoggerFactory.getLogger(AbstractJcrTest.class);
private static final PerfLogger LOG_PERF = new PerfLogger(LOG);
protected TestRepository repositoryOptionsUtil;
protected Node indexNode;
protected IndexOptions indexOptions;
// Property on the index rule that marks a property as a facet dimension.
private static final String FACET_PROP = "facets";
// Dataset sizing: labels per property and leaf nodes per sub-parent.
private static final int NUM_LEAF_NODES = STATISTICAL_FACET_SAMPLE_SIZE_DEFAULT;
private static final int NUM_LABELS = 4;
private static final int NUM_LEAF_NODES_FOR_LARGE_DATASET = NUM_LEAF_NODES;
private static final int NUM_LEAF_NODES_FOR_SMALL_DATASET = NUM_LEAF_NODES / (2 * NUM_LABELS);
// Expected facet counts: all nodes, ACL-visible nodes, and /parent/par1 only.
private final Map<String, Integer> actualLabelCount = new HashMap<>();
private final Map<String, Integer> actualAclLabelCount = new HashMap<>();
private final Map<String, Integer> actualAclPar1LabelCount = new HashMap<>();
@Before
public void createIndex() throws RepositoryException {
    // Random name so repeated test runs don't collide on the same index node.
    String indexName = UUID.randomUUID().toString();
    IndexDefinitionBuilder builder = indexOptions.createIndex(indexOptions.createIndexDefinitionBuilder(), false);
    builder.noAsync();
    // Index "cons" for querying, and "foo"/"bar" as facet dimensions.
    IndexDefinitionBuilder.IndexRule indexRule = builder.indexRule(JcrConstants.NT_BASE);
    indexRule.property("cons").propertyIndex();
    indexRule.property("foo").propertyIndex().getBuilderTree().setProperty(FACET_PROP, true, Type.BOOLEAN);
    indexRule.property("bar").propertyIndex().getBuilderTree().setProperty(FACET_PROP, true, Type.BOOLEAN);
    indexOptions.setIndex(adminSession, indexName, builder);
    indexNode = indexOptions.getIndexNode(adminSession, indexName);
}
private void createDataset(int numberOfLeafNodes) throws RepositoryException {
Random rGen = new Random(42);
Random rGen1 = new Random(42);
int[] foolabelCount = new int[NUM_LABELS];
int[] fooaclLabelCount = new int[NUM_LABELS];
int[] fooaclPar1LabelCount = new int[NUM_LABELS];
int[] barlabelCount = new int[NUM_LABELS];
int[] baraclLabelCount = new int[NUM_LABELS];
int[] baraclPar1LabelCount = new int[NUM_LABELS];
Node par = allow(getOrCreateByPath("/parent", "oak:Unstructured", adminSession));
for (int i = 0; i < NUM_LABELS; i++) {
Node subPar = par.addNode("par" + i);
for (int j = 0; j < numberOfLeafNodes; j++) {
Node child = subPar.addNode("c" + j);
child.setProperty("cons", "val");
// Add a random label out of "l0", "l1", "l2", "l3"
int foolabelNum = rGen.nextInt(NUM_LABELS);
int barlabelNum = rGen1.nextInt(NUM_LABELS);
child.setProperty("foo", "l" + foolabelNum);
child.setProperty("bar", "m" + barlabelNum);
foolabelCount[foolabelNum]++;
barlabelCount[barlabelNum]++;
if (i != 0) {
fooaclLabelCount[foolabelNum]++;
baraclLabelCount[barlabelNum]++;
}
if (i == 1) {
fooaclPar1LabelCount[foolabelNum]++;
baraclPar1LabelCount[barlabelNum]++;
}
}
// deny access for one sub-parent
if (i == 0) {
deny(subPar);
}
}
adminSession.save();
for (int i = 0; i < foolabelCount.length; i++) {
actualLabelCount.put("l" + i, foolabelCount[i]);
actualLabelCount.put("m" + i, barlabelCount[i]);
actualAclLabelCount.put("l" + i, fooaclLabelCount[i]);
actualAclLabelCount.put("m" + i, baraclLabelCount[i]);
actualAclPar1LabelCount.put("l" + i, fooaclPar1LabelCount[i]);
actualAclPar1LabelCount.put("m" + i, baraclPar1LabelCount[i]);
}
assertNotEquals("Acl-ed and actual counts mustn't be same", actualLabelCount, actualAclLabelCount);
}
@Test
public void secureFacets() throws Exception {
createDataset(NUM_LEAF_NODES_FOR_LARGE_DATASET);
assertEventually(() -> assertEquals(actualAclLabelCount, getFacets()));
}
@Test
public void secureFacets_withOneLabelInaccessible() throws Exception {
createDataset(NUM_LEAF_NODES_FOR_LARGE_DATASET);
Node inaccessibleChild = deny(adminSession.getNode("/parent").addNode("par4")).addNode("c0");
inaccessibleChild.setProperty("cons", "val");
inaccessibleChild.setProperty("foo", "l4");
adminSession.save();
assertEventually(() -> assertEquals(actualAclLabelCount, getFacets()));
}
@Test
public void insecureFacets() throws Exception {
Node facetConfig = getOrCreateByPath(indexNode.getPath() + "/" + FACETS, "nt:unstructured", adminSession);
facetConfig.setProperty(PROP_SECURE_FACETS, PROP_SECURE_FACETS_VALUE_INSECURE);
adminSession.save();
createDataset(NUM_LEAF_NODES_FOR_LARGE_DATASET);
assertEventually(() -> assertEquals(actualLabelCount, getFacets()));
}
@Test
public void statisticalFacets() throws Exception {
Node facetConfig = getOrCreateByPath(indexNode.getPath() + "/" + FACETS, "nt:unstructured", adminSession);
facetConfig.setProperty(PROP_SECURE_FACETS, PROP_SECURE_FACETS_VALUE_STATISTICAL);
facetConfig.setProperty(PROP_STATISTICAL_FACET_SAMPLE_SIZE, 3000);
adminSession.save();
createDataset(NUM_LEAF_NODES_FOR_LARGE_DATASET);
assertEventually(() -> assertEquals("Unexpected number of facets", actualAclLabelCount.size(), getFacets().size()));
for (Map.Entry<String, Integer> facet : actualAclLabelCount.entrySet()) {
String facetLabel = facet.getKey();
assertEventually(() -> {
int facetCount = getFacets().get(facetLabel);
float ratio = ((float) facetCount) / facet.getValue();
assertTrue("Facet count for label: " + facetLabel + " is outside of 10% margin of error. " +
"Expected: " + facet.getValue() + "; Got: " + facetCount + "; Ratio: " + ratio,
Math.abs(ratio - 1) < 0.1);
});
}
}
@Test
public void statisticalFacetsWithHitCountLessThanSampleSize() throws Exception {
Node facetConfig = getOrCreateByPath(indexNode.getPath() + "/" + FACETS, "nt:unstructured", adminSession);
facetConfig.setProperty(PROP_SECURE_FACETS, PROP_SECURE_FACETS_VALUE_STATISTICAL);
indexNode.setProperty(PROP_REFRESH_DEFN, true);
adminSession.save();
createDataset(NUM_LEAF_NODES_FOR_SMALL_DATASET);
assertEventually(() -> assertEquals("Unexpected number of facets", actualAclLabelCount.size(), getFacets().size()));
// Since the hit count is less than sample size -> flow should have switched to secure facet count instead of statistical
// and thus the count should be exactly equal
assertEventually(() -> assertEquals(actualAclLabelCount, getFacets()));
}
//TODO Test is failing with lucene index.
@Ignore
@Test
public void statisticalFacets_withHitCountSameAsSampleSize() throws Exception {
Node facetConfig = getOrCreateByPath(indexNode.getPath() + "/" + FACETS, "nt:unstructured", adminSession);
facetConfig.setProperty(PROP_SECURE_FACETS, PROP_SECURE_FACETS_VALUE_STATISTICAL);
indexNode.setProperty(PROP_REFRESH_DEFN, true);
adminSession.save();
createDataset(NUM_LEAF_NODES_FOR_LARGE_DATASET);
assertEventually(() -> {
Map<String, Integer> facets = getFacets("/parent/par1");
assertEquals("Unexpected number of facets", actualAclPar1LabelCount.size(), facets.size());
for (Map.Entry<String, Integer> facet : actualAclPar1LabelCount.entrySet()) {
String facetLabel = facet.getKey();
int facetCount = facets.get(facetLabel);
float ratio = ((float) facetCount) / facet.getValue();
assertTrue("Facet count for label: " + facetLabel + " is outside of 10% margin of error. " +
"Expected: " + facet.getValue() + "; Got: " + facetCount + "; Ratio: " + ratio,
Math.abs(ratio - 1) < 0.1);
}
});
}
@Test
public void statisticalFacets_withOneLabelInaccessible() throws Exception {
Node facetConfig = getOrCreateByPath(indexNode.getPath() + "/" + FACETS, "nt:unstructured", adminSession);
facetConfig.setProperty(PROP_SECURE_FACETS, PROP_SECURE_FACETS_VALUE_STATISTICAL);
indexNode.setProperty(PROP_REFRESH_DEFN, true);
adminSession.save();
createDataset(NUM_LEAF_NODES_FOR_LARGE_DATASET);
Node inaccessibleChild = deny(adminSession.getNode("/parent").addNode("par4")).addNode("c0");
inaccessibleChild.setProperty("cons", "val");
inaccessibleChild.setProperty("foo", "l4");
adminSession.save();
assertEventually(() -> {
Map<String, Integer> facets = getFacets();
assertEquals("Unexpected number of facets", actualAclLabelCount.size(), facets.size());
});
for (Map.Entry<String, Integer> facet : actualAclLabelCount.entrySet()) {
assertEventually(() -> {
String facetLabel = facet.getKey();
int facetCount = getFacets().get(facetLabel);
float ratio = ((float) facetCount) / facet.getValue();
assertTrue("Facet count for label: " + facetLabel + " is outside of 10% margin of error. " +
"Expected: " + facet.getValue() + "; Got: " + facetCount + "; Ratio: " + ratio,
Math.abs(ratio - 1) < 0.1);
});
}
}
@Test
public void secureFacets_withAdminSession() throws Exception {
Node facetConfig = getOrCreateByPath(indexNode.getPath() + "/" + FACETS, "nt:unstructured", adminSession);
facetConfig.setProperty(PROP_SECURE_FACETS, PROP_SECURE_FACETS_VALUE_INSECURE);
indexNode.setProperty(PROP_REFRESH_DEFN, true);
adminSession.save();
createDataset(NUM_LEAF_NODES_FOR_LARGE_DATASET);
qm = adminSession.getWorkspace().getQueryManager();
assertEventually(() -> assertEquals(actualLabelCount, getFacets()));
}
@Test
public void statisticalFacets_withAdminSession() throws Exception {
Node facetConfig = getOrCreateByPath(indexNode.getPath() + "/" + FACETS, "nt:unstructured", adminSession);
facetConfig.setProperty(PROP_SECURE_FACETS, PROP_SECURE_FACETS_VALUE_STATISTICAL);
indexNode.setProperty(PROP_REFRESH_DEFN, true);
adminSession.save();
createDataset(NUM_LEAF_NODES_FOR_LARGE_DATASET);
qm = adminSession.getWorkspace().getQueryManager();
assertEventually(() -> {
Map<String, Integer> facets = getFacets();
assertEquals("Unexpected number of facets", actualLabelCount.size(), facets.size());
});
for (Map.Entry<String, Integer> facet : actualLabelCount.entrySet()) {
assertEventually(() -> {
String facetLabel = facet.getKey();
int facetCount = getFacets().get(facetLabel);
float ratio = ((float) facetCount) / facet.getValue();
assertTrue("Facet count for label: " + facetLabel + " is outside of 5% margin of error. " +
"Expected: " + facet.getValue() + "; Got: " + facetCount + "; Ratio: " + ratio,
Math.abs(ratio - 1) < 0.05);
});
}
}
private Map<String, Integer> getFacets() {
return getFacets(null);
}
private Node deny(Node node) throws RepositoryException {
AccessControlUtils.deny(node, "anonymous", Privilege.JCR_ALL);
return node;
}
private Node allow(Node node) throws RepositoryException {
AccessControlUtils.allow(node, "anonymous", Privilege.JCR_READ);
return node;
}
private Map<String, Integer> getFacets(String path) {
String pathCons = "";
if (path != null) {
pathCons = " AND ISDESCENDANTNODE('" + path + "')";
}
String query = "SELECT [rep:facet(foo)], [rep:facet(bar)] FROM [nt:base] WHERE [cons] = 'val'" + pathCons;
Query q;
QueryResult queryResult;
try {
q = qm.createQuery(query, Query.JCR_SQL2);
queryResult = q.execute();
} catch (RepositoryException e) {
throw new RuntimeException(e);
}
long start = LOG_PERF.start("Getting the Facet Results...");
FacetResult facetResult = new FacetResult(queryResult);
LOG_PERF.end(start, -1, "Facet Results fetched");
return facetResult.getDimensions()
.stream()
.flatMap(dim -> Objects.requireNonNull(facetResult.getFacets(dim)).stream())
.collect(Collectors.toMap(FacetResult.Facet::getLabel, FacetResult.Facet::getCount));
}
protected void assertEventually(Runnable r) {
TestUtils.assertEventually(r, ((repositoryOptionsUtil.isAsync() ? repositoryOptionsUtil.defaultAsyncIndexingTimeInSeconds : 0) + 3000) * 5);
}
}